
Float

Basic 32-bit run


  1. Serialization
    1. Raw Json
  2. Example Input/Output Pair
  3. Batch Execution
  4. Differential Validation
    1. Feedback Validation

Target Description: Adds a scalar offset to the input based on color band (3rd tensor dimension)

Report Description: Basic 32-bit run
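
The band-bias operation itself is simple arithmetic: each output value is the corresponding input value plus the bias for its color band. A minimal plain-Java sketch of that arithmetic (an illustration of the target description only, not the cuDNN-backed implementation under test):

    // Sketch: out[y][x][band] = in[y][x][band] + bias[band]
    public final class BandBiasSketch {
      public static double[][][] addBandBias(double[][][] input, double[] bias) {
        final double[][][] output = new double[input.length][input[0].length][bias.length];
        for (int y = 0; y < input.length; y++) {
          for (int x = 0; x < input[0].length; x++) {
            for (int b = 0; b < bias.length; b++) {
              output[y][x][b] = input[y][x][b] + bias[b];
            }
          }
        }
        return output;
      }
    }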

Serialization

This run demonstrates the layer’s JSON serialization and verifies deserialization integrity.

Raw Json

Code from SerializationTest.java:84 executed in 0.00 seconds:

    final JsonObject json = layer.getJson();
    final NNLayer echo = NNLayer.fromJson(json);
    if (echo == null) throw new AssertionError("Failed to deserialize");
    if (layer == echo) throw new AssertionError("Serialization did not copy");
    if (!layer.equals(echo)) throw new AssertionError("Serialization not equal");
    return new GsonBuilder().setPrettyPrinting().create().toJson(json);

Returns:

    {
      "class": "com.simiacryptus.mindseye.layers.cudnn.ImgBandBiasLayer",
      "id": "81473423-7df2-4d99-a65a-7018c000e56a",
      "isFrozen": false,
      "name": "ImgBandBiasLayer/81473423-7df2-4d99-a65a-7018c000e56a",
      "bias": [
        -0.56,
        -1.848,
        -1.044
      ],
      "precision": "Float"
    }

Wrote Model to ImgBandBiasLayer_Float.json; 283 characters
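
A minimal sketch of reloading the written model, assuming the NNLayer.fromJson factory shown above and Gson's JsonParser for reading the file (the file name comes from the log line above; the NNLayer import path is assumed):

    import com.google.gson.JsonObject;
    import com.google.gson.JsonParser;
    import com.simiacryptus.mindseye.lang.NNLayer; // package assumed; adjust to the NNLayer used in the test above

    import java.io.FileReader;
    import java.io.Reader;

    public final class ReloadSketch {
      public static void main(String[] args) throws Exception {
        try (Reader reader = new FileReader("ImgBandBiasLayer_Float.json")) {
          // Parse the pretty-printed JSON written by the serialization test ...
          final JsonObject json = new JsonParser().parse(reader).getAsJsonObject();
          // ... and rebuild the layer with the same factory method the test uses.
          final NNLayer layer = NNLayer.fromJson(json);
          System.out.println(layer.getJson());
        }
      }
    }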

Example Input/Output Pair

We display an example input/output pair from a random execution:

Code from ReferenceIO.java:69 executed in 0.00 seconds:

    final SimpleEval eval = SimpleEval.run(layer, inputPrototype);
    return String.format("--------------------\nInput: \n[%s]\n--------------------\nOutput: \n%s\n--------------------\nDerivative: \n%s",
                         Arrays.stream(inputPrototype).map(t -> t.prettyPrint()).reduce((a, b) -> a + ",\n" + b).get(),
                         eval.getOutput().prettyPrint(),
                         Arrays.stream(eval.getDerivative()).map(t -> t.prettyPrint()).reduce((a, b) -> a + ",\n" + b).get());

Returns:

    --------------------
    Input: 
    [[
    	[ [ -1.7, -0.68, -1.296 ], [ -0.036, -1.24, -1.372 ], [ 0.068, 0.372, 1.368 ], [ -0.124, 1.748, 1.476 ], [ 1.148, 0.976, -0.572 ], [ -1.484, 0.604, 1.468 ], [ 1.756, 0.256, -1.908 ], [ 0.836, -1.356, -1.464 ] ],
    	[ [ -0.388, -0.924, 0.028 ], [ -1.228, -1.712, -0.6 ], [ -0.224, -0.924, -0.992 ], [ 0.748, 0.692, 0.092 ], [ 0.852, -1.288, -0.56 ], [ -0.332, -0.348, 1.712 ], [ 0.108, 0.848, -0.916 ], [ -0.824, -0.404, -0.284 ] ],
    	[ [ 0.58, -1.272, -1.832 ], [ 0.02, 1.544, -1.596 ], [ 1.288, -0.64, -0.556 ], [ -0.512, 0.292, -1.176 ], [ -0.824, -1.028, 1.212 ], [ 1.084, -0.568, -1.532 ], [ -1.744, 0.184, 0.508 ], [ -1.244, -1.496, 0.02 ] ],
    	[ [ -1.848, 0.212, -1.324 ], [ -0.364, 1.94, -0.22 ], [ 0.8, 1.864, -0.66 ], [ 1.932, -1.74, -0.856 ], [ -1.748, 0.456, 0.752 ], [ -1.684, 1.612, -1.564 ], [ 0.928, -1.248, -1.312 ], [ 1.852, 0.576, -0.816 ] ],
    	[ [ 1.064, -1.064, 1.164 ], [ -0.744, 1.38, 0.596 ], [ 0.852, 0.608, 0.248 ], [ 0.024, -1.448, 1.8 ], [ -0.864, 0.128, -0.108 ], [ 1.56, -1.044, 1.724 ], [ -0.416, -0.76, -1.112 ], [ 1.728, 0.92, -0.82 ] ],
    	[ [ 0.532, -0.652, 0.316 ], [ -0.54, 0.016, 0.228 ], [ -0.74, 0.488, -0.064 ], [ 0.652, 0.816, -0.004 ], [ -1.416, -0.696, -1.564 ], [ -0.388, -0.584, 1.48 ], [ 0.808, -1.08, -1.136 ], [ 1.524, -1.804, -0.112 ] ],
    	[ [ -0.724, 1.928, -1.552 ], [ 1.144, -0.648, 0.696 ], [ 0.648, -1.544, -1.616 ], [ -0.596, -1.432, 0.592 ], [ 0.328, 1.792, -1.12 ], [ 1.576, -1.164, -0.656 ], [ 1.572, -0.384, 1.644 ], [ 0.996, 0.868, 1.976 ] ],
    	[ [ -0.612, -0.916, 0.468 ], [ -0.584, 1.06, -1.68 ], [ 1.144, -0.752, 0.596 ], [ 1.284, -0.712, 0.736 ], [ -0.52, 0.296, 1.108 ], [ 1.772, 1.536, 1.928 ], [ 0.104, -0.216, 1.54 ], [ -0.528, -0.496, -0.896 ] ]
    ]]
    --------------------
    Output: 
    [
    	[ [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ] ],
    	[ [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ] ],
    	[ [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ] ],
    	[ [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ] ],
    	[ [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ] ],
    	[ [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ] ],
    	[ [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ] ],
    	[ [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ] ]
    ]
    --------------------
    Derivative: 
    [
    	[ [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ] ],
    	[ [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ] ],
    	[ [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ] ],
    	[ [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ] ],
    	[ [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ] ],
    	[ [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ] ],
    	[ [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ] ],
    	[ [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ], [ 1.0, 1.0, 1.0 ] ]
    ]
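
For a hand check against the target description, the expected output for any pixel is the input plus the per-band bias from the serialized JSON above. A small sketch for the first input pixel (values copied from the log; not part of the test harness):

    // Hand check of the band-bias arithmetic for the first input pixel logged above.
    public final class FirstPixelCheck {
      public static void main(String[] args) {
        final double[] bias = {-0.56, -1.848, -1.044};      // from the serialized JSON
        final double[] firstPixel = {-1.7, -0.68, -1.296};  // first entry of the input tensor
        final double[] expected = new double[3];
        for (int band = 0; band < 3; band++) {
          expected[band] = firstPixel[band] + bias[band];
        }
        // Approximately [-2.26, -2.528, -2.34]
        System.out.println(java.util.Arrays.toString(expected));
      }
    }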


Batch Execution

Most layers, including this one, should produce the same results no matter how the input items are split into batches. We verify this here:

Code from BatchingTester.java:113 executed in 0.01 seconds:

    return test(reference, inputPrototype);

Returns:

    ToleranceStatistics{absoluteTol=0.0000e+00 +- 0.0000e+00 [0.0000e+00 - 0.0000e+00] (3840#), relativeTol=0.0000e+00 +- 0.0000e+00 [0.0000e+00 - 0.0000e+00] (1920#)}
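
The ToleranceStatistics summary above aggregates element-wise differences between the two evaluations. A plain-Java sketch of accumulating absolute and relative differences in that spirit (one common definition of relative difference; not the BatchingTester implementation):

    // Accumulate absolute and relative element-wise differences between two result arrays,
    // e.g. a batched evaluation vs. a per-item evaluation of the same data.
    public final class ToleranceSketch {
      public static void main(String[] args) {
        final double[] batched = {0.0, 1.5, -2.0, 3.25};
        final double[] perItem = {0.0, 1.5, -2.0, 3.25};
        double sumAbs = 0.0, maxAbs = 0.0, maxRel = 0.0;
        for (int i = 0; i < batched.length; i++) {
          final double abs = Math.abs(batched[i] - perItem[i]);
          final double denom = Math.abs(batched[i]) + Math.abs(perItem[i]);
          final double rel = denom == 0.0 ? 0.0 : abs / denom;
          sumAbs += abs;
          maxAbs = Math.max(maxAbs, abs);
          maxRel = Math.max(maxRel, rel);
        }
        System.out.printf("absoluteTol mean=%.4e max=%.4e, relativeTol max=%.4e%n",
            sumAbs / batched.length, maxAbs, maxRel);
      }
    }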

Differential Validation

Code from SingleDerivativeTester.java:292 executed in 0.00 seconds:

    log.info(String.format("Inputs: %s", Arrays.stream(inputPrototype).map(t -> t.prettyPrint()).reduce((a, b) -> a + ",\n" + b).get()));
    log.info(String.format("Inputs Statistics: %s", Arrays.stream(inputPrototype).map(x -> new ScalarStatistics().add(x.getData()).toString()).reduce((a, b) -> a + ",\n" + b).get()));
    log.info(String.format("Output: %s", outputPrototype.prettyPrint()));
    log.info(String.format("Outputs Statistics: %s", new ScalarStatistics().add(outputPrototype.getData())));

Logging:

    Inputs: [
    	[ [ 0.648, -1.888, 0.732 ], [ -0.876, 1.848, -1.052 ], [ 1.892, -0.668, 0.896 ], [ -0.692, -0.764, -1.056 ], [ 0.492, 1.52, -1.02 ], [ -1.1, -0.688, -1.852 ], [ -1.028, -0.116, 0.912 ], [ -1.564, -1.636, 0.004 ] ],
    	[ [ 0.104, -1.096, -1.26 ], [ 0.428, 0.836, -0.408 ], [ -1.18, 1.24, -1.832 ], [ 1.42, -0.804, -1.664 ], [ -0.844, 0.964, 1.636 ], [ 0.284, 1.084, 1.196 ], [ -1.692, -1.108, -1.44 ], [ 0.488, 1.144, 0.584 ] ],
    	[ [ -1.384, 1.304, -1.76 ], [ -1.448, 0.088, -0.608 ], [ -1.692, -0.832, 1.792 ], [ -0.604, -0.212, -0.036 ], [ 0.132, 0.192, 1.34 ], [ -0.712, 0.332, 0.696 ], [ -0.192, 1.308, 0.108 ], [ -1.896, -1.608, 0.468 ] ],
    	[ [ -0.284, 1.86, -1.624 ], [ 0.556, -1.636, 0.976 ], [ 1.352, -0.44, -1.136 ], [ 1.368, 0.648, -1.06 ], [ 1.472, 0.688, -0.92 ], [ -0.248, -1.252, -1.656 ], [ 0.832, 0.612, -1.34 ], [ 0.18, -0.812, 0.008 ] ],
    	[ [ 1.532, 0.256, -1.12 ], [ -1.976, -1.024, 0.256 ], [ 1.056, -0.896, 0.528 ], [ 1.52, 0.624, -0.936 ], [ 1.888, -1.248, -1.176 ], [ -1.12, 1.256, 0.096 ], [ 1.248, -1.068, 0.608 ], [ 0.036, 0.604, -1.388 ] ],
    	[ [ 0.128, -1.84, -1.336 ], [ -1.624, -0.28, -1.06 ], [ -1.088, 1.68, -1.224 ], [ 0.376, -1.536, -1.788 ], [ 1.652, -1.328, -0.124 ], [ -1.5, -0.212, 0.424 ], [ -1.104, -0.5, -1.344 ], [ 0.592, -1.84, 0.812 ] ],
    	[ [ 0.608, 0.628, -1.496 ], [ -0.112, 0.208, -0.296 ], [ 1.788, -0.328, -0.516 ], [ 0.488, -1.892, 0.424 ], [ -1.892, 1.6, -0.964 ], [ -0.888, -0.084, 0.54 ], [ -0.512, 0.328, -1.884 ], [ -0.912, -1.868, 1.308 ] ],
    	[ [ -0.348, 0.168, 1.404 ], [ -1.712, 0.22, -0.104 ], [ 0.2, 1.92, 0.4 ], [ 0.436, -1.04, 1.572 ], [ 1.648, 1.764, -0.212 ], [ -1.788, -0.6, 0.408 ], [ -1.292, -0.22, -0.36 ], [ 1.888, 0.308, -1.648 ] ]
    ]
    Inputs Statistics: {meanExponent=-0.14984760479535933, negative=103, min=-1.648, max=-1.648, mean=-0.17331249999999995, count=192, positive=89, stdDev=1.1099882254677673, zeros=0}
    Output: [
    	[ [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ] ],
    	[ [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ] ],
    	[ [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ] ],
    	[ [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ] ],
    	[ [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ] ],
    	[ [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ] ],
    	[ [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ] ],
    	[ [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0 ] ]
    ]
    Outputs Statistics: {meanExponent=NaN, negative=0, min=0.0, max=0.0, mean=0.0, count=192, positive=0, stdDev=0.0, zeros=192}
    

Feedback Validation

We validate that the implemented derivative with respect to the inputs agrees with finite-difference estimates:
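
As an illustration of the idea, a finite difference estimates a partial derivative by perturbing one input value and re-evaluating; for this layer the analytic derivative of each output with respect to its own input is 1. A minimal sketch (a central difference is used here for the illustration; this is not the SingleDerivativeTester implementation):

    import java.util.function.DoubleUnaryOperator;

    // Estimate d(output)/d(input) for the band-bias function x -> x + bias by central difference
    // and compare against the analytically implemented value of 1.0.
    public final class FiniteDifferenceSketch {
      static double centralDifference(DoubleUnaryOperator f, double x, double eps) {
        return (f.applyAsDouble(x + eps) - f.applyAsDouble(x - eps)) / (2 * eps);
      }

      public static void main(String[] args) {
        final double bias = -0.56; // first band bias from the serialized JSON above
        final DoubleUnaryOperator bandBias = v -> v + bias;
        final double implemented = 1.0;
        final double measured = centralDifference(bandBias, -1.7, 1e-6);
        System.out.printf("implemented=%.6f measured=%.6f error=%.2e%n",
            implemented, measured, measured - implemented);
      }
    }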

Code from SingleDerivativeTester.java:303 executed in 0.00 seconds:

    return testFeedback(statistics, component, inputPrototype, outputPrototype);

Logging:

    Feedback for input 0
    Inputs Values: [
    	[ [ 0.648, -1.888, 0.732 ], [ -0.876, 1.848, -1.052 ], [ 1.892, -0.668, 0.896 ], [ -0.692, -0.764, -1.056 ], [ 0.492, 1.52, -1.02 ], [ -1.1, -0.688, -1.852 ], [ -1.028, -0.116, 0.912 ], [ -1.564, -1.636, 0.004 ] ],
    	[ [ 0.104, -1.096, -1.26 ], [ 0.428, 0.836, -0.408 ], [ -1.18, 1.24, -1.832 ], [ 1.42, -0.804, -1.664 ], [ -0.844, 0.964, 1.636 ], [ 0.284, 1.084, 1.196 ], [ -1.692, -1.108, -1.44 ], [ 0.488, 1.144, 0.584 ] ],
    	[ [ -1.384, 1.304, -1.76 ], [ -1.448, 0.088, -0.608 ], [ -1.692, -0.832, 1.792 ], [ -0.604, -0.212, -0.036 ], [ 0.132, 0.192, 1.34 ], [ -0.712, 0.332, 0.696 ], [ -0.192, 1.308, 0.108 ], [ -1.896, -1.608, 0.468 ] ],
    	[ [ -0.284, 1.86, -1.624 ], [ 0.556, -1.636, 0.976 ], [ 1.352, -0.44, -1.136 ], [ 1.368, 0.648, -1.06 ], [ 1.472, 0.688, -0.92 ], [ -0.248, -1.252, -1.656 ], [ 0.832, 0.612, -1.34 ], [ 0.18, -0.812, 0.008 ] ],
    	[ [ 1.532, 0.256, -1.12 ], [ -1.976, -1.024, 0.256 ], [ 1.056, -0.896, 0.528 ], [ 1.52, 0.624, -0.936 ], [ 1.888, -1.248, -1.176 ], [ -1.12, 1.256, 0.096 ], [ 1.248, -1.068, 0.608 ], [ 0.036, 0.604, -1.388 ] ],
    	[ [ 0.128, -1.84, -1.336 ], [ -1.624, -0.28, -1.06 ], [ -1.088, 1.68, -1.224 ], [ 0.376, -1.536, -1.788 ], [ 1.652, -1.328, -0.124 ], [ -1.5, -0.212, 0.424 ], [ -1.104, -0.5, -1.344 ], [ 0.592, -1.84, 0.812 ] ],
    	[ [ 0.608, 0.628, -1.496 ], [ -0.112, 0.208, -0.296 ], [ 1.788, -0.328, -0.516 ], [ 0.488, -1.892, 0.424 ], [ -1.892, 1.6, -0.964 ], [ -0.888, -0.084, 0.54 ], [ -0.512, 0.328, -1.884 ], [ -0.912, -1.868, 1.308 ] ],
    	[ [ -0.348, 0.168, 1.404 ], [ -1.712, 0.22, -0.104 ], [ 0.2, 1.92, 0.4 ], [ 0.436, -1.04, 1.572 ], [ 1.648, 1.764, -0.212 ], [ -1.788, -0.6, 0.408 ], [ -1.292, -0.22, -0.36 ], [ 1.888, 0.308, -1.648 ] ]
    ]
    Value Statistics: {meanExponent=-0.14984760479535933, negative=103, min=-1.648, max=-1.648, mean=-0.17331249999999995, count=192, positive=89, stdDev=1.1099882254677673, zeros=0}
    Implemented Feedback: [ [ 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, ... ], ... ]
    Implemented Statistics: {meanExponent=0.0, negative=0, min=1.0, max=1.0, mean=0.005208333333333333, count=36864, positive=192, stdDev=0.07198059875565235, zeros=36672}
    Measured: [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], ... ]
    Measured Statistics: {meanExponent=NaN, negative=0, min=0.0, max=0.0, mean=0.0, count=36864, positive=0, stdDev=0.0, zeros=36864}
    Feedback Error: [ [ -1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, -1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, -1.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, -1.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, -1.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, -1.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -1.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -1.0, ... ], ... ]
    Error Statistics: {meanExponent=0.0, negative=192, min=-1.0, max=-1.0, mean=-0.005208333333333333, count=36864, positive=0, stdDev=0.07198059875565235, zeros=36672}
    

Returns:

    java.lang.AssertionError: ToleranceStatistics{absoluteTol=5.2083e-03 +- 7.1981e-02 [0.0000e+00 - 1.0000e+00] (36864#), relativeTol=1.0000e+00 +- 0.0000e+00 [1.0000e+00 - 1.0000e+00] (192#)}
    	at com.simiacryptus.mindseye.test.unit.SingleDerivativeTester.lambda$testFeedback$29(SingleDerivativeTester.java:407)
    	at java.util.stream.IntPipeline$4$1.accept(IntPipeline.java:250)
    	at java.util.stream.Streams$RangeIntSpliterator.forEachRemaining(Streams.java:110)
    	at java.util.Spliterator$OfInt.forEachRemaining(Spliterator.java:693)
    	at java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:481)
    	at java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:471)
    	at java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:708)
    	at java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234)
    	at java.util.stream.ReferencePipeline.reduce(ReferencePipeline.java:479)
    	at com.simiacryptus.mindseye.test.unit.SingleDerivativeTester.testFeedback(SingleDerivativeTester.java:438)
    	at com.simiacryptus.mindseye.test.unit.SingleDerivativeTester.lambda$test$17(SingleDerivativeTester.java:304)
    	at com.simiacryptus.util.io.MarkdownNotebookOutput.lambda$null$1(MarkdownNotebookOutput.java:205)
    	at com.simiacryptus.util.lang.TimedResult.time(TimedResult.java:59)
    	at com.simiacryptus.util.io.MarkdownNotebookOutput.lambda$code$2(MarkdownNotebookOutput.java:205)
    	at com.simiacryptus.util.test.SysOutInterceptor.withOutput(SysOutInterceptor.java:107)
    	at com.simiacryptus.util.io.MarkdownNotebookOutput.code(MarkdownNotebookOutput.java:203)
    	at com.simiacryptus.util.io.NotebookOutput.code(NotebookOutput.java:82)
    	at com.simiacryptus.mindseye.test.unit.SingleDerivativeTester.test(SingleDerivativeTester.java:303)
    	at com.simiacryptus.mindseye.test.unit.SingleDerivativeTester.test(SingleDerivativeTester.java:42)
    	at com.simiacryptus.mindseye.test.unit.StandardLayerTests.lambda$run$5(StandardLayerTests.java:257)
    	at java.util.stream.ForEachOps$ForEachOp$OfRef.accept(ForEachOps.java:184)
    	at java.util.stream.ReferencePipeline$2$1.accept(ReferencePipeline.java:175)
    	at java.util.ArrayList$ArrayListSpliterator.forEachRemaining(ArrayList.java:1374)
    	at java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:481)
    	at java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:471)
    	at java.util.stream.ForEachOps$ForEachOp.evaluateSequential(ForEachOps.java:151)
    	at java.util.stream.ForEachOps$ForEachOp$OfRef.evaluateSequential(ForEachOps.java:174)
    	at java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234)
    	at java.util.stream.ReferencePipeline.forEach(ReferencePipeline.java:418)
    	at com.simiacryptus.mindseye.test.unit.StandardLayerTests.run(StandardLayerTests.java:256)
    	at com.simiacryptus.mindseye.test.NotebookReportBase.lambda$run$0(NotebookReportBase.java:105)
    	at com.simiacryptus.util.lang.TimedResult.time(TimedResult.java:76)
    	at com.simiacryptus.mindseye.test.NotebookReportBase.run(NotebookReportBase.java:103)
    	at com.simiacryptus.mindseye.layers.LayerTestBase.test(LayerTestBase.java:37)
    	at sun.reflect.GeneratedMethodAccessor14.invoke(Unknown Source)
    	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    	at java.lang.reflect.Method.invoke(Method.java:498)
    	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
    	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
    	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
    	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
    	at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
    	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
    	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
    	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
    	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71)
    	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288)
    	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58)
    	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268)
    	at org.junit.runners.ParentRunner.run(ParentRunner.java:363)
    	at org.junit.runners.Suite.runChild(Suite.java:128)
    	at org.junit.runners.Suite.runChild(Suite.java:27)
    	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
    	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71)
    	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288)
    	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58)
    	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268)
    	at org.junit.runners.ParentRunner.run(ParentRunner.java:363)
    	at org.junit.runner.JUnitCore.run(JUnitCore.java:137)
    	at com.intellij.junit4.JUnit4IdeaTestRunner.startRunnerWithArgs(JUnit4IdeaTestRunner.java:68)
    	at com.intellij.rt.execution.junit.IdeaTestRunner$Repeater.startRunnerWithArgs(IdeaTestRunner.java:47)
    	at com.intellij.rt.execution.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:242)
    	at com.intellij.rt.execution.junit.JUnitStarter.main(JUnitStarter.java:70)