IrregularTest_Float

Converts from 7 bands to 5; the band counts are chosen so they do not divide evenly, exercising the irregular case. (32-bit)


  1. Serialization
    1. Raw Json
  2. Example Input/Output Pair
  3. Batch Execution
  4. Differential Validation
    1. Feedback Validation
    2. Learning Validation

Target Description: This is the general convolution layer, allowing any number of input and output bands. During execution it delegates processing to a dynamically created subnet, built from SimpleConvolutionLayer and ImgConcatLayer, to implement the more general layer contract.
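
For orientation, the serialized filter below contains one 3×3 kernel plane per input/output band pair. A standalone check of that dimensional bookkeeping (plain Java, independent of the MindsEye API; the class name is ours):

    public class IrregularBandCheck {
      public static void main(String[] args) {
        final int inputBands = 7, outputBands = 5;
        final int kernelX = 3, kernelY = 3;             // kernel size, as serialized below
        // One kernelX-by-kernelY plane per (input band, output band) pair:
        final int planes = inputBands * outputBands;    // 35 planes in the JSON filter
        final int weights = planes * kernelX * kernelY; // 315 scalar weights
        System.out.println(planes + " planes, " + weights + " weights");
        // 7 and 5 share no common factor, so no equal-band grouping divides
        // evenly -- the ragged split this "irregular" test is designed to hit.
      }
    }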

Report Description: Converts from 7 bands to 5; the band counts are chosen so they do not divide evenly, exercising the irregular case. (32-bit)

Serialization

This run demonstrates the layer’s JSON serialization and verifies deserialization integrity.

Raw Json

Code from SerializationTest.java:84 executed in 0.00 seconds:

    final JsonObject json = layer.getJson();
    final NNLayer echo = NNLayer.fromJson(json);
    if (echo == null) throw new AssertionError("Failed to deserialize");
    if (layer == echo) throw new AssertionError("Serialization did not copy");
    if (!layer.equals(echo)) throw new AssertionError("Serialization not equal");
    return new GsonBuilder().setPrettyPrinting().create().toJson(json);

Returns:

    {
      "class": "com.simiacryptus.mindseye.layers.cudnn.ConvolutionLayer",
      "id": "18e368d2-1eff-45f5-a609-1127bc91712f",
      "isFrozen": false,
      "name": "ConvolutionLayer/18e368d2-1eff-45f5-a609-1127bc91712f",
      "filter": [
        [
          [
            -1.872,
            -1.856,
            1.088
          ],
          [
            1.256,
            -1.512,
            -0.628
          ],
          [
            0.544,
            1.564,
            -1.34
          ]
        ],
        [
          [
            1.268,
            1.912,
            1.32
          ],
          [
            -0.612,
            -1.36,
            1.68
          ],
          [
            -1.812,
            -0.856,
            1.384
          ]
        ],
        [
          [
            1.956,
            -1.78,
            0.3
          ],
          [
            -1.904,
            -1.356,
            0.544
          ],
          [
            -1.116,
            0.248,
            0.14
          ]
        ],
        [
          [
            -0.896,
            1.024,
            -1.404
          ],
          [
            1.856,
            0.212,
            0.204
          ],
          [
            -0.912,
            -0.684,
            0.812
          ]
        ],
        [
          [
            -0.052,
            1.596,
            -1.748
          ],
          [
            0.816,
            -0.192,
            0.428
          ],
          [
            1.744,
            -1.748,
            -0.72
          ]
        ],
        [
          [
            -0.612,
            -0.144,
            1.888
          ],
          [
            0.524,
            -1.584,
            -0.96
          ],
          [
            -0.756,
            -0.804,
            -1.096
          ]
        ],
        [
          [
            -0.576,
            1.0,
            -1.34
          ],
          [
            1.58,
            -1.068,
            0.512
          ],
          [
            1.556,
            -0.504,
            0.736
          ]
        ],
        [
          [
            0.0,
            0.676,
            -0.4
          ],
          [
            0.216,
            1.096,
            1.852
          ],
          [
            -0.18,
            -0.32,
            -0.832
          ]
        ],
        [
          [
            -1.76,
            -0.2,
            -1.628
          ],
          [
            -0.596,
            1.22,
            -1.692
          ],
          [
            -1.584,
            -0.408,
            -0.92
          ]
        ],
        [
          [
            -1.416,
            -0.756,
            1.028
          ],
          [
            -0.528,
            0.98,
            1.692
          ],
          [
            -0.568,
            -1.788,
            -0.052
          ]
        ],
        [
          [
            1.608,
            1.164,
            1.124
          ],
          [
            0.536,
            0.496,
            -0.752
          ],
          [
            0.448,
            0.392,
            1.892
          ]
        ],
        [
          [
            0.088,
            -1.352,
            -1.616
          ],
          [
            -1.212,
            -1.02,
            1.4
          ],
          [
            1.884,
            -0.22,
            -1.272
          ]
        ],
        [
          [
            0.724,
            -1.864,
            1.292
          ],
          [
            1.244,
            -1.772,
            -0.128
          ],
          [
            -0.324,
            0.544,
            -0.068
          ]
        ],
        [
          [
            -1.548,
            -1.552,
            -1.532
          ],
          [
            0.844,
            0.784,
            1.148
          ],
          [
            -0.772,
            -0.048,
            1.148
          ]
        ],
        [
          [
            1.784,
            -0.24,
            -1.632
          ],
          [
            1.708,
            1.232,
            0.2
          ],
          [
            -1.908,
            0.196,
            -1.492
          ]
        ],
        [
          [
            -1.24,
            -0.752,
            -1.068
          ],
          [
            -0.532,
            0.804,
            1.544
          ],
          [
            1.7,
            0.388,
            -0.652
          ]
        ],
        [
          [
            -0.736,
            -0.124,
            0.252
          ],
          [
            -1.428,
            -1.844,
            0.944
          ],
          [
            -0.728,
            -0.228,
            0.304
          ]
        ],
        [
          [
            -1.928,
            -0.368,
            0.62
          ],
          [
            -0.1,
            -0.408,
            0.592
          ],
          [
            0.592,
            0.688,
            -1.352
          ]
        ],
        [
          [
            -1.316,
            1.288,
            -0.06
          ],
          [
            -0.552,
            -0.764,
            1.26
          ],
          [
            -1.336,
            -1.272,
            -1.268
          ]
        ],
        [
          [
            1.052,
            0.116,
            -1.248
          ],
          [
            -0.748,
            -1.156,
            -1.468
          ],
          [
            -1.776,
            1.532,
            -1.86
          ]
        ],
        [
          [
            -1.968,
            0.416,
            -0.944
          ],
          [
            -1.704,
            0.732,
            1.964
          ],
          [
            -1.364,
            0.272,
            -0.78
          ]
        ],
        [
          [
            1.592,
            0.884,
            1.076
          ],
          [
            -0.704,
            -0.58,
            1.18
          ],
          [
            -0.288,
            1.7,
            -1.056
          ]
        ],
        [
          [
            -0.432,
            -1.924,
            0.716
          ],
          [
            -1.3,
            -0.564,
            -1.5
          ],
          [
            -0.608,
            1.832,
            -0.832
          ]
        ],
        [
          [
            0.684,
            -1.488,
            0.104
          ],
          [
            1.22,
            0.956,
            1.028
          ],
          [
            1.104,
            1.928,
            1.752
          ]
        ],
        [
          [
            0.008,
            1.756,
            0.164
          ],
          [
            0.844,
            -0.1,
            -1.568
          ],
          [
            1.052,
            -0.012,
            0.756
          ]
        ],
        [
          [
            1.856,
            1.96,
            1.516
          ],
          [
            -0.304,
            0.436,
            -0.896
          ],
          [
            0.78,
            -1.124,
            0.06
          ]
        ],
        [
          [
            -1.832,
            0.048,
            0.088
          ],
          [
            0.688,
            -1.544,
            -1.588
          ],
          [
            0.624,
            0.208,
            -0.46
          ]
        ],
        [
          [
            -1.3,
            -0.072,
            0.532
          ],
          [
            -0.86,
            0.168,
            -0.292
          ],
          [
            -1.628,
            1.088,
            -1.476
          ]
        ],
        [
          [
            0.036,
            1.196,
            1.32
          ],
          [
            0.764,
            1.5,
            1.736
          ],
          [
            -0.66,
            -1.42,
            -1.788
          ]
        ],
        [
          [
            0.828,
            0.32,
            -1.564
          ],
          [
            -1.584,
            -0.512,
            -0.512
          ],
          [
            -1.0,
            1.288,
            -1.388
          ]
        ],
        [
          [
            1.772,
            1.876,
            0.252
          ],
          [
            1.364,
            -0.268,
            0.28
          ],
          [
            0.876,
            1.228,
            -0.804
          ]
        ],
        [
          [
            -1.292,
            0.812,
            1.612
          ],
          [
            -0.04,
            -1.792,
            -0.16
          ],
          [
            -0.72,
            1.268,
            -0.916
          ]
        ],
        [
          [
            -1.656,
            1.26,
            0.088
          ],
          [
            0.804,
            -1.06,
            -1.74
          ],
          [
            1.604,
            -0.068,
            1.304
          ]
        ],
        [
          [
            1.296,
            -1.516,
            -1.54
          ],
          [
            -0.384,
            -0.948,
            0.372
          ],
          [
            0.992,
            -1.104,
            -0.328
          ]
        ],
        [
          [
            -0.876,
            0.7,
            -0.976
          ],
          [
            -0.592,
            0.708,
            -0.308
          ],
          [
            1.128,
            1.868,
            -0.188
          ]
        ]
      ],
      "strideX": 1,
      "strideY": 1,
      "precision": "Float",
      "inputBands": 7,
      "outputBands": 5
    }

Wrote Model to ConvolutionLayer_IrregularTest_Float.json; 7233 characters

Example Input/Output Pair

Display an input/output pair from a random execution:

Code from ReferenceIO.java:69 executed in 0.01 seconds:

    final SimpleEval eval = SimpleEval.run(layer, inputPrototype);
    return String.format("--------------------\nInput: \n[%s]\n--------------------\nOutput: \n%s\n--------------------\nDerivative: \n%s",
                         Arrays.stream(inputPrototype).map(t -> t.prettyPrint()).reduce((a, b) -> a + ",\n" + b).get(),
                         eval.getOutput().prettyPrint(),
                         Arrays.stream(eval.getDerivative()).map(t -> t.prettyPrint()).reduce((a, b) -> a + ",\n" + b).get());

Returns:

    --------------------
    Input: 
    [[
    	[ [ -1.292, -0.756, -0.532, 1.428, 0.152, -0.68, 1.488 ], [ 1.608, 0.148, -0.172, -1.94, -1.912, -0.884, -0.748 ], [ -1.344, -1.96, 1.62, -1.432, 1.732, -0.896, 0.16 ], [ 1.92, 0.692, -0.548, 1.536, -1.596, 0.248, 1.096 ], [ -1.932, -0.332, -0.392, 1.936, -0.78, -1.328, 0.48 ] ],
    	[ [ -1.82, 0.524, -0.044, -0.7, -0.808, -0.864, -0.716 ], [ -0.144, 0.484, 1.784, -1.588, 0.04, -0.832, 0.888 ], [ 0.016, -0.876, -1.156, 1.74, 0.336, 0.576, 0.664 ], [ 1.032, 0.488, -0.352, -0.448, -1.908, -0.104, -1.752 ], [ 0.412, -0.316, -1.364, 1.316, -0.868, -0.012, 1.112 ] ],
    	[ [ -1.564, -1.464, 0.904, 1.148, -1.752, -1.996, -1.344 ], [ -1.916, -1.816, 1.452, 1.7, 0.18, -0.308, 0.556 ], [ 1.024, -1.384, 0.308, -0.668, 1.044, 1.744, 0.492 ], [ 1.548, -0.708, 1.084, 0.464, 1.296, 0.404, 0.728 ], [ 1.808, -0.468, -0.828, -1.276, -0.16, 1.06, 0.176 ] ],
    	[ [ 0.64, 0.472, 1.396, 1.944, -1.368, -1.6, 0.572 ], [ 0.228, 0.8, -1.648, -1.268, 0.24, 1.796, 0.072 ], [ -0.996, -0.72, 1.836, -0.592, 0.152, -0.532, -1.344 ], [ -1.052, -1.068, -1.46, 1.156, 0.9, 0.968, 0.016 ], [ 0.284, -1.804, 1.664, 1.276, -0.064, -1.456, 1.88 ] ],
    	[ [ 1.38, 0.896, -1.936, 0.176, 0.92, -0.704, -0.928 ], [ -1.888, 1.996, 0.54, 1.712, 1.216, -1.164, 0.436 ], [ -0.424, 0.992, 0.468, -1.576, 1.72, -0.172, 1.124 ], [ -0.732, 0.936, -1.988, -0.3, 0.492, -0.236, 1.668 ], [ -1.452, -0.244, 0.988, -0.688, 1.848, 0.288, 1.736 ] ]
    ]]
    --------------------
    Output: 
    [
    	[ [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ] ],
    	[ [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ] ],
    	[ [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ] ],
    	[ [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ] ],
    	[ [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ] ]
    ]
    --------------------
    Derivative: 
    [
    	[ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ],
    	[ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ],
    	[ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ],
    	[ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ],
    	[ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ]
    ]

GPU Log

Batch Execution

Most layers, including this one, should behave the same no matter how the items are split between batches. We verify this:
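
As a sketch of what such a check involves (self-contained toy code; not the actual BatchingTester API):

    import java.util.Arrays;
    import java.util.function.Function;

    public class BatchInvarianceSketch {
      public static void main(String[] args) {
        // Toy per-item "layer": doubles every element; stands in for the real layer.
        final Function<double[][], double[][]> layer = batch ->
            Arrays.stream(batch)
                  .map(item -> Arrays.stream(item).map(v -> 2 * v).toArray())
                  .toArray(double[][]::new);

        final double[][] data = { { 1, 2 }, { 3, 4 }, { 5, 6 } };

        // Reference: evaluate the whole batch at once.
        final double[][] whole = layer.apply(data);

        // Alternative: evaluate one item at a time and reassemble the batch.
        final double[][] split = new double[data.length][];
        for (int i = 0; i < data.length; i++) {
          split[i] = layer.apply(new double[][] { data[i] })[0];
        }

        // The two evaluations must agree elementwise within a tolerance.
        final double tol = 1e-10;
        for (int i = 0; i < data.length; i++) {
          for (int j = 0; j < data[i].length; j++) {
            if (Math.abs(whole[i][j] - split[i][j]) > tol) {
              throw new AssertionError("result depends on batch split");
            }
          }
        }
        System.out.println("batch invariance holds");
      }
    }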

Code from BatchingTester.java:113 executed in 0.04 seconds:

    return test(reference, inputPrototype);

Returns:

    ToleranceStatistics{absoluteTol=0.0000e+00 +- 0.0000e+00 [0.0000e+00 - 0.0000e+00] (3000#), relativeTol=0.0000e+00 +- 0.0000e+00 [Infinity - -Infinity] (0#)}

Differential Validation

Code from SingleDerivativeTester.java:292 executed in 0.00 seconds:

    log.info(String.format("Inputs: %s", Arrays.stream(inputPrototype).map(t -> t.prettyPrint()).reduce((a, b) -> a + ",\n" + b).get()));
    log.info(String.format("Inputs Statistics: %s", Arrays.stream(inputPrototype).map(x -> new ScalarStatistics().add(x.getData()).toString()).reduce((a, b) -> a + ",\n" + b).get()));
    log.info(String.format("Output: %s", outputPrototype.prettyPrint()));
    log.info(String.format("Outputs Statistics: %s", new ScalarStatistics().add(outputPrototype.getData())));

Logging:

    Inputs: [
    	[ [ -0.096, -0.148, -0.432, 1.088, 1.368, 0.752, -1.952 ], [ -0.78, -0.948, 1.6, -0.824, -1.16, 1.112, -1.068 ], [ -1.768, 1.764, 0.108, -0.608, 0.092, -1.224, 1.512 ], [ 0.804, -1.804, 1.784, 1.028, 0.792, -0.092, -1.988 ], [ 1.876, 0.26, -0.68, -1.456, 0.548, 1.316, 1.272 ] ],
    	[ [ -1.144, 0.576, 0.932, 1.036, -1.348, 1.392, 0.828 ], [ -1.276, 1.008, -0.9, -0.896, 1.732, -1.768, 0.672 ], [ -1.336, 0.244, -1.936, -0.704, 0.316, -1.792, 1.876 ], [ -1.592, -0.24, 0.312, -0.204, -0.868, -0.588, 0.544 ], [ -0.092, 0.244, -1.772, -1.264, -1.58, 1.996, 0.608 ] ],
    	[ [ -0.508, 1.536, 1.432, 0.828, 0.34, -0.596, 0.364 ], [ 0.212, -1.94, 1.04, -0.62, -1.68, -0.74, 1.492 ], [ -1.944, 1.568, 1.232, 0.544, -0.604, 0.356, 0.68 ], [ -0.54, 1.168, 1.936, 1.472, -1.592, -1.864, 0.34 ], [ 1.812, 0.84, -1.168, -1.48, -1.176, 0.972, -1.772 ] ],
    	[ [ -1.812, -0.28, 0.576, 1.276, 0.292, -0.592, 0.844 ], [ 0.192, -0.556, 1.124, 1.212, 0.668, 1.744, -1.336 ], [ 0.704, 0.328, -1.868, -0.096, 0.352, -0.264, -0.568 ], [ 1.536, 0.052, 0.572, -0.668, 1.292, 0.488, 0.944 ], [ -0.704, -1.088, 0.036, 1.424, 1.66, -1.22, 0.092 ] ],
    	[ [ 0.644, -0.552, 1.036, 1.304, -0.184, 0.428, 0.604 ], [ 0.876, -0.612, 0.144, 0.42, 1.788, -0.752, -1.732 ], [ -1.88, 0.168, -1.44, -0.424, -0.732, -0.928, -1.18 ], [ 0.992, 0.188, -1.436, -0.544, 1.648, -0.116, 0.44 ], [ -1.596, -1.328, 0.744, 0.968, -0.648, -0.328, 0.632 ] ]
    ]
    Inputs Statistics: {meanExponent=-0.1302413046005952, negative=82, min=0.632, max=0.632, mean=-1.600000000000382E-4, count=175, positive=93, stdDev=1.111801615191693, zeros=0}
    Output: [
    	[ [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ] ],
    	[ [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ] ],
    	[ [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ] ],
    	[ [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ] ],
    	[ [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0 ] ]
    ]
    Outputs Statistics: {meanExponent=NaN, negative=0, min=0.0, max=0.0, mean=0.0, count=125, positive=0, stdDev=0.0, zeros=125}
    

Feedback Validation

We validate that the implemented derivative with respect to the inputs agrees with finite-difference estimates:
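
As a sketch of the comparison being made (a toy function with a known derivative stands in for the layer; not the SingleDerivativeTester API):

    public class FiniteDifferenceSketch {
      static double f(double x) { return x * x * x; }             // toy "layer": y = x^3
      static double dfImplemented(double x) { return 3 * x * x; } // its analytic derivative

      public static void main(String[] args) {
        final double x = 0.7;
        final double eps = 1e-5;
        // Central difference: (f(x+eps) - f(x-eps)) / (2*eps) = f'(x) + O(eps^2)
        final double measured = (f(x + eps) - f(x - eps)) / (2 * eps);
        final double implemented = dfImplemented(x);
        System.out.printf("implemented=%.8f measured=%.8f error=%.2e%n",
            implemented, measured, Math.abs(measured - implemented));
        // The tester applies this comparison per element of the input/output
        // Jacobian and accumulates the errors into the ToleranceStatistics below.
      }
    }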

Code from SingleDerivativeTester.java:303 executed in 0.65 seconds:

    return testFeedback(statistics, component, inputPrototype, outputPrototype);

Logging:

    Feedback for input 0
    Inputs Values: [
    	[ [ -0.096, -0.148, -0.432, 1.088, 1.368, 0.752, -1.952 ], [ -0.78, -0.948, 1.6, -0.824, -1.16, 1.112, -1.068 ], [ -1.768, 1.764, 0.108, -0.608, 0.092, -1.224, 1.512 ], [ 0.804, -1.804, 1.784, 1.028, 0.792, -0.092, -1.988 ], [ 1.876, 0.26, -0.68, -1.456, 0.548, 1.316, 1.272 ] ],
    	[ [ -1.144, 0.576, 0.932, 1.036, -1.348, 1.392, 0.828 ], [ -1.276, 1.008, -0.9, -0.896, 1.732, -1.768, 0.672 ], [ -1.336, 0.244, -1.936, -0.704, 0.316, -1.792, 1.876 ], [ -1.592, -0.24, 0.312, -0.204, -0.868, -0.588, 0.544 ], [ -0.092, 0.244, -1.772, -1.264, -1.58, 1.996, 0.608 ] ],
    	[ [ -0.508, 1.536, 1.432, 0.828, 0.34, -0.596, 0.364 ], [ 0.212, -1.94, 1.04, -0.62, -1.68, -0.74, 1.492 ], [ -1.944, 1.568, 1.232, 0.544, -0.604, 0.356, 0.68 ], [ -0.54, 1.168, 1.936, 1.472, -1.592, -1.864, 0.34 ], [ 1.812, 0.84, -1.168, -1.48, -1.176, 0.972, -1.772 ] ],
    	[ [ -1.812, -0.28, 0.576, 1.276, 0.292, -0.592, 0.844 ], [ 0.192, -0.556, 1.124, 1.212, 0.668, 1.744, -1.336 ], [ 0.704, 0.328, -1.868, -0.096, 0.352, -0.264, -0.568 ], [ 1.536, 0.052, 0.572, -0.668, 1.292, 0.488, 0.944 ], [ -0.704, -1.088, 0.036, 1.424, 1.66, -1.22, 0.092 ] ],
    	[ [ 0.644, -0.552, 1.036, 1.304, -0.184, 0.428, 0.604 ], [ 0.876, -0.612, 0.144, 0.42, 1.788, -0.752, -1.732 ], [ -1.88, 0.168, -1.44, -0.424, -0.732, -0.928, -1.18 ], [ 0.992, 0.188, -1.436, -0.544, 1.648, -0.116, 0.44 ], [ -1.596, -1.328, 0.744, 0.968, -0.648, -0.328, 0.632 ] ]
    ]
    Value Statistics: {meanExponent=-0.1302413046005952, negative=82, min=0.632, max=0.632, mean=-1.600000000000382E-4, count=175, positive=93, stdDev=1.111801615191693, zeros=0}
    Implemented Feedback: [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], ... ]
    Implemented Statistics: {meanExponent=NaN, negative=0, min=0.0, max=0.0, mean=0.0, count=21875, positive=0, stdDev=0.0, zeros=21875}
    Measured Feedback: [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], ... ]
    Measured Statistics: {meanExponent=NaN, negative=0, min=0.0, max=0.0, mean=0.0, count=21875, positive=0, stdDev=0.0, zeros=21875}
    Feedback Error: [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], ... ]
    Error Statistics: {meanExponent=NaN, negative=0, min=0.0, max=0.0, mean=0.0, count=21875, positive=0, stdDev=0.0, zeros=21875}
    

Returns:

    ToleranceStatistics{absoluteTol=0.0000e+00 +- 0.0000e+00 [0.0000e+00 - 0.0000e+00] (21875#), relativeTol=0.0000e+00 +- 0.0000e+00 [Infinity - -Infinity] (0#)}

Learning Validation

We validate that the implemented derivative with respect to the internal weights agrees with finite-difference estimates:
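
The check mirrors the input-feedback sketch above, except the perturbation is applied to a layer weight while the inputs stay fixed. A minimal illustration (toy one-weight layer; hypothetical names):

    public class WeightGradientSketch {
      public static void main(String[] args) {
        final double x = 1.3;    // fixed input
        final double w = -0.4;   // the weight under test
        final double eps = 1e-5;
        // Toy layer y = w * x, so dy/dw = x analytically.
        final double implemented = x;
        final double measured = ((w + eps) * x - (w - eps) * x) / (2 * eps);
        System.out.printf("dy/dw implemented=%.6f measured=%.6f%n", implemented, measured);
        // In the failing run below the implemented gradient is nonzero while
        // every measured value is 0.0, which is what trips the AssertionError.
      }
    }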

Code from SingleDerivativeTester.java:311 executed in 0.00 seconds:

    return testLearning(statistics, component, inputPrototype, outputPrototype);

Logging:

    Learning Gradient for weight setByCoord 0
    Implemented Gradient: [ [ -1.2760000228881836, 0.21199999749660492, 0.19200000166893005, 0.8759999871253967, 0.0, -1.3359999656677246, -1.944000005722046, 0.7039999961853027, ... ], [ -0.7799999713897705, -1.2760000228881836, 0.21199999749660492, 0.19200000166893005, 0.8759999871253967, -1.7680000066757202, -1.3359999656677246, -1.944000005722046, ... ], [ 0.0, -0.7799999713897705, -1.2760000228881836, 0.21199999749660492, 0.19200000166893005, 0.0, -1.7680000066757202, -1.3359999656677246, ... ], [ -1.1440000534057617, -0.5080000162124634, -1.812000036239624, 0.6439999938011169, 0.0, -1.2760000228881836, 0.21199999749660492, 0.19200000166893005, ... ], [ -0.09600000083446503, -1.1440000534057617, -0.5080000162124634, -1.812000036239624, 0.6439999938011169, -0.7799999713897705, -1.2760000228881836, 0.21199999749660492, ... ], [ 0.0, -0.09600000083446503, -1.1440000534057617, -0.5080000162124634, -1.812000036239624, 0.0, -0.7799999713897705, -1.2760000228881836, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, -1.1440000534057617, -0.5080000162124634, -1.812000036239624, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, -0.09600000083446503, -1.1440000534057617, -0.5080000162124634, ... ], ... ]
    Implemented Statistics: {meanExponent=-0.12764724074596737, negative=2775, min=0.9440000057220459, max=0.9440000057220459, mean=-0.0017945396370357936, count=39375, positive=3140, stdDev=0.43386778897468015, zeros=33460}
    Measured Gradient: [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... ], ... ]
    Measured Statistics: {meanExponent=NaN, negative=0, min=0.0, max=0.0, mean=0.0, count=39375, positive=0, stdDev=0.0, zeros=39375}
    Gradient Error: [ [ 1.2760000228881836, -0.21199999749660492, -0.19200000166893005, -0.8759999871253967, 0.0, 1.3359999656677246, 1.944000005722046, -0.7039999961853027, ... ], [ 0.7799999713897705, 1.2760000228881836, -0.21199999749660492, -0.19200000166893005, -0.8759999871253967, 1.7680000066757202, 1.3359999656677246, 1.944000005722046, ... ], [ 0.0, 0.7799999713897705, 1.2760000228881836, -0.21199999749660492, -0.19200000166893005, 0.0, 1.7680000066757202, 1.3359999656677246, ... ], [ 1.1440000534057617, 0.5080000162124634, 1.812000036239624, -0.6439999938011169, 0.0, 1.2760000228881836, -0.21199999749660492, -0.19200000166893005, ... ], [ 0.09600000083446503, 1.1440000534057617, 0.5080000162124634, 1.812000036239624, -0.6439999938011169, 0.7799999713897705, 1.2760000228881836, -0.21199999749660492, ... ], [ 0.0, 0.09600000083446503, 1.1440000534057617, 0.5080000162124634, 1.812000036239624, 0.0, 0.7799999713897705, 1.2760000228881836, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 1.1440000534057617, 0.5080000162124634, 1.812000036239624, ... ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.09600000083446503, 1.1440000534057617, 0.5080000162124634, ... ], ... ]
    Error Statistics: {meanExponent=-0.12764724074596737, negative=3140, min=-0.9440000057220459, max=-0.9440000057220459, mean=0.0017945396370357936, count=39375, positive=2775, stdDev=0.43386778897468015, zeros=33460}
    

Returns:

    java.lang.AssertionError: ToleranceStatistics{absoluteTol=1.4506e-01 +- 4.0890e-01 [0.0000e+00 - 1.9960e+00] (39375#), relativeTol=1.0000e+00 +- 0.0000e+00 [1.0000e+00 - 1.0000e+00] (5915#)}
    	at com.simiacryptus.mindseye.test.unit.SingleDerivativeTester.lambda$testLearning$23(SingleDerivativeTester.java:353)
    	at java.util.stream.IntPipeline$4$1.accept(IntPipeline.java:250)
    	at java.util.stream.Streams$RangeIntSpliterator.forEachRemaining(Streams.java:110)
    	at java.util.Spliterator$OfInt.forEachRemaining(Spliterator.java:693)
    	at java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:481)
    	at java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:471)
    	at java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:708)
    	at java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234)
    	at java.util.stream.ReferencePipeline.reduce(ReferencePipeline.java:479)
    	at com.simiacryptus.mindseye.test.unit.SingleDerivativeTester.testLearning(SingleDerivativeTester.java:386)
    	at com.simiacryptus.mindseye.test.unit.SingleDerivativeTester.lambda$test$18(SingleDerivativeTester.java:312)
    	at com.simiacryptus.util.io.MarkdownNotebookOutput.lambda$null$1(MarkdownNotebookOutput.java:205)
    	at com.simiacryptus.util.lang.TimedResult.time(TimedResult.java:59)
    	at com.simiacryptus.util.io.MarkdownNotebookOutput.lambda$code$2(MarkdownNotebookOutput.java:205)
    	at com.simiacryptus.util.test.SysOutInterceptor.withOutput(SysOutInterceptor.java:107)
    	at com.simiacryptus.util.io.MarkdownNotebookOutput.code(MarkdownNotebookOutput.java:203)
    	at com.simiacryptus.util.io.NotebookOutput.code(NotebookOutput.java:82)
    	at com.simiacryptus.mindseye.test.unit.SingleDerivativeTester.test(SingleDerivativeTester.java:311)
    	at com.simiacryptus.mindseye.test.unit.SingleDerivativeTester.test(SingleDerivativeTester.java:42)
    	at com.simiacryptus.mindseye.test.unit.StandardLayerTests.lambda$run$5(StandardLayerTests.java:257)
    	at java.util.stream.ForEachOps$ForEachOp$OfRef.accept(ForEachOps.java:184)
    	at java.util.stream.ReferencePipeline$2$1.accept(ReferencePipeline.java:175)
    	at java.util.ArrayList$ArrayListSpliterator.forEachRemaining(ArrayList.java:1374)
    	at java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:481)
    	at java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:471)
    	at java.util.stream.ForEachOps$ForEachOp.evaluateSequential(ForEachOps.java:151)
    	at java.util.stream.ForEachOps$ForEachOp$OfRef.evaluateSequential(ForEachOps.java:174)
    	at java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234)
    	at java.util.stream.ReferencePipeline.forEach(ReferencePipeline.java:418)
    	at com.simiacryptus.mindseye.test.unit.StandardLayerTests.run(StandardLayerTests.java:256)
    	at com.simiacryptus.mindseye.test.NotebookReportBase.lambda$run$0(NotebookReportBase.java:105)
    	at com.simiacryptus.util.lang.TimedResult.time(TimedResult.java:76)
    	at com.simiacryptus.mindseye.test.NotebookReportBase.run(NotebookReportBase.java:103)
    	at com.simiacryptus.mindseye.layers.LayerTestBase.test(LayerTestBase.java:37)
    	at sun.reflect.GeneratedMethodAccessor14.invoke(Unknown Source)
    	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    	at java.lang.reflect.Method.invoke(Method.java:498)
    	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
    	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
    	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
    	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
    	at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
    	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
    	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
    	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
    	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71)
    	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288)
    	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58)
    	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268)
    	at org.junit.runners.ParentRunner.run(ParentRunner.java:363)
    	at org.junit.runners.Suite.runChild(Suite.java:128)
    	at org.junit.runners.Suite.runChild(Suite.java:27)
    	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
    	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71)
    	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288)
    	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58)
    	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268)
    	at org.junit.runners.ParentRunner.run(ParentRunner.java:363)
    	at org.junit.runner.JUnitCore.run(JUnitCore.java:137)
    	at com.intellij.junit4.JUnit4IdeaTestRunner.startRunnerWithArgs(JUnit4IdeaTestRunner.java:68)
    	at com.intellij.rt.execution.junit.IdeaTestRunner$Repeater.startRunnerWithArgs(IdeaTestRunner.java:47)
    	at com.intellij.rt.execution.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:242)
    	at com.intellij.rt.execution.junit.JUnitStarter.main(JUnitStarter.java:70)