    isTraining = false;
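// After each finished minibatch during training, decide whether it is time to run a validation pass.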
} else if (event instanceof MiniBatchFinishedEvent && isTraining) {
    MiniBatchFinishedEvent mbe = (MiniBatchFinishedEvent) event;
    if (mbe.getBatchCount() % validationFrequency == 0) {
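        // The event source is the trainer; validation needs its network and the
        // network's layer calculator to forward-propagate the validation examples.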
        OneStepTrainer<?> t = (OneStepTrainer<?>) event.getSource();
        NeuralNetwork n = t.getNeuralNetwork();
        if (n.getLayerCalculator() != null) {
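            // Start a clean validation pass: clear the accumulated error and rewind the validation input provider.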
            OutputError outputError = t.getOutputError();
            outputError.reset();
            inputProvider.reset();
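            // Reuse the tensors attached to the minibatch results if available; otherwise allocate a
            // provider sized for one example. The outputError key maps to the target tensor, which has
            // the same dimensions as the network's output layer.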
            ValuesProvider vp = mbe.getResults();
            if (vp == null) {
                vp = TensorFactory.tensorProvider(n, 1, Environment.getInstance().getUseDataSharedMemory());
            }
            if (vp.get(outputError) == null) {
                vp.add(outputError, vp.get(n.getOutputLayer()).getDimensions());
            }
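            // Wrap the input-layer tensor and the target tensor so the input provider can populate both.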
            TrainingInputData input = new TrainingInputDataImpl(vp.get(n.getInputLayer()), vp.get(outputError));
            Set<Layer> calculatedLayers = new UniqueList<>();
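            // Forward-propagate every validation example and accumulate the error between
            // the network's output and the provided target.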
            for (int i = 0; i < inputProvider.getInputSize(); i++) {
                inputProvider.populateNext(input);
                calculatedLayers.clear();
                calculatedLayers.add(n.getInputLayer());
                n.getLayerCalculator().calculate(n, n.getOutputLayer(), calculatedLayers, vp);
                outputError.addItem(vp.get(n.getOutputLayer()), input.getTarget());
            }
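            // Total error over the whole validation pass; training can stop early once it is
            // within the acceptance threshold.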
            float e = outputError.getTotalNetworkError();
            if (e <= acceptanceError) {
                System.out.println("Stopping at error " + e + " (" + (e * 100) + "%) for " + mbe.getBatchCount() + " minibatches");