instanceList.size()));
// Hold back the last testSize instances for evaluation; train on the rest.
trainingInstances = instanceList.subList(0, instanceList.size()
- testSize);
// Serialize each training instance as a DenseDoubleVector keyed by a
// running counter into the SequenceFile at tmpDatasetPath.
for (double[] instance : trainingInstances) {
DoubleVector vec = new DenseDoubleVector(instance);
writer.append(new LongWritable(count++), new VectorWritable(vec));
}
// NOTE(review): close() is not in a finally block / try-with-resources —
// if append() throws, the writer (and its HDFS stream) leaks.
writer.close();
} catch (FileNotFoundException e) {
// NOTE(review): printStackTrace() swallows the failure and execution
// continues to training on a possibly empty/partial dataset; consider
// rethrowing or failing fast instead.
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} catch (URISyntaxException e) {
e.printStackTrace();
}
// create model
// Network topology: three hidden Sigmoid layers of size `dimension`
// feeding a single Sigmoid output unit (binary classification, threshold
// 0.5 below), trained with cross-entropy loss.
int dimension = 8;
SmallLayeredNeuralNetwork ann = new SmallLayeredNeuralNetwork();
ann.setLearningRate(0.7);
// (sic) "Momemtum" is the actual Hama API method name.
ann.setMomemtumWeight(0.5);
ann.setRegularizationWeight(0.1);
ann.addLayer(dimension, false,
FunctionFactory.createDoubleFunction("Sigmoid"));
ann.addLayer(dimension, false,
FunctionFactory.createDoubleFunction("Sigmoid"));
ann.addLayer(dimension, false,
FunctionFactory.createDoubleFunction("Sigmoid"));
// `true` marks this as the final (output) layer.
ann.addLayer(1, true, FunctionFactory.createDoubleFunction("Sigmoid"));
ann.setCostFunction(FunctionFactory
.createDoubleDoubleFunction("CrossEntropy"));
ann.setModelPath(modelPath);
// NOTE(review): new Date().getTime() is the legacy idiom — prefer
// System.currentTimeMillis() (no object allocation, same value).
long start = new Date().getTime();
// Job parameters passed through to the Hama BSP training job.
Map<String, String> trainingParameters = new HashMap<String, String>();
trainingParameters.put("tasks", "5");
trainingParameters.put("training.max.iterations", "2000");
trainingParameters.put("training.batch.size", "300");
trainingParameters.put("convergence.check.interval", "1000");
ann.train(tmpDatasetPath, trainingParameters);
long end = new Date().getTime();
// validate results
// NOTE(review): within this span errorRate is a raw count of
// misclassified instances; presumably it is divided by
// testInstances.size() after this fragment — verify downstream.
double errorRate = 0;
// calculate the error on test instance
for (double[] testInstance : testInstances) {
DoubleVector instance = new DenseDoubleVector(testInstance);
// Last element of each test row is the expected label; the remaining
// prefix (slice up to dimension-1) is the feature vector fed to the net.
double expected = instance.get(instance.getDimension() - 1);
instance = instance.slice(instance.getDimension() - 1);
double actual = ann.getOutput(instance).get(0);
// Count a misclassification when prediction and label fall on opposite
// sides of the 0.5 decision threshold.
if (actual < 0.5 && expected >= 0.5 || actual >= 0.5 && expected < 0.5) {
++errorRate;
}
}