Maybe it is too late, but I also use Neuroph. I have created up to 100 thousand networks overnight with my SSD and a 4-core CPU. With Java 8 you can multithread this without advanced coding skills; just take a look at the new Java 8 Executors. I use them in the class below (see the MONKEY object). And please excuse the rough coding style, it had to be quick here...
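Stripped of the Neuroph specifics, the multithreading pattern the class below relies on boils down to the following. This is a minimal, standalone sketch of the java.util.concurrent executor usage, not code taken from my project:

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.TimeUnit;

    public class ExecutorSketch {
        public static void main(String[] args) throws InterruptedException {
            // One worker thread per core on a 4-core machine
            ExecutorService pool = Executors.newFixedThreadPool(4);

            for (int i = 0; i < 100; i++) {
                final int taskId = i;
                // Each submitted Runnable is queued and picked up by a free worker
                pool.execute(() -> System.out.println("training network " + taskId));
            }

            // Stop accepting new tasks, then block until the queued ones finish
            pool.shutdown();
            pool.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
        }
    }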
package de.sauer.dispe;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.time.Instant;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

import org.neuroph.core.NeuralNetwork;
import org.neuroph.core.data.DataSet;
import org.neuroph.nnet.MultiLayerPerceptron;
import org.neuroph.nnet.learning.BackPropagation;
import org.neuroph.util.TransferFunctionType;

import de.sauer.dispe.model.Director;
import de.sauer.dispe.model.Specialist;

@SuppressWarnings("rawtypes")
public class DirBreeder_old {

    private static final int MAX_ITER = 40;
    public static final double GG_EPS = 0.49;

    // Each range is {start, step, end}
    private static final double[] ERROR_RANGE = {0.02, 0.05, 0.47};
    private static final double[] LEARNING_RANGE = {0.1, 0.1, 0.3};
    private static final int[] LAYER_RANGE = {25, 5, 50};

    private static final TransferFunctionType[] TF_TYPES = {
            TransferFunctionType.GAUSSIAN,
            TransferFunctionType.LOG
    };

    private static final String DIRECTOR_FOLDER = SpecAnalyser.SPEC_PATH + "\\director\\";
    private static final String OUTPUT_SUMMARY_FILE = DIRECTOR_FOLDER + "\\summary.csv";
    private static final String DATASET_FILE = TeamBuilder.TEAM_PATH + "\\1918_train.csv";

    // Thread pool that runs one Trainer task per network configuration
    private static ExecutorService MONKEY;

    public static void main(String[] args) throws IOException {
        doStuff();
    }

    public static void doStuff() throws IOException {
        System.out.println("Starting at: " + Instant.now());
        int counter = 0;

        // Fixed pool with one worker per CPU core
        MONKEY = Executors.newFixedThreadPool(4);

        FileWriter output = new FileWriter(new File(OUTPUT_SUMMARY_FILE), true);
        DataSet ds = DataSet.createFromFile(DATASET_FILE, 11, 1, ";");

        // Enumerate every combination of layer sizes, max error, learning rate
        // and transfer function, and hand each one to the thread pool.
        for (int firstLayer = LAYER_RANGE[0]; firstLayer <= LAYER_RANGE[2]; firstLayer += LAYER_RANGE[1]) {
            for (int secondLayer = LAYER_RANGE[0]; secondLayer <= LAYER_RANGE[2]; secondLayer += LAYER_RANGE[1]) {
                for (int thirdLayer = LAYER_RANGE[0]; thirdLayer <= LAYER_RANGE[2]; thirdLayer += LAYER_RANGE[1]) {
                    for (int fourthLayer = LAYER_RANGE[0]; fourthLayer <= LAYER_RANGE[2]; fourthLayer += LAYER_RANGE[1]) {
                        for (double maxError = ERROR_RANGE[0]; maxError <= ERROR_RANGE[2]; maxError += ERROR_RANGE[1]) {
                            for (double learnRate = LEARNING_RANGE[0]; learnRate <= LEARNING_RANGE[2]; learnRate += LEARNING_RANGE[1]) {
                                for (TransferFunctionType tft : TF_TYPES) {
                                    Specialist trainee = new Director(
                                            buildAnn(tft, firstLayer, secondLayer, thirdLayer, fourthLayer),
                                            tft,
                                            maxError,
                                            ds,
                                            MAX_ITER,
                                            GG_EPS,
                                            learnRate);
                                    MONKEY.execute(new Trainer(trainee, output, counter++));
                                }
                            }
                        }
                    }
                }
            }
        }

        System.out.println("Building " + counter);

        // No new tasks after this point; wait for all queued trainings to finish.
        MONKEY.shutdown();
        try {
            MONKEY.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }

        output.flush();
        output.close();
    }

    @SuppressWarnings("unchecked")
    private static NeuralNetwork<BackPropagation> buildAnn(TransferFunctionType tft, int layer1, int layer2, int layer3, int layer4) {
        // 11 inputs, four hidden layers, 1 output
        NeuralNetwork nn = new MultiLayerPerceptron(tft, 11, layer1, layer2, layer3, layer4, 1);
        nn.randomizeWeights();
        return nn;
    }
}
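The Trainer class used above is not included in the snippet. As a rough idea of what it could look like, here is a minimal sketch of a Runnable that trains one configuration and appends a line to the shared summary file; the getNetwork() and getDataSet() accessors on Specialist are assumptions for illustration, not the real model API:

    package de.sauer.dispe;

    import java.io.FileWriter;
    import java.io.IOException;

    import org.neuroph.core.NeuralNetwork;
    import org.neuroph.core.data.DataSet;
    import org.neuroph.nnet.learning.BackPropagation;

    import de.sauer.dispe.model.Specialist;

    class Trainer implements Runnable {

        private final Specialist trainee;
        private final FileWriter output;
        private final int id;

        Trainer(Specialist trainee, FileWriter output, int id) {
            this.trainee = trainee;
            this.output = output;
            this.id = id;
        }

        @Override
        public void run() {
            // Assumed accessors: placeholders for whatever the model classes expose
            NeuralNetwork<BackPropagation> nn = trainee.getNetwork();
            DataSet ds = trainee.getDataSet();

            // Blocking call: trains this one configuration on the worker thread
            nn.learn(ds);

            try {
                // FileWriter is not thread-safe, so serialize access to the
                // shared summary file across the workers
                synchronized (output) {
                    output.write(id + ";" + nn.getLearningRule().getTotalNetworkError()
                            + System.lineSeparator());
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }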
Thanks for the quick reply. That looks like a great article, and great advice on Neuroph. – user359708