SparkConf sparkConf = new SparkConf();
sparkConf.setAppName("DL4J Spark Example");
JavaSparkContext sc = new JavaSparkContext(sparkConf);
// Network architecture/optimizer configuration.
// NOTE(review): the builder chain is truncated in this view — layer definitions and
// .build() are presumably below; confirm against the full file.
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
// This is a ParameterServer configuration bean. In practice, the only option you are
// likely to set is .unicastPort(int).
// Parameter-server transport configuration for gradient sharing.
// NOTE(review): the builder chain is truncated here — .build() is not visible; confirm below.
VoidConfiguration voidConfiguration = VoidConfiguration.builder()
// SharedTrainingMaster is the foundation of distributed training: it holds all the
// logic required for training across the cluster.
// TrainingMaster: coordinates how data is distributed and how workers fit the model.
// The Export approach writes the training RDD to disk as minibatch files before fitting.
// NOTE(review): builder chain is truncated in this view — .build() is not visible; confirm below.
TrainingMaster tm = new SharedTrainingMaster.Builder(voidConfiguration,batchSizePerWorker)
.rddTrainingApproach(RDDTrainingApproach.Export)
.batchSizePerWorker(batchSizePerWorker)
// Create the Spark-distributed network wrapper, pairing the model configuration
// with the TrainingMaster that drives distributed fitting on this context.
SparkDl4jMultiLayer sparkNet = new SparkDl4jMultiLayer(sc, conf, tm);
// Training loop: one pass per epoch.
// NOTE(review): the loop body is truncated in this view — the sparkNet.fit(...) call and
// the closing brace are not visible; the log line presumably follows fit — confirm.
for (int i = 0; i < numEpochs; i++) {
log.info("Completed Epoch {}", i);