Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
<dependency>
<groupId>org.deeplearning4j</groupId>
<artifactId>deeplearning4j-zoo</artifactId>
<version>1.0.0-M1.1</version>
</dependency>import org.deeplearning4j.zoo.model.AlexNet
import org.deeplearning4j.zoo.*;
...
int numberOfClassesInYourData = 1000;
int randomSeed = 123;
ZooModel zooModel = AlexNet.builder()
.numClasses(numberOfClassesInYourData)
.seed(randomSeed)
.build();
Model net = zooModel.init();ZooModel zooModel = AlexNet.builder()
.numClasses(numberOfClassesInYourData)
.seed(randomSeed)
.build();
MultiLayerConfiguration net = ((AlexNet) zooModel).conf();import org.deeplearning4j.zoo.model.VGG16;
import org.deeplearning4j.zoo.*;
...
ZooModel zooModel = VGG16.builder().build();
Model net = zooModel.initPretrained(PretrainedType.IMAGENET);ZooModel zooModel = VGG16.builder().build();
Model net = zooModel.initPretrained(PretrainedType.VGGFACE);int numberOfClassesInYourData = 10;
int randomSeed = 123;
ZooModel zooModel = ResNet50.builder()
.numClasses(numberOfClassesInYourData)
.seed(randomSeed)
.build();
zooModel.setInputShape(new int[][]{{3, 28, 28}});String filename = "yolo.h5";
KerasLayer.registerCustomLayer("Lambda", KerasSpaceToDepth.class);
ComputationGraph graph = KerasModelImport.importKerasModelAndWeights(filename, false);
INDArray priors = Nd4j.create(priorBoxes);
FineTuneConfiguration fineTuneConf = new FineTuneConfiguration.Builder()
.seed(seed)
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
.gradientNormalization(GradientNormalization.RenormalizeL2PerLayer)
.gradientNormalizationThreshold(1.0)
.updater(new Adam.Builder().learningRate(1e-3).build())
.l2(0.00001)
.activation(Activation.IDENTITY)
.trainingWorkspaceMode(workspaceMode)
.inferenceWorkspaceMode(workspaceMode)
.build();
ComputationGraph model = new TransferLearning.GraphBuilder(graph)
.fineTuneConfiguration(fineTuneConf)
.addLayer("outputs", new Yolo2OutputLayer.Builder()
.boundingBoxPriors(priors)
.build(), "conv2d_23")
.setOutputs("outputs")
.build();
System.out.println(model.summary(InputType.convolutional(608, 608, 3)));
ModelSerializer.writeModel(model, "yolo2_dl4j_inference.v1.zip", false); }public String pretrainedUrl(PretrainedType pretrainedType)Special algorithms for gradient descent.
GraphBuilder graphBuilder = new NeuralNetConfiguration.Builder()
// add hyperparameters and other layers
.addLayer("softmax", new ActivationLayer(Activation.SOFTMAX), "previous_input")
// add more layers and output
.build(); ⎧ 1, if x > 1
f(x) = ⎨ -1, if x < -1
⎩ x, otherwiseMultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
.seed(seed)
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
.learningRate(learningRate)
.updater(Updater.NESTEROVS).momentum(0.9)
.list()
.layer(0, new DenseLayer.Builder().nIn(numInputs).nOut(numHiddenNodes)
.weightInit(WeightInit.XAVIER)
.activation("relu")
.build())
.layer(1, new OutputLayer.Builder(LossFunction.NEGATIVELOGLIKELIHOOD)
.weightInit(WeightInit.XAVIER)
.activation("softmax").weightInit(WeightInit.XAVIER)
.nIn(numHiddenNodes).nOut(numOutputs).build())
.pretrain(false).backprop(true).build();MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder()
.seed(seed)
.regularization(true).l2(0.0005)
.learningRate(0.01)
.weightInit(WeightInit.XAVIER)
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
.updater(Updater.NESTEROVS).momentum(0.9)
.list()
.layer(0, new ConvolutionLayer.Builder(5, 5)
//nIn and nOut specify depth. nIn here is the nChannels and nOut is the number of filters to be applied
.nIn(nChannels)
.stride(1, 1)
.nOut(20)
.activation("identity")
.build())
.layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX)
.kernelSize(2,2)
.stride(2,2)
.build())
.layer(2, new ConvolutionLayer.Builder(5, 5)
//Note that nIn need not be specified in later layers
.stride(1, 1)
.nOut(50)
.activation("identity")
.build())
.layer(3, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX)
.kernelSize(2,2)
.stride(2,2)
.build())
.layer(4, new DenseLayer.Builder().activation("relu")
.nOut(500).build())
.layer(5, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
.nOut(outputNum)
.activation("softmax")
.build());public Builder corruptionLevel(double corruptionLevel)public Builder sparsity(double sparsity)public Builder encoderLayerSizes(int... encoderLayerSizes)public void setEncoderLayerSizes(int... encoderLayerSizes)public Builder decoderLayerSizes(int... decoderLayerSizes)public void setDecoderLayerSizes(int... decoderLayerSizes)public Builder reconstructionDistribution(ReconstructionDistribution distribution)public Builder lossFunction(IActivation outputActivationFn, LossFunctions.LossFunction lossFunction)public Builder lossFunction(Activation outputActivationFn, LossFunctions.LossFunction lossFunction)public Builder lossFunction(IActivation outputActivationFn, ILossFunction lossFunction)public Builder pzxActivationFn(IActivation activationFunction)public Builder pzxActivationFunction(Activation activation)public Builder nOut(int nOut)public Builder numSamples(int numSamples)public boolean hasBias()public Builder kernelSize(int... kernelSize)public Builder stride(int... stride)public Builder padding(int... padding)public Builder dilation(int... dilation)public Builder dataFormat(DataFormat dataFormat)public void setKernelSize(int... kernelSize)public void setStride(int... stride)public void setPadding(int... padding)public void setDilation(int... dilation)public boolean hasBias()public Builder convolutionMode(ConvolutionMode convolutionMode)public Builder kernelSize(int... kernelSize)public InputType getOutputType(int layerIndex, InputType inputType)public void setCropping(int... cropping)public Cropping1D build()public InputType getOutputType(int layerIndex, InputType inputType)public void setCropping(int... cropping)public Cropping2D build()public InputType getOutputType(int layerIndex, InputType inputType)public void setCropping(int... cropping)public Cropping3D build()Adding hooks and listeners on DL4J models.
MultiLayerNetwork model = new MultiLayerNetwork(conf);
model.init();
//print the score with every 1 iteration
model.setListeners(new ScoreIterationListener(1));public EvaluativeListener(@NonNull DataSetIterator iterator, int frequency)public void iterationDone(Model model, int iteration, int epoch)public ScoreIterationListener(int printIterations)public CollectScoresIterationListener()public void iterationDone(Model model, int iteration, int epoch)public void exportScores(OutputStream outputStream) throws IOExceptionpublic void exportScores(OutputStream outputStream, String delimiter) throws IOExceptionpublic void exportScores(File file) throws IOExceptionpublic void exportScores(File file, String delimiter) throws IOException.keepAll() //Don't delete any models
.saveEveryNEpochs(2)
.build()
}.keepLast(3)
.saveEveryNIterations(1000)
.build();
}.keepLastAndEvery(3, 4)
.saveEvery(15, TimeUnit.MINUTES)
.build();
}public CheckpointListener build()public void onEpochStart(Model model)public PerformanceListener build()public TimeIterationListener(int iterationCount)ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
.updater(new Adam(0.01))
// add your layers and hyperparameters below
.build();public void applyUpdater(INDArray gradient, int iteration, int epoch)public void applyUpdater(INDArray gradient, int iteration, int epoch)public void applyUpdater(INDArray gradient, int iteration, int epoch)public void applyUpdater(INDArray gradient, int iteration, int epoch)public void applyUpdater(INDArray gradient, int iteration, int epoch)public void applyUpdater(INDArray gradient, int iteration, int epoch)How to build complex networks with DL4J computation graph.
MultiLayerNetwork net = ...
net.save(new File("..."));
MultiLayerNetwork net2 = MultiLayerNetwork.load(new File("..."), true);ComputationGraph net = ...
net.save(new File("..."));
ComputationGraph net2 = ComputationGraph.load(new File("..."), true); Nd4j.getRandom().setSeed(12345);
ModelSerializer.restoreMultiLayerNetwork(modelFile);public static void writeModel(@NonNull Model model, @NonNull File file, boolean saveUpdater) throws IOExceptionpublic static void writeModel(@NonNull Model model, @NonNull File file, boolean saveUpdater,DataNormalization dataNormalization) throws IOExceptionpublic static void writeModel(@NonNull Model model, @NonNull String path, boolean saveUpdater) throws IOExceptionpublic static void writeModel(@NonNull Model model, @NonNull OutputStream stream, boolean saveUpdater)
throws IOExceptionpublic static void writeModel(@NonNull Model model, @NonNull OutputStream stream, boolean saveUpdater,DataNormalization dataNormalization)
throws IOExceptionpublic static MultiLayerNetwork restoreMultiLayerNetwork(@NonNull File file) throws IOExceptionpublic static MultiLayerNetwork restoreMultiLayerNetwork(@NonNull File file, boolean loadUpdater)
throws IOExceptionpublic static MultiLayerNetwork restoreMultiLayerNetwork(@NonNull InputStream is, boolean loadUpdater)
throws IOExceptionpublic static MultiLayerNetwork restoreMultiLayerNetwork(@NonNull InputStream is) throws IOExceptionpublic static MultiLayerNetwork restoreMultiLayerNetwork(@NonNull String path) throws IOExceptionpublic static MultiLayerNetwork restoreMultiLayerNetwork(@NonNull String path, boolean loadUpdater)
throws IOExceptionpublic static ComputationGraph restoreComputationGraph(@NonNull String path) throws IOExceptionpublic static ComputationGraph restoreComputationGraph(@NonNull String path, boolean loadUpdater)
throws IOExceptionpublic static ComputationGraph restoreComputationGraph(@NonNull InputStream is, boolean loadUpdater)
throws IOExceptionpublic static ComputationGraph restoreComputationGraph(@NonNull InputStream is) throws IOExceptionpublic static ComputationGraph restoreComputationGraph(@NonNull File file) throws IOExceptionpublic static ComputationGraph restoreComputationGraph(@NonNull File file, boolean loadUpdater) throws IOExceptionpublic static Task taskByModel(Model model)public static void addNormalizerToModel(File f, Normalizer<?> normalizer)public static void addObjectToFile(@NonNull File f, @NonNull String key, @NonNull Object o)ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
.updater(new Sgd(0.01))
.graphBuilder()
.addInputs("input") //can use any label for this
.addLayer("L1", new GravesLSTM.Builder().nIn(5).nOut(5).build(), "input")
.addLayer("L2",new RnnOutputLayer.Builder().nIn(5+5).nOut(5).build(), "input", "L1")
.setOutputs("L2") //We need to specify the network outputs and their order
.build();
ComputationGraph net = new ComputationGraph(conf);
net.init();ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
.updater(new Sgd(0.01))
.graphBuilder()
.addInputs("input1", "input2")
.addLayer("L1", new DenseLayer.Builder().nIn(3).nOut(4).build(), "input1")
.addLayer("L2", new DenseLayer.Builder().nIn(3).nOut(4).build(), "input2")
.addVertex("merge", new MergeVertex(), "L1", "L2")
.addLayer("out", new OutputLayer.Builder().nIn(4+4).nOut(3).build(), "merge")
.setOutputs("out")
.build();ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
.updater(new Sgd(0.01))
.graphBuilder()
.addInputs("input")
.addLayer("L1", new DenseLayer.Builder().nIn(3).nOut(4).build(), "input")
.addLayer("out1", new OutputLayer.Builder()
.lossFunction(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
.nIn(4).nOut(3).build(), "L1")
.addLayer("out2", new OutputLayer.Builder()
.lossFunction(LossFunctions.LossFunction.MSE)
.nIn(4).nOut(2).build(), "L1")
.setOutputs("out1","out2")
.build();int numLinesToSkip = 0;
String fileDelimiter = ",";
RecordReader rr = new CSVRecordReader(numLinesToSkip,fileDelimiter);
String csvPath = "/path/to/my/file.csv";
rr.initialize(new FileSplit(new File(csvPath)));
int batchSize = 4;
MultiDataSetIterator iterator = new RecordReaderMultiDataSetIterator.Builder(batchSize)
.addReader("myReader",rr)
.addInput("myReader",0,2) //Input: columns 0 to 2 inclusive
.addOutput("myReader",3,4) //Output: columns 3 to 4 inclusive
.build();int numLinesToSkip = 0;
String fileDelimiter = ",";
RecordReader featuresReader = new CSVRecordReader(numLinesToSkip,fileDelimiter);
String featuresCsvPath = "/path/to/my/myInput.csv";
featuresReader.initialize(new FileSplit(new File(featuresCsvPath)));
RecordReader labelsReader = new CSVRecordReader(numLinesToSkip,fileDelimiter);
String labelsCsvPath = "/path/to/my/myOutput.csv";
labelsReader.initialize(new FileSplit(new File(labelsCsvPath)));
int batchSize = 4;
int numClasses = 3;
MultiDataSetIterator iterator = new RecordReaderMultiDataSetIterator.Builder(batchSize)
.addReader("csvInput", featuresReader)
.addReader("csvLabels", labelsReader)
.addInput("csvInput") //Input: all columns from input reader
.addOutput("csvLabels", 0, 3) //Output 1: columns 0 to 3 inclusive
.addOutputOneHot("csvLabels", 4, numClasses) //Output 2: column 4 -> convert to one-hot for classification
.build();public void setBackpropGradientsViewArray(INDArray backpropGradientsViewArray)Recurrent Neural Network (RNN) implementations in DL4J.
.layer(2, new RnnOutputLayer.Builder(LossFunction.MCXENT).activation(Activation.SOFTMAX)
.weightInit(WeightInit.XAVIER).nIn(prevLayerSize).nOut(nOut).build()).backpropType(BackpropType.TruncatedBPTT)
.tBPTTLength(100)Evaluation.evalTimeSeries(INDArray labels, INDArray predicted, INDArray outputMask) INDArray timeSeriesFeatures = ...;
INDArray timeSeriesOutput = myNetwork.output(timeSeriesFeatures);
int timeSeriesLength = timeSeriesOutput.size(2); //Size of time dimension
INDArray lastTimeStepProbabilities = timeSeriesOutput.get(NDArrayIndex.point(0), NDArrayIndex.all(), NDArrayIndex.point(timeSeriesLength-1)); INDArray labelsMaskArray = ...;
INDArray lastTimeStepIndices = Nd4j.argMax(labelsMaskArray,1); INDArray featuresMaskArray = ...;
int longestTimeSeries = featuresMaskArray.size(1);
INDArray linspace = Nd4j.linspace(1,longestTimeSeries,longestTimeSeries);
INDArray temp = featuresMaskArray.mulColumnVector(linspace);
INDArray lastTimeStepIndices = Nd4j.argMax(temp,1); int numExamples = timeSeriesFeatures.size(0);
for( int i=0; i<numExamples; i++ ){
int thisTimeSeriesLastIndex = lastTimeStepIndices.getInt(i);
INDArray thisExampleProbabilities = timeSeriesOutput.get(NDArrayIndex.point(i), NDArrayIndex.all(), NDArrayIndex.point(thisTimeSeriesLastIndex));
}SequenceRecordReader featureReader = new CSVSequenceRecordReader(1, ",");
SequenceRecordReader labelReader = new CSVSequenceRecordReader(1, ",");featureReader.initialize(new NumberedFileInputSplit("/path/to/data/myInput_%d.csv", 0, 9));
labelReader.initialize(new NumberedFileInputSplit("/path/to/data/myLabels_%d.csv", 0, 9));DataSetIterator iter = new SequenceRecordReaderDataSetIterator(featureReader, labelReader, miniBatchSize, numPossibleLabels, regression);SequenceRecordReader reader = new CSVSequenceRecordReader(1, ",");
reader.initialize(new NumberedFileInputSplit("/path/to/data/myData_%d.csv", 0, 9));
DataSetIterator iterClassification = new SequenceRecordReaderDataSetIterator(reader, miniBatchSize, numPossibleLabels, labelIndex, false);DataSetIterator iterRegression = new SequenceRecordReaderDataSetIterator(reader, miniBatchSize, -1, labelIndex, true);DataSetIterator variableLengthIter = new SequenceRecordReaderDataSetIterator(featureReader, labelReader, miniBatchSize, numPossibleLabels, regression, SequenceRecordReaderDataSetIterator.AlignmentMode.ALIGN_END);DataSetIterator variableLengthIter = new SequenceRecordReaderDataSetIterator(featureReader, labelReader, miniBatchSize, numPossibleLabels, regression, SequenceRecordReaderDataSetIterator.AlignmentMode.ALIGN_END);public void setNIn(int nIn)public RnnOutputLayer build()public long getNOut()public IUpdater getUpdaterByParam(String paramName)Supported neural network layers.
public ActivationLayer clone()public Builder activation(String activationFunction)public Builder activation(IActivation activationFunction)public Builder activation(Activation activation)public Builder hasBias(boolean hasBias)public Builder hasLayerNorm(boolean hasLayerNorm)public DropoutLayer build()public Builder hasBias(boolean hasBias)public Builder weightInit(EmbeddingInitializer embeddingInitializer)public Builder weightInit(INDArray vectors)public Builder hasBias(boolean hasBias)public Builder inputLength(int inputLength)public Builder inferInputLength(boolean inferInputLength)public Builder weightInit(EmbeddingInitializer embeddingInitializer)public Builder weightInit(INDArray vectors)public Builder poolingDimensions(int... poolingDimensions)public Builder poolingType(PoolingType poolingType)public Builder collapseDimensions(boolean collapseDimensions)public Builder pnorm(int pnorm)public Builder k(double k)public Builder n(double n)public Builder alpha(double alpha)public Builder beta(double beta)public Builder cudnnAllowFallback(boolean allowFallback)public Builder nIn(int nIn)public Builder nOut(int nOut)public Builder activation(Activation activation)public Builder kernelSize(int k)public Builder stride(int s)public Builder padding(int p)public Builder convolutionMode(ConvolutionMode cm)public Builder dilation(int d)public Builder hasBias(boolean hasBias)public Builder setInputSize(int inputSize)public void setKernel(int... kernel)public void setStride(int... stride)public void setPadding(int... padding)public void setDilation(int... dilation)public Builder nIn(int nIn)public Builder nOut(int nOut)public Builder activation(Activation activation)public Builder kernelSize(int... k)public Builder stride(int... s)public Builder padding(int... p)public Builder convolutionMode(ConvolutionMode cm)public Builder dilation(int... d)public Builder hasBias(boolean hasBias)public Builder setInputSize(int... 
inputSize)public Builder nIn(int nIn)public OutputLayer build()public void setKernelSize(int... kernelSize)public void setStride(int... stride)public void setPadding(int... padding)If input (for a single example, with channels down page, and sequence from left to right) is:
[ A1, A2, A3]
[ B1, B2, B3]
Then output with size = 2 is:
[ A1, A1, A2, A2, A3, A3]
[ B1, B1, B2, B2, B3, B3]public Builder size(int size)public Builder size(int[] size)Input (slice for one example and channel)
[ A, B ]
[ C, D ]
Size = [2, 2]
Output (slice for one example and channel)
[ A, A, B, B ]
[ A, A, B, B ]
[ C, C, D, D ]
[ C, C, D, D ]public Builder size(int size)public Builder size(int[] size)public Builder size(int size)public Builder size(int[] size)public void setPadding(int... padding)public ZeroPadding1DLayer build()public void setPadding(int... padding)public ZeroPadding3DLayer build()public void setPadding(int... padding)public ZeroPaddingLayer build()public LayerMemoryReport getMemoryReport(InputType inputType)public int getRepetitionFactor()public void setRepetitionFactor(int n)public Builder repetitionFactor(int n)public Builder lambdaCoord(double lambdaCoord)public Builder lambbaNoObj(double lambdaNoObj)public Builder lossPositionScale(ILossFunction lossPositionScale)public Builder lossClassPredictions(ILossFunction lossClassPredictions)public Builder boundingBoxPriors(INDArray boundingBoxes)king:queen::man:[woman, Attempted abduction, teenager, girl]
//Weird, but you can kind of see it
China:Taiwan::Russia:[Ukraine, Moscow, Moldova, Armenia]
//Two large countries and their small, estranged neighbors
house:roof::castle:[dome, bell_tower, spire, crenellations, turrets]
knee:leg::elbow:[forearm, arm, ulna_bone]
New York Times:Sulzberger::Fox:[Murdoch, Chernin, Bancroft, Ailes]
//The Sulzberger-Ochs family owns and runs the NYT.
//The Murdoch family owns News Corp., which owns Fox News.
//Peter Chernin was News Corp.'s COO for 13 yrs.
//Roger Ailes is president of Fox News.
//The Bancroft family sold the Wall St. Journal to News Corp.
love:indifference::fear:[apathy, callousness, timidity, helplessness, inaction]
//the poetry of this single array is simply amazing...
Donald Trump:Republican::Barack Obama:[Democratic, GOP, Democrats, McCain]
//It's interesting to note that, just as Obama and McCain were rivals,
//so too, Word2vec thinks Trump has a rivalry with the idea Republican.
monkey:human::dinosaur:[fossil, fossilized, Ice_Age_mammals, fossilization]
//Humans are fossilized monkeys? Humans are what's left
//over from monkeys? Humans are the species that beat monkeys
//just as Ice Age mammals beat dinosaurs? Plausible.
building:architect::software:[programmer, SecurityCenter, WinPcap]String filePath = new ClassPathResource("raw_sentences.txt").getFile().getAbsolutePath();
log.info("Load & Vectorize Sentences....");
// Strip white space before and after for each line
SentenceIterator iter = new BasicLineIterator(filePath);log.info("Load data....");
SentenceIterator iter = new LineSentenceIterator(new File("/Users/cvn/Desktop/file.txt"));
iter.setPreProcessor(new SentencePreProcessor() {
@Override
public String preProcess(String sentence) {
return sentence.toLowerCase();
}
});SentenceIterator iter = new LineSentenceIterator(new File("/your/absolute/file/path/here.txt"));// Split on white spaces in the line to get words
TokenizerFactory t = new DefaultTokenizerFactory();
t.setTokenPreProcessor(new CommonPreprocessor());log.info("Building model....");
Word2Vec vec = new Word2Vec.Builder()
.minWordFrequency(5)
.layerSize(100)
.seed(42)
.windowSize(5)
.iterate(iter)
.tokenizerFactory(t)
.build();
log.info("Fitting Word2Vec model....");
vec.fit();// Write word vectors
WordVectorSerializer.writeWordVectors(vec, "pathToWriteto.txt");
log.info("Closest Words:");
Collection<String> lst = vec.wordsNearest("day", 10);
System.out.println(lst);
//output: [night, week, year, game, season, during, office, until, -]double cosSim = vec.similarity("day", "night");
System.out.println(cosSim);
//output: 0.7704452276229858Collection<String> lst3 = vec.wordsNearest("man", 10);
System.out.println(lst3);
//output: [director, company, program, former, university, family, group, such, general]log.info("Save vectors....");
WordVectorSerializer.writeWord2VecModel(vec, "pathToSaveModel.txt");Collection<String> kingList = vec.wordsNearest(Arrays.asList("king", "woman"), Arrays.asList("queen"), 10);Word2Vec word2Vec = WordVectorSerializer.readWord2VecModel("pathToSaveModel.txt");WeightLookupTable weightLookupTable = word2Vec.lookupTable();
Iterator<INDArray> vectors = weightLookupTable.vectors();
INDArray wordVectorMatrix = word2Vec.getWordVectorMatrix("myword");
double[] wordVector = word2Vec.getWordVector("myword");File gModel = new File("/Developer/Vector Models/GoogleNews-vectors-negative300.bin.gz");
Word2Vec vec = WordVectorSerializer.readWord2VecModel(gModel);//Click:
IntelliJ Preferences > Compiler > Command Line Options
//Then paste:
-Xms1024m
-Xmx10g
-XX:MaxPermSize=2gjava.lang.StackOverflowError: null
at java.lang.ref.Reference.<init>(Reference.java:254) ~[na:1.8.0_11]
at java.lang.ref.WeakReference.<init>(WeakReference.java:69) ~[na:1.8.0_11]
at java.io.ObjectStreamClass$WeakClassKey.<init>(ObjectStreamClass.java:2306) [na:1.8.0_11]
at java.io.ObjectStreamClass.lookup(ObjectStreamClass.java:322) ~[na:1.8.0_11]
at java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1134) ~[na:1.8.0_11]
at java.io.ObjectOutputStream.defaultWriteFields(ObjectOutputStream.java:1548) ~[na:1.8.0_11]ehcache_auto_created2810726831714447871diskstore
ehcache_auto_created4727787669919058795diskstore
ehcache_auto_created3883187579728988119diskstore
ehcache_auto_created9101229611634051478diskstoreWord2Vec vec = new Word2Vec.Builder().layerSize(300).windowSize(5)
.layerSize(300).iterate(iter).tokenizerFactory(t).build();WordVectors wordVectors = WordVectorSerializer.loadTxtVectors(new File("glove.6B.50d.txt"));It's like numbers are language, like all the letters in the language are turned into numbers, and so it's something that everyone understands the same way. You lose the sounds of the letters and whether they click or pop or touch the palate, or go ooh or aah, and anything that can be misread or con you with its music or the pictures it puts in your mind, all of that is gone, along with the accent, and you have a new understanding entirely, a language of numbers, and everything becomes as clear to everyone as the writing on the wall. So as I say there comes a certain time for the reading of the numbers.
-- E.L. Doctorow, Billy BathgateData iteration tools for loading into neural networks.
MultiLayerNetwork model = new MultiLayerNetwork(conf);
model.init();
// pass an MNIST data iterator that automatically fetches data
DataSetIterator mnistTrain = new MnistDataSetIterator(batchSize, true, rngSeed);
net.fit(mnistTrain);// passing directly to the neural network
DataSetIterator mnistTest = new MnistDataSetIterator(batchSize, false, rngSeed);
net.eval(mnistTest);
// using an evaluation class
Evaluation eval = new Evaluation(10); //create an evaluation object with 10 possible classes
while(mnistTest.hasNext()){
DataSet next = mnistTest.next();
INDArray output = model.output(next.getFeatureMatrix()); //get the networks prediction
eval.eval(next.getLabels(), output); //check the prediction against the true class
}public UciSequenceDataSetIterator(int batchSize)public Cifar10DataSetIterator(int batchSize)public IrisDataSetIterator()public DataSet next()public LFWDataSetIterator(int batchSize, int numExamples, int[] imgDim, int numLabels, boolean useSubset,
PathLabelGenerator labelGenerator, boolean train, double splitTrainTest,
ImageTransform imageTransform, Random rng)public TinyImageNetDataSetIterator(int batchSize)public EmnistDataSetIterator(Set dataSet, int batch, boolean train) throws IOExceptionpublic static int numExamplesTrain(Set dataSet)public static int numExamplesTest(Set dataSet)public static int numLabels(Set dataSet)public static boolean isBalanced(Set dataSet)rr.initialize(new FileSplit(new File("/path/to/directory")));
DataSetIterator iter = new RecordReaderDataSetIterator.Builder(rr, 32)
//Label index (first arg): Always value 1 when using ImageRecordReader. For CSV etc: use index of the column
// that contains the label (should contain an integer value, 0 to nClasses-1 inclusive). Column indexes start
// at 0. Number of classes (second arg): number of label classes (i.e., 10 for MNIST - 10 digits)
.classification(1, nClasses)
.preProcessor(new ImagePreProcessingScaler()) //For normalization of image values 0-255 to 0-1
.build()
}rr.initialize(new FileSplit(new File("/path/to/myCsv.txt")));
DataSetIterator iter = new RecordReaderDataSetIterator.Builder(rr, 128)
//Specify the columns that the regression labels/targets appear in. Note that all other columns will be
// treated as features. Columns indexes start at 0
.regression(labelColFrom, labelColTo)
.build()
}public RecordReaderDataSetIterator(RecordReader recordReader, int batchSize)public void setCollectMetaData(boolean collectMetaData)public DataSet loadFromMetaData(RecordMetaData recordMetaData) throws IOExceptionpublic DataSet loadFromMetaData(List<RecordMetaData> list) throws IOExceptionpublic Builder writableConverter(WritableConverter converter)public Builder maxNumBatches(int maxNumBatches)public Builder regression(int labelIndex)public Builder regression(int labelIndexFrom, int labelIndexTo)public Builder classification(int labelIndex, int numClasses)public Builder preProcessor(DataSetPreProcessor preProcessor)public Builder collectMetaData(boolean collectMetaData)public RecordReaderMultiDataSetIterator build()public MultiDataSet loadFromMetaData(RecordMetaData recordMetaData) throws IOExceptionpublic MultiDataSet loadFromMetaData(List<RecordMetaData> list) throws IOExceptionpublic SequenceRecordReaderDataSetIterator(SequenceRecordReader featuresReader, SequenceRecordReader labels,
int miniBatchSize, int numPossibleLabels)public boolean hasNext()public DataSet loadFromMetaData(RecordMetaData recordMetaData) throws IOExceptionpublic DataSet loadFromMetaData(List<RecordMetaData> list) throws IOExceptionpublic MultiDataSet next(int num)public void setPreProcessor(MultiDataSetPreProcessor preProcessor)public boolean resetSupported()public boolean asyncSupported()public void reset()public void shutdown()public boolean hasNext()public MultiDataSet next()public void remove()public AsyncDataSetIterator(DataSetIterator baseIterator)public DataSet next(int num)public int inputColumns()public int totalOutcomes()public boolean resetSupported()public boolean asyncSupported()public void reset()public void shutdown()public int batch()public void setPreProcessor(DataSetPreProcessor preProcessor)public DataSetPreProcessor getPreProcessor()public boolean hasNext()public DataSet next()public void remove()public DoublesDataSetIterator(@NonNull Iterable<Pair<double[], double[]>> iterable, int batchSize)public SamplingDataSetIterator(DataSet sampleFrom, int batchSize, int totalNumberSamples)public INDArrayDataSetIterator(@NonNull Iterable<Pair<INDArray, INDArray>> iterable, int batchSize)public WorkspacesShieldDataSetIterator(@NonNull DataSetIterator iterator)public MultiDataSetIteratorSplitter(@NonNull MultiDataSetIterator baseIterator, long totalBatches, double ratio)public MultiDataSetIterator getTrainIterator()public MultiDataSet next(int num)public AsyncShieldDataSetIterator(@NonNull DataSetIterator iterator)public DataSet next(int num)public int inputColumns()public int totalOutcomes()public boolean resetSupported()public boolean asyncSupported()public void reset()public int batch()public void setPreProcessor(DataSetPreProcessor preProcessor)public DataSetPreProcessor getPreProcessor()public boolean hasNext()public DataSet next()public void remove()public MultiDataSet next(int num)public void setPreProcessor(MultiDataSetPreProcessor preProcessor)public 
boolean resetSupported()public boolean asyncSupported()public void reset()public boolean hasNext()public MultiDataSet next()public void remove()public RandomMultiDataSetIterator(int numMiniBatches, @NonNull List<Triple<long[], Character, Values>> features, @NonNull List<Triple<long[], Character, Values>> labels)public Builder addFeatures(long[] shape, Values values)public Builder addFeatures(long[] shape, char order, Values values)public Builder addLabels(long[] shape, Values values)public Builder addLabels(long[] shape, char order, Values values)public static INDArray generate(long[] shape, Values values)public static INDArray generate(long[] shape, char order, Values values)public EarlyTerminationMultiDataSetIterator(MultiDataSetIterator underlyingIterator, int terminationPoint)public ExistingDataSetIterator(@NonNull Iterator<DataSet> iterator)public DataSet next(int num)public EarlyTerminationDataSetIterator(DataSetIterator underlyingIterator, int terminationPoint)public DataSet next(int num)public int inputColumns()public int totalOutcomes()public void reset()public int batch()public boolean hasNext()public DataSet next()public void remove()public DataSetIteratorSplitter(@NonNull DataSetIterator baseIterator, long totalBatches, double ratio)public DataSetIterator getTrainIterator()public DataSet next(int i)public JointMultiDataSetIterator(DataSetIterator... 
iterators)public MultiDataSet next(int num)public void setPreProcessor(MultiDataSetPreProcessor preProcessor)public MultiDataSetPreProcessor getPreProcessor()public boolean resetSupported()public boolean asyncSupported()public void reset()public boolean hasNext()public MultiDataSet next()public void remove()public FloatsDataSetIterator(@NonNull Iterable<Pair<float[], float[]>> iterable, int batchSize)public FileSplitDataSetIterator(@NonNull List<File> files, @NonNull FileCallback callback)public DataSet next(int num)public int inputColumns()public int totalOutcomes()public void reset()public int batch()public boolean hasNext()public void remove()public MultiDataSetWrapperIterator(MultiDataSetIterator iterator)public RandomDataSetIterator(int numMiniBatches, long[] featuresShape, long[] labelsShape, Values featureValues, Values labelValues)