Package tv.floe.metronome.deeplearning.dbn

Examples of tv.floe.metronome.deeplearning.dbn.DeepBeliefNetwork
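The snippets below are excerpts from Metronome's training drivers and unit tests. Together they walk through constructing a DeepBeliefNetwork, layer-wise pre-training via preTrain(), supervised fine-tuning via finetune(), prediction via predict(), model serialization via write()/load(), and distributed parameter averaging across workers.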


    int n_layers = hiddenLayerSizes.length;
    RandomGenerator rng = new MersenneTwister(123);
   
   
    DeepBeliefNetwork dbn = new DeepBeliefNetwork( numIns, hiddenLayerSizes, numLabels, n_layers, rng );
       
    dbn.useRegularization = false;
    dbn.setSparsity(0.01);
    dbn.setMomentum(0);
   
    int recordsProcessed = 0;
   
   
    StopWatch watch = new StopWatch();
    watch.start();
   
    StopWatch batchWatch = new StopWatch();
   
   
      // single-batch variant: the streaming fetcher loop is disabled in this excerpt
      recordsProcessed += batchSize;
     
      System.out.println( "PreTrain: Batch Mode, Processed Total " + recordsProcessed + ", Elapsed Time " + watch.toString() );
     
      batchWatch.reset();
      batchWatch.start();
      dbn.preTrain( recordBatch.getFirst(), 1, learningRate, preTrainEpochs);
      batchWatch.stop();
     
      System.out.println( "Batch Training Elapsed Time " + batchWatch.toString() );

      System.out.println( "DBN Network Stats:\n" + dbn.generateNetworkSizeReport() );

      System.out.println( "FineTune: Batch Mode, Processed Total " + recordsProcessed + ", Elapsed Time " + watch.toString() );
     
     
      dbn.finetune( recordBatch.getSecond(), learningRate, fineTuneEpochs );

      dbn.preTrain( recordBatch.getFirst(), 1, learningRate, preTrainEpochs);
      dbn.finetune( recordBatch.getSecond(), learningRate, fineTuneEpochs );
     
      dbn.preTrain( recordBatch.getFirst(), 1, learningRate, preTrainEpochs);
      dbn.finetune( recordBatch.getSecond(), learningRate, fineTuneEpochs );
     
    watch.stop();
   
    System.out.println("----------- Training Complete! -----------");
    System.out.println( "Processed Total " + recordsProcessed + ", Elapsed Time " + watch.toString() );
   
    // save the model
   
    FileOutputStream oFileOutStream = new FileOutputStream( "/tmp/Metronome_DBN_Mnist.model", false);
    dbn.write( oFileOutStream );
   
   
    // now evaluate the results ...
 
View Full Code Here
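
The excerpt above leans on variables declared elsewhere in the driver: numIns, numLabels, the hyperparameters, and the recordBatch input/label pair. The second argument to preTrain() is k (fixed at 1 here), the same k the XOR tests below pass explicitly. A minimal sketch of that scaffolding follows, assuming commons-math3 RNG/Pair types and Mahout matrices; the excerpts never show the imports or concrete values, so treat all of these as placeholders:

    import org.apache.commons.math3.random.MersenneTwister;
    import org.apache.commons.math3.random.RandomGenerator;
    import org.apache.commons.math3.util.Pair;
    import org.apache.mahout.math.DenseMatrix;
    import org.apache.mahout.math.Matrix;

    // hypothetical MNIST-style dimensions and hyperparameters
    int numIns = 784, numLabels = 10;
    int[] hiddenLayerSizes = new int[] { 500, 250, 100 };
    int batchSize = 20;
    double learningRate = 0.001;
    int preTrainEpochs = 100, fineTuneEpochs = 100;

    // one mini-batch: inputs in the first slot, one-hot labels in the second,
    // matching the recordBatch.getFirst() / recordBatch.getSecond() calls above
    Matrix inputs = new DenseMatrix( batchSize, numIns );
    Matrix labels = new DenseMatrix( batchSize, numLabels );
    Pair<Matrix, Matrix> recordBatch = new Pair<Matrix, Matrix>( inputs, labels );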


    int n_layers = hiddenLayerSizes.length;
    RandomGenerator rng = new MersenneTwister(123);
   
   
    DeepBeliefNetwork dbn = new DeepBeliefNetwork( numIns, hiddenLayerSizes, numLabels, n_layers, rng );
       
    dbn.useRegularization = false;
    dbn.setSparsity(0.01);
    dbn.setMomentum(0);
   
   
    int recordsProcessed = 0;
    int batchesProcessed = 0;
    long totalBatchProcessingTime = 0;
   
    StopWatch watch = new StopWatch();
    watch.start();
   
    StopWatch batchWatch = new StopWatch();
   
   
    do  {
     
      recordsProcessed += batchSize;
      batchesProcessed++;
     
      System.out.println( "PreTrain: Batch Mode, Processed Total " + recordsProcessed + ", Elapsed Time " + watch.toString() );
     
      batchWatch.reset();
      batchWatch.start();
      dbn.preTrain( first.getFirst(), 1, learningRate, preTrainEpochs);
      batchWatch.stop();
     
      totalBatchProcessingTime += batchWatch.getTime();
     
      System.out.println( "Batch Training Elapsed Time " + batchWatch.toString() );
View Full Code Here

    int n_layers = hiddenLayerSizes.length;
    RandomGenerator rng = new MersenneTwister(123);
   
   
    DeepBeliefNetwork dbn = new DeepBeliefNetwork( numIns, hiddenLayerSizes, numLabels, n_layers, rng );
       
    int recordsProcessed = 0;
   
    do  {
     
      recordsProcessed += batchSize;
     
      System.out.println( "PreTrain: Batch Mode, Processed Total " + recordsProcessed );
      dbn.preTrain( first.getFirst(), 1, learningRate, preTrainEpochs);

      if (fetcher.hasNext()) {
        first = fetcher.next();
      }
     
    } while (fetcher.hasNext());

    fetcher.reset();
    first = fetcher.next();
   
    recordsProcessed = 0;
   
    do {
     
      recordsProcessed += batchSize;
     
      System.out.println( "FineTune: Batch Mode, Processed Total " + recordsProcessed );
     
     
      dbn.finetune( first.getSecond(), learningRate, fineTuneEpochs );
     
      if (fetcher.hasNext()) {
        first = fetcher.next();
      }
     
    } while (fetcher.hasNext());
   
    System.out.println("----------- Training Complete! -----------");
   
    // save model
   
    System.out.println("----------- Saving Model -----------");
    dbn.write( "/tmp/metronome/dbn/TEST_DBN_MNIST/models/mnist.model" );
   
    // now evaluate the results ...
View Full Code Here
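
The fetcher driving these loops is visible only through three calls: hasNext(), next(), and reset(), with next() returning the input/label pair for the next mini-batch. A hypothetical sketch of that contract, reusing the Pair/Matrix assumptions from the sketch above (the concrete fetcher type does not appear in these excerpts):

    // hypothetical interface capturing the batch-fetcher protocol used above
    interface BatchFetcher {
        boolean hasNext();            // more mini-batches left in this pass?
        Pair<Matrix, Matrix> next();  // next (inputs, labels) mini-batch
        void reset();                 // rewind to the start of the dataset
    }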

    /* Alternative builder-style construction (commented out in the original source):
       ... .hiddenLayerSizes(hiddenLayerSizes).numberOfInputs(nIns).renderWeights(0)
           .useRegularization(false).withMomentum(0).withDist(new NormalDistribution(0, 0.001))
           .numberOfOutPuts(nOuts).withRng(rng).build();
    */
   
    DeepBeliefNetwork dbn = new DeepBeliefNetwork( nIns, hiddenLayerSizes, nOuts, hiddenLayerSizes.length, rng );
//    dbn.addWeightTransform(0, new MultiplyScalar(100));
//    dbn.addWeightTransform(1, new MultiplyScalar(10));

   
   
    dbn.preTrain( x_xor_Matrix, k, preTrainLr, preTrainEpochs );
    dbn.finetune( y_xor_Matrix, fineTuneLr, fineTuneEpochs );

    Matrix predict = dbn.predict( x_xor_Matrix );
    System.out.println("--- Predictions XOR ----");
    MatrixUtils.debug_print(predict);
    //log.info(predict.toString());

    Evaluation eval = new Evaluation();
View Full Code Here
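
The x_xor_Matrix / y_xor_Matrix fixtures are defined outside the excerpt. A plausible construction, assuming Mahout's DenseMatrix and one-hot labels for the two XOR classes (an assumption, not code from the test):

    // XOR truth table: four two-bit inputs
    double[][] x = { { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 } };
    // one-hot labels: column 0 = "false", column 1 = "true"
    double[][] y = { { 1, 0 }, { 0, 1 }, { 0, 1 }, { 1, 0 } };
    Matrix x_xor_Matrix = new DenseMatrix( x );
    Matrix y_xor_Matrix = new DenseMatrix( y );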

    /* Alternative builder-style construction (commented out in the original source):
       ... .hiddenLayerSizes(hiddenLayerSizes).numberOfInputs(nIns).renderWeights(0)
           .useRegularization(false).withMomentum(0).withDist(new NormalDistribution(0, 0.001))
           .numberOfOutPuts(nOuts).withRng(rng).build();
    */
   
    DeepBeliefNetwork dbn = new DeepBeliefNetwork( nIns, hiddenLayerSizes, nOuts, hiddenLayerSizes.length, rng );
//    dbn.addWeightTransform(0, new MultiplyScalar(100));
//    dbn.addWeightTransform(1, new MultiplyScalar(10));

   
   
    dbn.preTrain( x_toy_Matrix, k, preTrainLr, preTrainEpochs );
    dbn.finetune( y_toy_Matrix, fineTuneLr, fineTuneEpochs );

    Matrix predict = dbn.predict( x_toy_Matrix );
    System.out.println("--- Predictions Toy Matrix ----");
    MatrixUtils.debug_print(predict);
    //log.info(predict.toString());

    Evaluation eval = new Evaluation();
View Full Code Here

    /* Alternative builder-style construction (commented out in the original source):
       ... .hiddenLayerSizes(hiddenLayerSizes).numberOfInputs(nIns).renderWeights(0)
           .useRegularization(false).withMomentum(0).withDist(new NormalDistribution(0, 0.001))
           .numberOfOutPuts(nOuts).withRng(rng).build();
    */
   
    DeepBeliefNetwork dbn = new DeepBeliefNetwork( nIns, hiddenLayerSizes, nOuts, hiddenLayerSizes.length, rng );

    dbn.preTrain( x_xor_Matrix, k, preTrainLr, preTrainEpochs );
    dbn.finetune( y_xor_Matrix, fineTuneLr, fineTuneEpochs );

    // save / write the model
   
    FileOutputStream oFileOutStream = new FileOutputStream( tmpFilename, false);
    dbn.write( oFileOutStream );
   
    // read / load the model
    FileInputStream oFileInputStream = new FileInputStream( tmpFilename );
   
   
    // construct with placeholder dimensions; load() restores the serialized network state
    DeepBeliefNetwork dbn_deserialize = new DeepBeliefNetwork( 1, hiddenLayerSizes, 1, hiddenLayerSizes.length, rng );
    dbn_deserialize.load( oFileInputStream );
   
   
   
   
    assertEquals( dbn.inputNeuronCount, dbn_deserialize.inputNeuronCount );
View Full Code Here

    /* Alternative builder-style construction (commented out in the original source):
       ... .hiddenLayerSizes(hiddenLayerSizes).numberOfInputs(nIns).renderWeights(0)
           .useRegularization(false).withMomentum(0).withDist(new NormalDistribution(0, 0.001))
           .numberOfOutPuts(nOuts).withRng(rng).build();
    */
   
    DeepBeliefNetwork dbn = new DeepBeliefNetwork( nIns, hiddenLayerSizes, nOuts, hiddenLayerSizes.length, rng );

    dbn.preTrain( x_xor_Matrix, k, preTrainLr, preTrainEpochs );
    dbn.finetune( y_xor_Matrix, fineTuneLr, fineTuneEpochs );

    // save / write the model
   
    FileOutputStream oFileOutStream = new FileOutputStream( tmpFilename, false );
    dbn.write( oFileOutStream );

    // read / load the model
    FileInputStream oFileInputStream = new FileInputStream( tmpFilename );

    // construct with placeholder dimensions; load() restores the serialized network state
    DeepBeliefNetwork dbn_deserialize = new DeepBeliefNetwork( 1, hiddenLayerSizes, 1, hiddenLayerSizes.length, rng );
    dbn_deserialize.load( oFileInputStream );
   
    int[] hiddenLayerSizesTmp = new int[] {1};
   
    // now set up DBNs cloned via initBasedOn()
    DeepBeliefNetwork dbn_merge_load = new DeepBeliefNetwork( 1, hiddenLayerSizesTmp, 1, hiddenLayerSizesTmp.length, null );
    // this will clear the weights
    dbn_merge_load.initBasedOn( dbn_deserialize );

    DeepBeliefNetwork dbn_merge_load_2 = new DeepBeliefNetwork( 1, hiddenLayerSizesTmp, 1, hiddenLayerSizesTmp.length, null );
    // this will clear the weights
    dbn_merge_load_2.initBasedOn( dbn );
   
    //assertEquals( dbn.inputNeuronCount, dbn_deserialize.inputNeuronCount );
    //assertEquals( dbn.l2, dbn_deserialize.l2, 0.0 );

    // check logistic layer
View Full Code Here

    int nIns = 2,nOuts = 2;
    int[] hiddenLayerSizes = new int[] {2,2};
    double fineTuneLr = 0.001;
    int fineTuneEpochs = 100;
   
    DeepBeliefNetwork dbn_A = new DeepBeliefNetwork(nIns, hiddenLayerSizes, nOuts, hiddenLayerSizes.length, rng, x_xor_Matrix, y_xor_Matrix );
   
    // setup layer 0
    dbn_A.preTrainingLayers[ 0 ].getConnectionWeights().set(0, 0, 1.0);
    dbn_A.preTrainingLayers[ 0 ].getConnectionWeights().set(0, 1, 1.0);
    dbn_A.preTrainingLayers[ 0 ].getConnectionWeights().set(1, 0, 1.0);
    dbn_A.preTrainingLayers[ 0 ].getConnectionWeights().set(1, 1, 1.0);
   
    // Layer 0: hidden bias
   
    dbn_A.preTrainingLayers[ 0 ].getHiddenBias().set( 0, 0, 15.0 );
    dbn_A.preTrainingLayers[ 0 ].getHiddenBias().set( 0, 1, 16.0 );
       
    // Layer 0: visible bias

    dbn_A.preTrainingLayers[ 0 ].getVisibleBias().set( 0, 0, 25.0 );
    dbn_A.preTrainingLayers[ 0 ].getVisibleBias().set( 0, 1, 26.0 );
   
   
    // setup layer 1
    dbn_A.preTrainingLayers[ 1 ].getConnectionWeights().set(0, 0, 1.0);
    dbn_A.preTrainingLayers[ 1 ].getConnectionWeights().set(0, 1, 1.0);
    dbn_A.preTrainingLayers[ 1 ].getConnectionWeights().set(1, 0, 1.0);
    dbn_A.preTrainingLayers[ 1 ].getConnectionWeights().set(1, 1, 1.0);

   
   
    // setup log layer
    dbn_A.logisticRegressionLayer.connectionWeights.set(0, 0, 1.0);
    dbn_A.logisticRegressionLayer.connectionWeights.set(0, 1, 1.0);
    dbn_A.logisticRegressionLayer.connectionWeights.set(1, 0, 1.0);
    dbn_A.logisticRegressionLayer.connectionWeights.set(1, 1, 1.0);
   
    MatrixUtils.debug_print( dbn_A.logisticRegressionLayer.biasTerms );

    // set up bias terms
   
    dbn_A.logisticRegressionLayer.biasTerms.set( 0, 0, 35.0 );
    dbn_A.logisticRegressionLayer.biasTerms.set( 0, 1, 36.0 );
   
   
   
   
    DeepBeliefNetwork dbn_B = new DeepBeliefNetwork(nIns, hiddenLayerSizes, nOuts, hiddenLayerSizes.length, rng, x_xor_Matrix, y_xor_Matrix );

    dbn_B.preTrainingLayers[ 0 ].getConnectionWeights().set(0, 0, 2.0);
    dbn_B.preTrainingLayers[ 0 ].getConnectionWeights().set(0, 1, 1.0);
    dbn_B.preTrainingLayers[ 0 ].getConnectionWeights().set(1, 0, 3.0);
    dbn_B.preTrainingLayers[ 0 ].getConnectionWeights().set(1, 1, 4.0);

    // Layer 0: hidden bias
   
    dbn_B.preTrainingLayers[ 0 ].getHiddenBias().set( 0, 0, 5.0 );
    dbn_B.preTrainingLayers[ 0 ].getHiddenBias().set( 0, 1, 6.0 );
       
    // Layer 0: visible bias

    dbn_B.preTrainingLayers[ 0 ].getVisibleBias().set( 0, 0, 5.0 );
    dbn_B.preTrainingLayers[ 0 ].getVisibleBias().set( 0, 1, 6.0 );

    // setup layer 1
   
    dbn_B.preTrainingLayers[ 1 ].getConnectionWeights().set(0, 0, 2.0);
    dbn_B.preTrainingLayers[ 1 ].getConnectionWeights().set(0, 1, 1.0);
    dbn_B.preTrainingLayers[ 1 ].getConnectionWeights().set(1, 0, 3.0);
    dbn_B.preTrainingLayers[ 1 ].getConnectionWeights().set(1, 1, 4.0);
   
    dbn_B.logisticRegressionLayer.connectionWeights.set(0, 0, 1.0);
    dbn_B.logisticRegressionLayer.connectionWeights.set(0, 1, 2.0);
    dbn_B.logisticRegressionLayer.connectionWeights.set(1, 0, 3.0);
    dbn_B.logisticRegressionLayer.connectionWeights.set(1, 1, 4.0);
   
    dbn_B.logisticRegressionLayer.biasTerms.set( 0, 0, 5.0 );
    dbn_B.logisticRegressionLayer.biasTerms.set( 0, 1, 6.0 );
   
   
    // MatrixUtils.debug_print( dbn_B.preTrainingLayers[ 0 ].getConnectionWeights() );
   
    int[] hiddenLayerSizesTmp = new int[] {1};
   
    // now set up a master DBN cloned via initBasedOn()
    // (this only works if the workers have been initialized on data)
    DeepBeliefNetwork dbn_master = new DeepBeliefNetwork( 1, hiddenLayerSizesTmp, 1, hiddenLayerSizesTmp.length, null );
    dbn_master.initBasedOn( dbn_A );
   
   
    ArrayList<DeepBeliefNetwork> workers = new ArrayList<DeepBeliefNetwork>();
    workers.add(dbn_A);
    workers.add(dbn_B);
   
    dbn_master.computeAverageDBNParameterVector(workers);
   
    assertEquals( 1.5, dbn_master.preTrainingLayers[ 0 ].getConnectionWeights().get(0, 0), 0.0 );
    assertEquals( 1.0, dbn_master.preTrainingLayers[ 0 ].getConnectionWeights().get(0, 1), 0.0 );
    assertEquals( 2.0, dbn_master.preTrainingLayers[ 0 ].getConnectionWeights().get(1, 0), 0.0 );
    assertEquals( 2.5, dbn_master.preTrainingLayers[ 0 ].getConnectionWeights().get(1, 1), 0.0 );
View Full Code Here
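
The assertions pin down what computeAverageDBNParameterVector() does: an element-wise mean over the workers' parameters. For cell (0, 0) of layer 0, dbn_A holds 1.0 and dbn_B holds 2.0, so the master ends up with (1.0 + 2.0) / 2 = 1.5; the other cells follow the same rule. A sketch of that arithmetic (illustration only, not the library code):

    // element-wise mean across N workers, as the assertions imply
    static double average(double[] workerWeights) {
        double sum = 0.0;
        for (double w : workerWeights) sum += w;
        return sum / workerWeights.length;
    }
    // average(new double[] { 1.0, 2.0 }) == 1.5, matching layer 0, cell (0, 0)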

 
        useRegularization = this.conf.get("tv.floe.metronome.dbn.conf.useRegularization");
     
      this.n_layers = hiddenLayerSizes.length;
     
      this.dbn = new DeepBeliefNetwork( numIns, hiddenLayerSizes, numLabels, n_layers, rng );
 
      // default it to off
      this.dbn.useRegularization = false;
     
      if (useRegularization != null && useRegularization.equals("true")) {
View Full Code Here
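
The regularization flag is read from the job configuration under the key tv.floe.metronome.dbn.conf.useRegularization and defaults to off. Turning it on from the driver would look something like this, assuming a Hadoop-style Configuration object named conf (only the key string comes from the excerpt):

    // hypothetical driver-side setup
    conf.set( "tv.floe.metronome.dbn.conf.useRegularization", "true" );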

      for (DBNParameterVectorUpdateable dbn_worker : workerUpdates) {

        ByteArrayInputStream baInputStream = new ByteArrayInputStream( dbn_worker.param_msg.dbn_payload );

        DeepBeliefNetwork dbn_worker_deser = new DeepBeliefNetwork( 1, hiddenLayerSizesTmp, 1, hiddenLayerSizesTmp.length, null );
        dbn_worker_deser.load( baInputStream );

        try {
          baInputStream.close();
        } catch (IOException e) {
          e.printStackTrace();
        }

        workerDBNs.add( dbn_worker_deser );

        // check the pre-train phase completion status:
        // if any one worker is not yet done, we can't move on
        if (!dbn_worker.param_msg.preTrainPhaseComplete) {
          areAllWorkersDoneWithPreTrainPhase = false;
        }

        if (!dbn_worker.param_msg.datasetPassComplete) {
          areAllWorkersDoneWithCurrentDatasetEpoch = false;
        }

      }
     
      // init the master with dummy params, then average the worker parameters into it
      this.dbn_averaged_master = new DeepBeliefNetwork( 1, hiddenLayerSizesTmp, 1, hiddenLayerSizesTmp.length, null );
      this.dbn_averaged_master.initBasedOn( workerDBNs.get( 0 ) );
      this.dbn_averaged_master.computeAverageDBNParameterVector( workerDBNs );
View Full Code Here
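
The preTrainPhaseComplete and datasetPassComplete flags act as a barrier: the master only advances once every worker reports done. On the worker side, the dbn_payload bytes presumably come from writing the local network into an in-memory stream; a minimal sketch of that round-trip, using only the write()/load() calls shown in these excerpts (the worker-side variable names are hypothetical):

    // worker: serialize the local DBN into the parameter message payload
    ByteArrayOutputStream baOutputStream = new ByteArrayOutputStream();
    dbn_local.write( baOutputStream );
    byte[] dbn_payload = baOutputStream.toByteArray();

    // master: rebuild it, exactly as the loop above does
    ByteArrayInputStream baInputStream = new ByteArrayInputStream( dbn_payload );
    DeepBeliefNetwork dbn_rebuilt = new DeepBeliefNetwork( 1, new int[] { 1 }, 1, 1, null );
    dbn_rebuilt.load( baInputStream );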
