Package edu.stanford.nlp.util

Examples of edu.stanford.nlp.util.Timing
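
Timing is a lightweight millisecond stopwatch used throughout Stanford CoreNLP. It begins counting when constructed; report() reads the elapsed time without resetting, restart() reads it and resets the clock, stop() reads it and stops, and doing(String)/done() bracket a task with progress messages. The examples below are excerpts from CoreNLP source. First, a minimal sketch of the core calls (only methods visible in the excerpts are used; the sleeps stand in for real work):

import edu.stanford.nlp.util.Timing;

public class TimingDemo {
  public static void main(String[] args) throws InterruptedException {
    Timing t = new Timing();                                  // starts counting at construction
    Thread.sleep(250);                                        // stand-in for real work
    System.out.println("elapsed: " + t.report() + " ms");     // read without resetting
    System.out.println("interval: " + t.restart() + " ms");   // read and reset
    Thread.sleep(250);
    System.out.println("total: " + t.stop() + " ms");         // read and stop
  }
}

The first excerpt, ShiftReduceParser.loadModel, wraps model deserialization in a Timing and prints the elapsed time with done():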



  public static ShiftReduceParser loadModel(String path, String... extraFlags) {
    ShiftReduceParser parser = null;
    try {
      Timing timing = new Timing();
      System.err.print("Loading parser from serialized file " + path + " ...");
      parser = IOUtils.readObjectFromURLOrClasspathOrFileSystem(path);
      timing.done();  // prints the elapsed load time
    } catch (IOException e) {
      throw new RuntimeIOException(e);
    } catch (ClassNotFoundException e) {
      throw new RuntimeIOException(e);
    }
    // (the full source applies extraFlags here before returning)
    return parser;
  }
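
The POS tagger's loader creates a Timing only when verbose output is requested, bracketing the model load between doing() and done():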


            "pos.model:" + props.getProperty("pos.model", DefaultPaths.DEFAULT_POS_MODEL) +
            "pos.nthreads:" + props.getProperty("pos.nthreads", props.getProperty("nthreads", "")));
  }

  private static MaxentTagger loadModel(String loc, boolean verbose) {
    Timing timer = null;
    if (verbose) {
      timer = new Timing();
      timer.doing("Loading POS Model [" + loc + ']');
    }
    MaxentTagger tagger = new MaxentTagger(loc);
    if (verbose) {
      timer.done();
    }
    return tagger;
  }
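
In multithreaded benchmarks, each worker thread owns its own Timing and reads the elapsed milliseconds with stop(); threadName, crf, and filename are fields of the enclosing Runnable: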


    @Override
    public void run() {
      Timing t = new Timing();
      resultsString = runClassifier(crf, filename);
      long millis = t.stop();
      System.out.println("Thread " + threadName + " took " + millis +
                         "ms to tag file " + filename);
    }
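
Stochastic optimizers typically keep two timers: total accumulates the whole run (and drives a maximum-wall-clock stopping criterion), while current is read and reset with restart() to measure each reporting interval: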

    //!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    //!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    //            Loop
    //!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    //!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    Timing total = new Timing();
    Timing current = new Timing();
    total.start();
    current.start();
    for (k = 0; k < maxIterations; k++) {
      try {
        boolean doEval = (k > 0 && evaluateIters > 0 && k % evaluateIters == 0);
        if (doEval) {
          doEvaluation(x);
        }
        int pass = k/numBatches;
        int batch = k%numBatches;
        say("Iter: " + k + " pass " + pass + " batch " + batch);

        // restrict number of saved gradients
        //  (recycle memory of first gradient in list for new gradient)
        if (k > 0 && gradList.size() >= memory) {
          newGrad = gradList.remove(0);
        } else {
          newGrad = new double[grad.length];
        }

        dfunction.hasNewVals = true;
        System.arraycopy(dfunction.derivativeAt(x,v,bSize),0,newGrad,0,newGrad.length);
        ArrayMath.assertFinite(newGrad,"newGrad");
        gradList.add(newGrad);
        grad = smooth(gradList);

        //Get the next X
        takeStep(dfunction);

        ArrayMath.assertFinite(newX,"newX");

        //!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
        // THIS IS FOR DEBUG ONLY
        //!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
        if (outputIterationsToFile && (k % outputFrequency == 0) && k != 0) {
          double curVal = dfunction.valueAt(x);
          say(" TrueValue{ " + curVal + " } ");
          file.println(k + " , " + curVal + " , " + total.report());
        }
        //!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
        // END OF DEBUG STUFF
        //!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

        if (k >= maxIterations) {
          sayln("Stochastic Optimization complete.  Stopped after max iterations");
          x = newX;
          break;
        }

        if (total.report() >= maxTime) {
          sayln("Stochastic Optimization complete.  Stopped after max time");
          x = newX;
          break;
        }

        System.arraycopy(newX, 0, x, 0, x.length);

        say("[" + ( total.report() )/1000.0 + " s " );
        say("{" + (current.restart()/1000.0) + " s}] ");
        say(" "+dfunction.lastValue());

        if (quiet) {
          System.err.print(".");
        }
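
The two-timer pattern above distills to a few lines. A minimal sketch, assuming only the Timing behavior visible in these excerpts (maxTime and the sleep are hypothetical stand-ins for a real time budget and a real optimization step):

import edu.stanford.nlp.util.Timing;

public class TwoTimerLoop {
  public static void main(String[] args) throws InterruptedException {
    long maxTime = 1000;                     // hypothetical wall-clock budget, in ms
    Timing total = new Timing();             // cumulative clock, never reset
    Timing current = new Timing();           // per-interval clock
    while (true) {
      Thread.sleep(100);                     // stand-in for one optimization step
      System.out.print("[" + total.report() / 1000.0 + " s ");
      System.out.println("{" + current.restart() / 1000.0 + " s}]");
      if (total.report() >= maxTime) break;  // stop once the budget is spent
    }
  }
}

The next excerpt, an AdaGrad minimizer with lazy L1/L2 updates, keeps just the cumulative timer and reports total.report() alongside the objective value: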

  sayln("       Max iterations is = " + maxIterations);

  Counter<K> lastUpdated = new ClassicCounter<K>();
  int timeStep = 0;

  Timing total = new Timing();
  total.start();

  for (int iter = 0; iter < numPasses; iter++) {
    double totalObjValue = 0;

    for (int j = 0; j < numBatches; j++) {
      int[] selectedData = getSample(function, this.batchSize);
      // the core AdaGrad update
      Counter<K> gradient = function.derivativeAt(x, selectedData);
      totalObjValue = totalObjValue + function.valueAt(x, selectedData);

      for (K feature : gradient.keySet()) {
        double gradf = gradient.getCount(feature);
        double prevrate = eta / (Math.sqrt(sumGradSquare.getCount(feature)) + soften);

        double sgsValue = sumGradSquare.incrementCount(feature, gradf * gradf);
        double currentrate = eta / (Math.sqrt(sgsValue) + soften);
        double testupdate = x.getCount(feature) - (currentrate * gradient.getCount(feature));
        double lastUpdateTimeStep = lastUpdated.getCount(feature);
        double idleinterval = timeStep - lastUpdateTimeStep - 1;
        lastUpdated.setCount(feature, (double) timeStep);

        // lazy update: fold in the L1/L2 penalty for the steps this feature sat idle
        double trunc = Math.max(0.0,
            Math.abs(testupdate) - (currentrate + prevrate * idleinterval) * this.lambdaL1);
        double trunc2 = trunc * Math.pow(1 - this.lambdaL2, currentrate + prevrate * idleinterval);
        double realupdate = Math.signum(testupdate) * trunc2;
        if (realupdate < EPS) {
          x.remove(feature);
        } else {
          x.setCount(feature, realupdate);
        }

        // reporting
        timeStep++;
        if (timeStep > maxIterations) {
          sayln("Stochastic Optimization complete.  Stopped after max iterations");
          break;
        }
        sayln(String.format("Iter %d \t batch: %d \t time=%.2f \t obj=%.4f", iter, timeStep,
            total.report() / 1000.0, totalObjValue));
      }
    }
  }
  return x;
}
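
A multithreaded tagger benchmark times how long each thread takes to build and run a TestClassifier: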


    @Override
    public void run() {
      try {
        Timing t = new Timing();
        TestClassifier testClassifier = new TestClassifier(tagger);
        long millis = t.stop();
        resultsString = testClassifier.resultsString(tagger);
        System.out.println("Thread " + threadName + " took " + millis +
                           " milliseconds to tag " + testClassifier.getNumWords() +
                           " words.\n" + resultsString);
      } catch (IOException e) {
        // (handler body elided in this excerpt)
      }
    }
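
This learning-rate tuner times the whole eta search and formats the result with the static helper Timing.toSecondsString():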

   * @param sampleSize the number of training examples to sample when scoring each candidate eta
   * @param seta the starting eta (learning rate) for the search
   */
  public double tune(AbstractStochasticCachingDiffUpdateFunction function, double[] initial, int sampleSize, double seta)
  {
    Timing timer = new Timing();
    int[] sample = function.getSample(sampleSize);
    double sobj = getObjective(function, initial, 1, sample);
    double besteta = 1;
    double bestobj = sobj;
    double eta = seta;
    int totest = 10;
    double factor = 2;
    boolean phase2 = false;
    while (totest > 0 || !phase2)
    {
      double obj = tryEta(function, initial, sample, eta);
      boolean okay = (obj < sobj);
      sayln("  Trying eta=" + eta + "  obj=" + obj + ((okay)? "(possible)":"(too large)"));
      if (okay)
      {
        totest -= 1;
        if (obj < bestobj) {
          bestobj = obj;
          besteta = eta;
        }
      }
      if (! phase2)
      {
        if (okay) {
          eta = eta * factor;
        } else {
          phase2 = true;
          eta = seta;
        }
      }
      if (phase2) {
        eta = eta / factor;
      }
    }
    // take it on the safe side (implicit regularization)
    besteta /= factor;
    // determine t
    t0 = (int) (1 / (besteta * lambda));
    sayln("  Taking eta=" + besteta + " t0=" + t0);
    sayln("  Tuning completed in: " + Timing.toSecondsString(timer.report()) + " s");
    return besteta;
  }
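
Another optimizer main loop pairs the same two timers with per-pass reporting and an iteration cap: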

    //!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    //            Loop
    //!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    //!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

    Timing total = new Timing();
    Timing current = new Timing();
    total.start();
    current.start();
    int t = t0;
    int iters = 0;
    for (int pass = 0; pass < numPasses; pass++)  {
      boolean doEval = (pass > 0 && evaluateIters > 0 && pass % evaluateIters == 0);
      if (doEval) {
        rescale();
        doEvaluation(x);
      }

      double totalValue = 0;
      double lastValue = 0;
      say("Iter: " + iters + " pass " + pass + " batch 1 ... ");
      for (int batch = 0; batch < numBatches; batch++) {
        iters++;

        // Get the next X
        double eta = 1 / (lambda * t);
        double gain = eta / xscale;
        lastValue = function.calculateStochasticUpdate(x, xscale, bSize, gain);
        totalValue += lastValue;
        // weight decay (for L2 regularization)
        xscale *= (1 - eta * lambda * bSize);
        t += bSize;
      }
      if (xscale < 1e-6) {
        rescale();
      }
      try {
        ArrayMath.assertFinite(x, "x");
      } catch (ArrayMath.InvalidElementException e) {
        System.err.println(e.toString());
        for (int i = 0; i < x.length; i++) { x[i] = Double.NaN; }
        break;
      }
      xnorm = getNorm(x) * xscale * xscale;
      // Calculate loss based on L2 regularization
      double loss = totalValue + 0.5 * xnorm * lambda * totalSamples;
      say(String.valueOf(numBatches));
      say("[" + total.report() / 1000.0 + " s ");
      say("{" + current.restart() / 1000.0 + " s}] ");
      sayln(" " + lastValue + " " + totalValue + " " + loss);

      if (iters >= maxIterations) {
        sayln("Stochastic Optimization complete.  Stopped after max iterations");
        break;
      }
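
The identical timer setup opens yet another minimizer's loop: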

    //!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    //            Loop
    //!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    //!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

    Timing total = new Timing();
    Timing current = new Timing();
    total.start();
    current.start();
    int iters = 0;
    double gValue = 0;
    double wValue = 0;
    double currentRate = 0, testUpdate = 0, realUpdate = 0;
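
Finally, sentiment model training uses a single Timing to enforce a wall-clock budget: timing.report() is checked against maxTrainTimeMillis after every batch and again after every epoch: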


  public static void train(SentimentModel model, String modelPath, List<Tree> trainingTrees, List<Tree> devTrees) {
    Timing timing = new Timing();
    long maxTrainTimeMillis = model.op.trainOptions.maxTrainTimeSeconds * 1000;
    int debugCycle = 0;
    double bestAccuracy = 0.0;

    // train using AdaGrad (seemed to work best during the dvparser project)
    double[] sumGradSquare = new double[model.totalParamSize()];
    Arrays.fill(sumGradSquare, model.op.trainOptions.initialAdagradWeight);

    int numBatches = trainingTrees.size() / model.op.trainOptions.batchSize + 1;
    System.err.println("Training on " + trainingTrees.size() + " trees in " + numBatches + " batches");
    System.err.println("Times through each training batch: " + model.op.trainOptions.epochs);
    for (int epoch = 0; epoch < model.op.trainOptions.epochs; ++epoch) {
      System.err.println("======================================");
      System.err.println("Starting epoch " + epoch);
      if (epoch > 0 && model.op.trainOptions.adagradResetFrequency > 0 &&
          (epoch % model.op.trainOptions.adagradResetFrequency == 0)) {
        System.err.println("Resetting adagrad weights to " + model.op.trainOptions.initialAdagradWeight);
        Arrays.fill(sumGradSquare, model.op.trainOptions.initialAdagradWeight);
      }

      List<Tree> shuffledSentences = Generics.newArrayList(trainingTrees);
      Collections.shuffle(shuffledSentences, model.rand);
      for (int batch = 0; batch < numBatches; ++batch) {
        System.err.println("======================================");
        System.err.println("Epoch " + epoch + " batch " + batch);

        // Each batch will be of the specified batch size, except the
        // last batch will include any leftover trees at the end of
        // the list
        int startTree = batch * model.op.trainOptions.batchSize;
        int endTree = (batch + 1) * model.op.trainOptions.batchSize;
        if (endTree + model.op.trainOptions.batchSize > shuffledSentences.size()) {
          endTree = shuffledSentences.size();
        }

        executeOneTrainingBatch(model, shuffledSentences.subList(startTree, endTree), sumGradSquare);

        long totalElapsed = timing.report();
        System.err.println("Finished epoch " + epoch + " batch " + batch + "; total training time " + totalElapsed + " ms");

        if (maxTrainTimeMillis > 0 && totalElapsed > maxTrainTimeMillis) {
          // no need to debug output, we're done now
          break;
        }

        if (batch == 0 && epoch > 0 && epoch % model.op.trainOptions.debugOutputEpochs == 0) {
          double score = 0.0;
          if (devTrees != null) {
            Evaluate eval = new Evaluate(model);
            eval.eval(devTrees);
            eval.printSummary();
            score = eval.exactNodeAccuracy() * 100.0;
          }

          // output an intermediate model
          if (modelPath != null) {
            String tempPath = modelPath;
            if (modelPath.endsWith(".ser.gz")) {
              tempPath = modelPath.substring(0, modelPath.length() - 7) + "-" + FILENAME.format(debugCycle) + "-" + NF.format(score) + ".ser.gz";
            } else if (modelPath.endsWith(".gz")) {
              tempPath = modelPath.substring(0, modelPath.length() - 3) + "-" + FILENAME.format(debugCycle) + "-" + NF.format(score) + ".gz";
            } else {
              tempPath = modelPath.substring(0, modelPath.length() - 3) + "-" + FILENAME.format(debugCycle) + "-" + NF.format(score);
            }
            model.saveSerialized(tempPath);
          }

          ++debugCycle;
        }
      }
      long totalElapsed = timing.report();

      if (maxTrainTimeMillis > 0 && totalElapsed > maxTrainTimeMillis) {
        System.err.println("Max training time exceeded, exiting");
        break;
      }
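
That budget check reduces to a small pattern. A minimal sketch under the same assumptions as above (the sleep stands in for one training epoch; the one-second budget is hypothetical):

import edu.stanford.nlp.util.Timing;

public class TrainingBudget {
  public static void main(String[] args) throws InterruptedException {
    Timing timing = new Timing();
    long maxTrainTimeMillis = 1000;          // hypothetical budget; <= 0 would disable the check
    for (int epoch = 0; ; epoch++) {
      Thread.sleep(200);                     // stand-in for one training epoch
      long totalElapsed = timing.report();
      System.out.println("finished epoch " + epoch + "; total training time " + totalElapsed + " ms");
      if (maxTrainTimeMillis > 0 && totalElapsed > maxTrainTimeMillis) {
        System.out.println("Max training time exceeded, exiting");
        break;
      }
    }
  }
}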
