Package org.apache.commons.lang3.time

Examples of org.apache.commons.lang3.time.StopWatch
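The snippets below are drawn from real projects. As a baseline, here is a minimal, self-contained sketch of the core StopWatch calls they rely on (start, stop, getTime, reset, and the formatted toString). The class name and the sleep standing in for real work are illustrative only.

import org.apache.commons.lang3.time.StopWatch;

public class StopWatchBasics {
    public static void main(String[] args) throws InterruptedException {
        StopWatch timer = new StopWatch();
        timer.start();

        Thread.sleep(250);                 // stand-in for the work being measured

        timer.stop();
        System.out.println("Elapsed ms: " + timer.getTime()); // elapsed milliseconds
        System.out.println("Formatted:  " + timer);           // toString() -> hours:minutes:seconds.millis

        // A stopped StopWatch cannot be started again directly; reset() it first.
        timer.reset();
        timer.start();
        timer.stop();
    }
}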


            context.put("lenskit.eval.command.name", getName());
            context.put("lenskit.eval.algorithm.name", algorithm.getName());

            // TODO Support serializing the recommender
            LenskitRecommender rec;
            StopWatch timer = new StopWatch();
            timer.start();
            try {
                logger.info("{}: building recommender {}", getName(), algorithm.getName());
                LenskitConfiguration config = new LenskitConfiguration();
                inputData.configure(config);
                rec = algorithm.buildRecommender(config);
            } catch (RecommenderBuildException e) {
                throw new TaskExecutionException(getName() + ": error building recommender", e);
            }
            timer.stop();
            logger.info("{}: trained in {}", getName(), timer);
            return action.apply(rec);
        } finally {
            context.finish();
        }
View Full Code Here


            outputs = task.getOutputs().getPrefixed(algorithmInfo, dataSet);
            TableWriter userResults = outputs.getUserWriter();
            List<Object> outputRow = Lists.newArrayList();

            logger.info("Building {} on {}", algorithmInfo, dataSet);
            StopWatch buildTimer = new StopWatch();
            buildTimer.start();
            buildRecommender();
            buildTimer.stop();
            logger.info("Built {} in {}", algorithmInfo.getName(), buildTimer);

            logger.info("Measuring {} on {}", algorithmInfo.getName(), dataSet.getName());

            StopWatch testTimer = new StopWatch();
            testTimer.start();
            List<Object> userRow = Lists.newArrayList();

            List<MetricWithAccumulator<?>> accumulators = Lists.newArrayList();

            for (Metric<?> eval: outputs.getMetrics()) {
                accumulators.add(makeMetricAccumulator(eval));
            }

            LongSet testUsers = dataSet.getTestData().getUserDAO().getUserIds();
            final NumberFormat pctFormat = NumberFormat.getPercentInstance();
            pctFormat.setMaximumFractionDigits(2);
            pctFormat.setMinimumFractionDigits(2);
            final int nusers = testUsers.size();
            logger.info("Testing {} on {} ({} users)", algorithmInfo, dataSet, nusers);
            int ndone = 0;
            for (LongIterator iter = testUsers.iterator(); iter.hasNext();) {
                if (Thread.interrupted()) {
                    throw new InterruptedException("eval job interrupted");
                }
                long uid = iter.nextLong();
                userRow.add(uid);
                userRow.add(null); // placeholder for the per-user time
                assert userRow.size() == 2;

                Stopwatch userTimer = Stopwatch.createStarted(); // Guava's Stopwatch, not Commons Lang's StopWatch
                TestUser test = getUserResults(uid);

                userRow.add(test.getTrainHistory().size());
                userRow.add(test.getTestHistory().size());

                for (MetricWithAccumulator<?> accum : accumulators) {
                    List<Object> ures = accum.measureUser(test);
                    if (ures != null) {
                        userRow.addAll(ures);
                    }
                }
                userTimer.stop();
                userRow.set(1, userTimer.elapsed(TimeUnit.MILLISECONDS) * 0.001);
                if (userResults != null) {
                    try {
                        userResults.writeRow(userRow);
                    } catch (IOException e) {
                        throw new RuntimeException("error writing user row", e);
                    }
                }
                userRow.clear();

                ndone += 1;
                if (ndone % 100 == 0) {
                    testTimer.split();
                    double time = testTimer.getSplitTime();
                    double tpu = time / ndone;
                    double tleft = (nusers - ndone) * tpu;
                    logger.info("tested {} of {} users ({}), ETA {}",
                                ndone, nusers, pctFormat.format(((double) ndone) / nusers),
                                DurationFormatUtils.formatDurationHMS((long) tleft));
                }
            }
            testTimer.stop();
            logger.info("Tested {} in {}", algorithmInfo.getName(), testTimer);

            writeMetricValues(buildTimer, testTimer, outputRow, accumulators);
            bus.post(JobEvents.finished(this));
        } catch (Throwable th) {
View Full Code Here
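
The ETA logging in the excerpt above uses split() and getSplitTime(), which read the elapsed time at a checkpoint without stopping the watch, plus DurationFormatUtils.formatDurationHMS for display. A condensed sketch of that pattern follows; the item count and the sleep standing in for per-user work are illustrative.

import org.apache.commons.lang3.time.DurationFormatUtils;
import org.apache.commons.lang3.time.StopWatch;

public class SplitProgressDemo {
    public static void main(String[] args) throws InterruptedException {
        int total = 500;                   // illustrative item count
        StopWatch timer = new StopWatch();
        timer.start();

        for (int done = 1; done <= total; done++) {
            Thread.sleep(2);               // stand-in for per-item work

            if (done % 100 == 0) {
                timer.split();                              // checkpoint; the watch keeps running
                long elapsedMs = timer.getSplitTime();      // milliseconds from start() to split()
                double perItemMs = (double) elapsedMs / done;
                long etaMs = (long) ((total - done) * perItemMs);
                System.out.println("done " + done + "/" + total
                        + ", ETA " + DurationFormatUtils.formatDurationHMS(etaMs));
            }
        }

        timer.stop();
        System.out.println("total " + timer);
    }
}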

        Vector uvec = Vector.createLength(userCount);
        Vector ivec = Vector.createLength(itemCount);

        for (int f = 0; f < featureCount; f++) {
            logger.debug("Training feature {}", f);
            StopWatch timer = new StopWatch();
            timer.start();

            uvec.fill(initialValue);
            ivec.fill(initialValue);

            FeatureInfo.Builder fib = new FeatureInfo.Builder(f);
            trainFeature(f, estimates, uvec, ivec, fib);
            summarizeFeature(uvec, ivec, fib);
            featureInfo.add(fib.build());

            // Update each rating's cached value to accommodate the feature values.
            estimates.update(uvec, ivec);

            // And store the data into the matrix
            userFeatures.setColumn(f, uvec);
            assert Math.abs(userFeatures.getColumnView(f).elementSum() - uvec.elementSum()) < 1.0e-4 : "user column sum matches";
            itemFeatures.setColumn(f, ivec);
            assert Math.abs(itemFeatures.getColumnView(f).elementSum() - ivec.elementSum()) < 1.0e-4 : "item column sum matches";

            timer.stop();
            logger.info("Finished feature {} in {}", f, timer);
        }

        // Wrap the user/item matrices because we won't use or modify them again
        return new FunkSVDModel(ImmutableMatrix.wrap(userFeatures),
View Full Code Here

    // warm up the mapper
    mapper.map(src, destClass);

    // perform x number of additional mappings
    log.info("Begin timings for " + testName);
    StopWatch timer = new StopWatch();
    timer.start();
    for (int i = 0; i < numIters; i++) {
      mapper.map(src, destClass);
    }
    timer.stop();
    log.info("Total time for additional " + numIters + " mappings: " + timer.getTime() + " milliseconds");
    log.info("avg time for " + numIters + " mappings: " + (timer.getTime() / numIters) + " milliseconds");
  }
View Full Code Here

    dbn.setMomentum(0);
   
    int recordsProcessed = 0;
   
   
    StopWatch watch = new StopWatch();
    watch.start();
   
    StopWatch batchWatch = new StopWatch();
   
   
//    do  {
     
      recordsProcessed += batchSize;
     
      System.out.println( "PreTrain: Batch Mode, Processed Total " + recordsProcessed + ", Elapsed Time " + watch.toString() );
     
      batchWatch.reset();
      batchWatch.start();
      dbn.preTrain( recordBatch.getFirst(), 1, learningRate, preTrainEpochs);
      batchWatch.stop();
     
      System.out.println( "Batch Training Elapsed Time " + batchWatch.toString() );

      System.out.println( "DBN Network Stats:\n" + dbn.generateNetworkSizeReport() );

/*     
      if (fetcher.hasNext()) {
View Full Code Here

   
    int recordsProcessed = 0;
    int batchesProcessed = 0;
    long totalBatchProcessingTime = 0;
   
    StopWatch watch = new StopWatch();
    watch.start();
   
    StopWatch batchWatch = new StopWatch();
   
   
    do  {
     
      recordsProcessed += batchSize;
      batchesProcessed++;
     
      System.out.println( "PreTrain: Batch Mode, Processed Total " + recordsProcessed + ", Elapsed Time " + watch.toString() );
     
      batchWatch.reset();
      batchWatch.start();
      dbn.preTrain( first.getFirst(), 1, learningRate, preTrainEpochs);
      batchWatch.stop();
     
      totalBatchProcessingTime += batchWatch.getTime();
     
      System.out.println( "Batch Training Elapsed Time " + batchWatch.toString() );

      //System.out.println( "DBN Network Stats:\n" + dbn.generateNetworkSizeReport() );

     
      if (fetcher.hasNext()) {
View Full Code Here
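
The two pre-training excerpts above reuse one batch timer across iterations alongside an overall timer. Because a stopped StopWatch cannot be restarted without reset(), each pass calls reset() and then start(). A condensed sketch of that pattern follows; the batch count and the sleep standing in for training work are illustrative.

import org.apache.commons.lang3.time.StopWatch;

public class BatchTimingDemo {
    public static void main(String[] args) throws InterruptedException {
        StopWatch overall = new StopWatch();
        overall.start();

        StopWatch batchWatch = new StopWatch();
        long totalBatchMs = 0;

        for (int batch = 0; batch < 5; batch++) {     // illustrative batch count
            batchWatch.reset();                       // required before restarting a stopped watch
            batchWatch.start();

            Thread.sleep(50);                         // stand-in for training one batch

            batchWatch.stop();
            totalBatchMs += batchWatch.getTime();
            System.out.println("batch " + batch + " took " + batchWatch
                    + ", elapsed " + overall);
        }

        overall.stop();
        System.out.println("all batches: " + totalBatchMs + " ms, wall clock " + overall);
    }
}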

    System.out.println("Worker > Compute() -------------------------- ");

   
    int recordsProcessed = 0;
   
    StopWatch batchWatch = new StopWatch();
   
    DataSet hdfs_recordBatch = null; //this.hdfs_fetcher.next();
   
    System.out.println("Iteration: " + this.currentIteration );
   
//    if (hdfs_recordBatch.getFirst().numRows() > 0) {
//    do  {
   
    if ( TrainingState.PRE_TRAIN == this.currentTrainingState ) {
   
      System.out.println("Worker > PRE TRAIN! " );
     
       if ( this.hdfs_fetcher.hasNext() ) {
       
        
        
        hdfs_recordBatch = this.hdfs_fetcher.next();

        System.out.println("Worker > Has Next! > Recs: " + hdfs_recordBatch.getFirst().numRows() );
       
        // check for the straggler batch condition
        if (0 == this.currentIteration && hdfs_recordBatch.getFirst().numRows() > 0 && hdfs_recordBatch.getFirst().numRows() < this.batchSize) {
         
        //  System.out.println( "Worker > Straggler Batch Condition!" );
         
          // ok, only in this situation do we lower the batch size
          this.batchSize = hdfs_recordBatch.getFirst().numRows();

          // re-setup the dataset iterator
          try {
            this.hdfs_fetcher = new MnistHDFSDataSetIterator( this.batchSize, this.totalTrainingDatasetSize, (TextRecordParser)lineParser );
          } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
          }

        //  System.out.println( "Worker > PreTrain: Setting up for a straggler split... (sub batch size)" );         
        //  System.out.println( "New batch size: " + this.batchSize );
        } else {
         
        //  System.out.println( "Worker > NO Straggler Batch Condition!" );
         
        }
       
        if (hdfs_recordBatch.getFirst().numRows() > 0) {
         
          if (hdfs_recordBatch.getFirst().numRows() < this.batchSize) {
           
          //  System.out.println( "Worker > PreTrain: [Jagged End of Split: Skipped] Processed Total " + recordsProcessed + " Total Time " + watch.toString() );
           
           
          } else {
           
          //  System.out.println( "Worker > Normal Processing!" );
           
            // calc stats on number records processed
            recordsProcessed += hdfs_recordBatch.getFirst().numRows();
           
            //System.out.println( "PreTrain: Batch Size: " + hdfs_recordBatch.getFirst().numRows() );
           
            batchWatch.reset();
           
            batchWatch.start();
       
            this.dbn.preTrain( hdfs_recordBatch.getFirst(), 1, this.learningRate, this.preTrainEpochs);
           
            batchWatch.stop();
   
            System.out.println( "Worker > PreTrain: Batch Mode, Processed Total " + recordsProcessed + ", Batch Time " + batchWatch.toString() + " Total Time " + watch.toString() );
   
          } // if
         
         
        } else {
       
          // in case we get a blank line
          System.out.println( "Worker > PreTrain > Idle pass, no records left to process in phase" );
         
        }
       
      } else {
       
        System.out.println( "Worker > PreTrain > Idle pass, no records left to process in phase" );
       
      }
     
    //  System.out.println( "Worker > Check PreTrain completion > completedEpochs: " + this.completedDatasetEpochs + ", preTrainDatasetPasses: " + this.preTrainDatasetPasses );
     
      // check for completion of split, to signal master on state change
      if (false == this.hdfs_fetcher.hasNext() && this.completedDatasetEpochs + 1 >= this.preTrainDatasetPasses ) {
       
        this.preTrainPhaseComplete = true;
      //  System.out.println( "Worker > Completion of pre-train phase" );
       
      }
     
         
   
    } else if ( TrainingState.FINE_TUNE == this.currentTrainingState) {
     
      //System.out.println( "DBN Network Stats:\n" + dbn.generateNetworkSizeReport() );

      if ( this.hdfs_fetcher.hasNext() ) {
       
        hdfs_recordBatch = this.hdfs_fetcher.next();
       
        if (hdfs_recordBatch.getFirst().numRows() > 0) {
         
          if (hdfs_recordBatch.getFirst().numRows() < this.batchSize) {
           
          //  System.out.println( "Worker > FineTune: [Jagged End of Split: Skipped] Processed Total " + recordsProcessed + " Total Time " + watch.toString() );

          } else {
           
            batchWatch.reset();
           
            batchWatch.start();
           
            this.dbn.finetune( hdfs_recordBatch.getSecond(), learningRate, fineTuneEpochs );
           
            batchWatch.stop();
           
            System.out.println( "Worker > FineTune > Batch Mode, Processed Total " + recordsProcessed + ", Batch Time " + batchWatch.toString() + " Total Time " + watch.toString() );
           
          }
         
        } else {
         
View Full Code Here

    public PreBuildOperation(final T owner, final AbstractBuild<?,?> build, final BuildListener listener) {
        super(owner, build, listener);
    }

    public boolean execute() {
        StopWatch watch = new StopWatch();
        watch.start();

        log.debug("Executing");
        try {
            boolean result = doExecute();
            log.debug("Finished in {}", watch);
View Full Code Here

        super(owner, build, listener);
        this.launcher = checkNotNull(launcher);
    }

    public boolean execute() throws InterruptedException, IOException {
        StopWatch watch = new StopWatch();
        watch.start();
        log.debug("Executing");
        try {
            boolean result = doExecute();
            log.debug("Finished in {}", watch);
            return result;
View Full Code Here

        String sql = "SELECT v.proposal_id as proposalId, avg(v.note) as moy, count(v.proposal_id) as nbVote, t.title as proposalTitle FROM vote v, proposal t where (v.proposal_id=t.id) group by v.proposal_id order by moy desc";
        List<SqlRow> rows = Ebean.createSqlQuery(sql).findList();
       
        Map<Long, Pair<Double, Integer>> moyennes = new HashMap<Long, Pair<Double, Integer>>();
        for (SqlRow row : rows) {
            Pair<Double, Integer> moyProposal = new ImmutablePair<>(row.getDouble("moy"), row.getInteger("nbVote"));
            moyennes.put(row.getLong("proposalId"), moyProposal);
        }
       
        return moyennes;
    }
View Full Code Here
