Package org.apache.hadoop.mapred.lib

Examples of org.apache.hadoop.mapred.lib.MultipleOutputs
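Each fragment below constructs a MultipleOutputs from the task's JobConf during setup and closes it when the task finishes. For orientation, here is a minimal, self-contained sketch of the usual wiring in the old mapred API, assuming a word-count style job; the class names, the "debug" named output, and the record types are illustrative, not taken from the fragments on this page.

import java.io.IOException;
import java.util.Iterator;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.mapred.lib.MultipleOutputs;

public class MultipleOutputsSketch {

  /* Tokenizes each input line into (word, 1) pairs. */
  public static class TokenMapper extends MapReduceBase
      implements Mapper<LongWritable, Text, Text, LongWritable> {
    private final LongWritable one = new LongWritable(1);

    public void map(LongWritable offset, Text line,
        OutputCollector<Text, LongWritable> output, Reporter reporter) throws IOException {
      for (String token : line.toString().split("\\s+")) {
        if (!token.isEmpty()) {
          output.collect(new Text(token), one);
        }
      }
    }
  }

  /* Sums counts and additionally writes every key to the "debug" named output. */
  public static class SumReducer extends MapReduceBase
      implements Reducer<Text, LongWritable, Text, LongWritable> {
    private MultipleOutputs mos;

    public void configure(JobConf conf) {
      // Same construction as in the fragments below.
      mos = new MultipleOutputs(conf);
    }

    @SuppressWarnings("unchecked")
    public void reduce(Text key, Iterator<LongWritable> values,
        OutputCollector<Text, LongWritable> output, Reporter reporter) throws IOException {
      long sum = 0;
      while (values.hasNext()) {
        sum += values.next().get();
      }
      output.collect(key, new LongWritable(sum));
      // Side output: written under the job output directory in files named
      // after the registered named output.
      mos.getCollector("debug", reporter).collect(key, new LongWritable(sum));
    }

    public void close() throws IOException {
      mos.close();   // flushes and closes every named-output writer
    }
  }

  public static void main(String[] args) throws IOException {
    JobConf conf = new JobConf(MultipleOutputsSketch.class);
    conf.setJobName("multiple-outputs-sketch");
    FileInputFormat.setInputPaths(conf, new Path(args[0]));
    FileOutputFormat.setOutputPath(conf, new Path(args[1]));
    conf.setMapperClass(TokenMapper.class);
    conf.setReducerClass(SumReducer.class);
    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(LongWritable.class);
    // Named outputs must be registered on the JobConf before the job is submitted.
    MultipleOutputs.addNamedOutput(conf, "debug", TextOutputFormat.class,
        Text.class, LongWritable.class);
    JobClient.runJob(conf);
  }
}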


       _props = KafkaETLUtils.getPropsFromJob(job);
       _contextList = new ArrayList<KafkaETLContext>();
       _job = job;
       _reporter = reporter;
       _contextIndex = -1;
       _mos = new MultipleOutputs(job);
       try {
           _limit = _props.getInt("kafka.request.limit", -1);
          
           /* get attempt id */
           String taskId = _job.get("mapred.task.id");


        } else {
          mRs.add(new UpperTriangular(rValue.get()));
        }
        block++;
      }
      outputs = new MultipleOutputs(new JobConf(context.getConfiguration()));
    }

      long omegaSeed = Long.parseLong(context.getConfiguration().get(PROP_OMEGA_SEED));
      r = Integer.parseInt(context.getConfiguration().get(PROP_AROWBLOCK_SIZE));
      omega = new Omega(omegaSeed, k, p);
      yLookahead = new ArrayList<double[]>(kp);
      qSolver = new GivensThinSolver(r, kp);
      outputs = new MultipleOutputs(new JobConf(context.getConfiguration()));
      closeables.addFirst(new Closeable() {
        @Override
        public void close() throws IOException {
          outputs.close();
        }
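The fragment above (and two more later on this page) does not close outputs directly; instead it pushes the close onto a deque of Closeables, so that everything opened during setup is released in reverse order of registration when the task finishes. A minimal sketch of that idiom, using hypothetical class and method names:

import java.io.Closeable;
import java.io.IOException;
import java.util.ArrayDeque;
import java.util.Deque;

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.lib.MultipleOutputs;

/* Sketch: gather Closeables while setting up, release them LIFO on teardown. */
class DeferredCloseSketch {
  private final Deque<Closeable> closeables = new ArrayDeque<Closeable>();
  private MultipleOutputs outputs;

  void setup(JobConf conf) {
    outputs = new MultipleOutputs(conf);
    // Anything addFirst()-ed after this point will be closed before outputs.
    closeables.addFirst(new Closeable() {
      public void close() throws IOException {
        outputs.close();
      }
    });
    // ... open readers/writers here and addFirst() a Closeable for each ...
  }

  void cleanup() throws IOException {
    // Close in reverse order of registration.
    while (!closeables.isEmpty()) {
      closeables.removeFirst().close();
    }
  }
}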

      reporter.incrCounter(MyCounter.TOTAL_DOCS, 1);
    }

    public void configure(JobConf conf) {
      multipleOutputs = new MultipleOutputs(conf);

      try {
        Path[] inputFiles = DistributedCache.getLocalCacheFiles(conf);
        if (inputFiles != null) {
          for (Path path : inputFiles) {
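The configure() method above also loads side data from the DistributedCache before the task runs. A minimal sketch of that lookup, assuming the cached files are plain text (the class and method names are illustrative):

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;

import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobConf;

/* Sketch: read every file shipped via the DistributedCache during configure(). */
class CacheFilesSketch {
  void loadCachedFiles(JobConf conf) throws IOException {
    Path[] localFiles = DistributedCache.getLocalCacheFiles(conf);
    if (localFiles == null) {
      return;   // nothing was cached for this job
    }
    for (Path path : localFiles) {
      BufferedReader reader = new BufferedReader(new FileReader(path.toString()));
      try {
        String line;
        while ((line = reader.readLine()) != null) {
          // ... parse the side-data line ...
        }
      } finally {
        reader.close();
      }
    }
  }
}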

  // private BloomMap outputValue = null;
  // private ProbDist outputValue = null;
  private HMapIDW outputValue = null;

  public void configure(JobConf conf) {
    multipleOutputs = new MultipleOutputs(conf);

    learning = conf.getBoolean(Settings.PROPERTY_PREFIX + "model.train", Settings.LEARNING_MODE);

    // truncateBeta = conf.getBoolean(Settings.PROPERTY_PREFIX + "model.truncate.beta", false);

  private double normalizeFactor = 0;

  private OutputCollector<PairOfIntFloat, HMapIDW> outputBeta;

  public void configure(JobConf conf) {
    multipleOutputs = new MultipleOutputs(conf);

    learning = conf.getBoolean(Settings.PROPERTY_PREFIX + "model.train", Settings.LEARNING_MODE);

    // System.out.println("======================================================================");
    // System.out.println("Available processors (cores): " +

    totalAlphaSufficientStatistics = new double[numberOfTopics];

    updateLogGamma = new double[numberOfTopics];
    logPhiTable = new HMapIV<double[]>();

    multipleOutputs = new MultipleOutputs(conf);

    double alphaSum = 0;

    SequenceFile.Reader sequenceFileReader = null;
    try {

    learning = conf.getBoolean(Settings.PROPERTY_PREFIX + "model.train", Settings.LEARNING_MODE);
    randomStartGamma = conf.getBoolean(Settings.PROPERTY_PREFIX + "model.random.start",
        Settings.RANDOM_START_GAMMA);

    multipleOutputs = new MultipleOutputs(conf);

    updateGamma = new double[numberOfTopics];
    phiTable = new HashMap[numberOfLanguages];
    for (int languageIndex = 0; languageIndex < numberOfLanguages; languageIndex++) {
      phiTable[languageIndex] = new HashMap<Integer, double[]>();

        sb = SSVDHelper.loadAndSumUpVectors(new Path(sbPathStr), conf);
        if (sb == null)
          throw new IOException(String.format("Unable to load s_omega from path %s.", sbPathStr));
      }

      outputs = new MultipleOutputs(new JobConf(conf));
      closeables.addFirst(new Closeable() {
        @Override
        public void close() throws IOException {
          outputs.close();
        }

    int p = Integer.parseInt(jobConf.get(PROP_P));
    kp = k + p;

    yLookahead = Lists.newArrayListWithCapacity(kp);
    qSolver = new GivensThinSolver(r, kp);
    outputs = new MultipleOutputs(new JobConf(jobConf));
    closeables.addFirst(new Closeable() {
      @Override
      public void close() throws IOException {
        outputs.close();
      }
