Package: org.apache.hadoop.io

Examples of org.apache.hadoop.io.RawComparator


    for(int depth = 0; depth < criteria.getElements().size(); depth++) {
      Field field = schema.getField(depth);
      Field.Type type = field.getType();
      SortElement sortElement = criteria.getElements().get(depth);
      Order sort = sortElement.getOrder();
      RawComparator comparator = sortElement.getCustomComparator();

      if(comparator != null) {
        //custom comparator for OBJECT
        int length1 = WritableComparator.readVInt(b1, o.offset1);
        int length2 = WritableComparator.readVInt(b2, o.offset2);
        o.offset1 += WritableUtils.decodeVIntSize(b1[o.offset1]);
        o.offset2 += WritableUtils.decodeVIntSize(b2[o.offset2]);
        int comparison = comparator.compare(b1, o.offset1, length1, b2,
            o.offset2, length2);
        o.offset1 += length1;
        o.offset2 += length2;
        if(comparison != 0) {
          return (sort == Order.ASC) ? comparison : -comparison;
View Full Code Here


  // Test the key grouping and value ordering comparators
  @Test
  public void testComparators() {
    // group comparator - group by first character
    RawComparator groupComparator = new RawComparator() {
      @Override
      public int compare(Object o1, Object o2) {
        return o1.toString().substring(0, 1).compareTo(
            o2.toString().substring(0, 1));
      }

      @Override
      public int compare(byte[] arg0, int arg1, int arg2, byte[] arg3,
          int arg4, int arg5) {
        throw new RuntimeException("Not implemented");
     
    };
   
    // value order comparator - order by second character
    RawComparator orderComparator = new RawComparator() {
      @Override
      public int compare(Object o1, Object o2) {
        return o1.toString().substring(1, 2).compareTo(
            o2.toString().substring(1, 2));
      }
View Full Code Here

    sortPhase.complete();                         // sort is complete
    setPhase(TaskStatus.Phase.REDUCE);
    statusUpdate(umbilical);
    Class keyClass = job.getMapOutputKeyClass();
    Class valueClass = job.getMapOutputValueClass();
    RawComparator comparator = job.getOutputValueGroupingComparator();

    if (useNewApi) {
      runNewReducer(job, umbilical, reporter, rIter, comparator,
                    keyClass, valueClass);
    } else {
View Full Code Here

 
  // Test the key grouping and value ordering comparators
  @Test
  public void testComparators() {
    // group comparator - group by first character
    RawComparator groupComparator = new RawComparator() {
      @Override
      public int compare(Object o1, Object o2) {
        return o1.toString().substring(0, 1).compareTo(
            o2.toString().substring(0, 1));
      }

      @Override
      public int compare(byte[] arg0, int arg1, int arg2, byte[] arg3,
          int arg4, int arg5) {
        throw new RuntimeException("Not implemented");
     
    };
   
    // value order comparator - order by second character
    RawComparator orderComparator = new RawComparator() {
      @Override
      public int compare(Object o1, Object o2) {
        return o1.toString().substring(1, 2).compareTo(
            o2.toString().substring(1, 2));
      }
View Full Code Here

    for(int depth = 0; depth < criteria.getElements().size(); depth++) {
      Field field = schema.getField(depth);
      Field.Type type = field.getType();
      SortElement sortElement = criteria.getElements().get(depth);
      Order sort = sortElement.getOrder();
      RawComparator comparator = sortElement.getCustomComparator();

      if(comparator != null) {
        // Provided specific Comparator. Some field types has different
        // header length and field length.
        int[] lengths1 = getHeaderLengthAndFieldLength(b1, o.offset1, type);
        int[] lengths2 = getHeaderLengthAndFieldLength(b2, o.offset2, type);
        int dataSize1 = lengths1[1];
        int dataSize2 = lengths2[1];
        int totalField1Size = lengths1[0] + dataSize1; // Header size + data
                                                       // size
        int totalField2Size = lengths2[0] + dataSize2; // Header size + data
                                                       // size
        int comparison = comparator.compare(b1, o.offset1, totalField1Size, b2,
            o.offset2, totalField2Size);
        o.offset1 += totalField1Size;
        o.offset2 += totalField2Size;
        if(comparison != 0) {
          return (sort == Order.ASC) ? comparison : -comparison;
View Full Code Here

    sortPhase.complete();                         // sort is complete
    setPhase(TaskStatus.Phase.REDUCE);
    statusUpdate(umbilical);
    Class keyClass = job.getMapOutputKeyClass();
    Class valueClass = job.getMapOutputValueClass();
    RawComparator comparator = job.getOutputValueGroupingComparator();

    if (useNewApi) {
      runNewReducer(job, umbilical, reporter, rIter, comparator,
                    keyClass, valueClass);
    } else {
View Full Code Here

    sortPhase.complete();                         // sort is complete
    setPhase(TaskStatus.Phase.REDUCE);
    statusUpdate(umbilical);
    Class keyClass = job.getMapOutputKeyClass();
    Class valueClass = job.getMapOutputValueClass();
    RawComparator comparator = job.getOutputValueGroupingComparator();

    if (useNewApi) {
      runNewReducer(job, umbilical, reporter, rIter, comparator,
                    keyClass, valueClass);
    } else {
View Full Code Here

    for(int depth = 0; depth < criteria.getElements().size(); depth++) {
      Field field = schema.getField(depth);
      Field.Type type = field.getType();
      SortElement sortElement = criteria.getElements().get(depth);
      Order sort = sortElement.getOrder();
      RawComparator comparator = sortElement.getCustomComparator();

      if(comparator != null) {
        //custom comparator for OBJECT
        int length1 = WritableComparator.readVInt(b1, o.offset1);
        int length2 = WritableComparator.readVInt(b2, o.offset2);
        o.offset1 += WritableUtils.decodeVIntSize(b1[o.offset1]);
        o.offset2 += WritableUtils.decodeVIntSize(b2[o.offset2]);
        int comparison = comparator.compare(b1, o.offset1, length1, b2,
            o.offset2, length2);
        o.offset1 += length1;
        o.offset2 += length2;
        if(comparison != 0) {
          return (sort == Order.ASC) ? comparison : -comparison;
View Full Code Here

    sortPhase.complete();                         // sort is complete
    setPhase(TaskStatus.Phase.REDUCE);
    statusUpdate(umbilical);
    Class keyClass = job.getMapOutputKeyClass();
    Class valueClass = job.getMapOutputValueClass();
    RawComparator comparator = job.getOutputValueGroupingComparator();

    if (useNewApi) {
      runNewReducer(job, umbilical, reporter, rIter, comparator,
                    keyClass, valueClass);
    } else {
View Full Code Here

      LOGGER.info("Samples retrieved, sorting...");
 
      ////////////////////////////////////////////////
      // sort the samples
      ////////////////////////////////////////////////
      RawComparator comparator = job.getOutputKeyComparator ()
      Arrays.sort (samples, comparator);
     
      if( job.getBoolean("mobius.print.sample", false) )
      {
        PrintWriter pw = new PrintWriter(new OutputStreamWriter(new GZIPOutputStream(new BufferedOutputStream(new FileOutputStream(new File(job.get("mobius.sample.file", "./samples.txt.gz")))))));
View Full Code Here

TOP

Related Classes of org.apache.hadoop.io.RawComparator

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is now owned by Oracle Inc. Contact: coftware#gmail.com.