Package org.apache.hadoop.io.serializer

Examples of org.apache.hadoop.io.serializer.Deserializer
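
Before diving into the project excerpts below, here is a minimal, self-contained sketch of the lifecycle every one of them follows: obtain a Deserializer from SerializationFactory, open() it on a stream, call deserialize(), and close() when you own the stream. The class name is illustrative and not taken from any of the excerpts; the Hadoop APIs used are the real ones.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.serializer.Deserializer;
import org.apache.hadoop.io.serializer.SerializationFactory;
import org.apache.hadoop.io.serializer.Serializer;

public class DeserializerLifecycleSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    SerializationFactory factory = new SerializationFactory(conf);

    // Serialize a Text value into an in-memory buffer.
    Serializer<Text> serializer = factory.getSerializer(Text.class);
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    serializer.open(bytes);
    serializer.serialize(new Text("hello"));
    serializer.close();

    // Deserialize it back: open, deserialize, close.
    Deserializer<Text> deserializer = factory.getDeserializer(Text.class);
    deserializer.open(new ByteArrayInputStream(bytes.toByteArray()));
    Text restored = deserializer.deserialize(null); // null asks for a new instance
    deserializer.close();

    System.out.println(restored); // prints: hello
  }
}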



  public void readFields(ITuple tuple, Deserializer[] customDeserializers) throws IOException {
    Schema schema = tuple.getSchema();
    // Walk the schema and decode each field according to its declared type.
    for(int index = 0; index < schema.getFields().size(); index++) {
      Deserializer customDeser = customDeserializers[index];
      Field field = schema.getField(index);
      switch(field.getType()) {
      case INT:
        tuple.set(index, WritableUtils.readVInt(input));
        break;
View Full Code Here
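
The INT branch above does not go through a custom Deserializer at all: it uses Hadoop's variable-length integer encoding via WritableUtils. A standalone round trip of that encoding (unrelated to the ITuple/Schema classes in the excerpt) looks like this:

import java.io.IOException;

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.WritableUtils;

public class VIntRoundTrip {
  public static void main(String[] args) throws IOException {
    // Encode an int with Hadoop's variable-length (1 to 5 byte) format.
    DataOutputBuffer out = new DataOutputBuffer();
    WritableUtils.writeVInt(out, 12345);

    // Decode it again from the same bytes.
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), out.getLength());
    System.out.println(WritableUtils.readVInt(in)); // prints: 12345
  }
}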


    inputFormatClass = (Class<? extends InputFormat<?, ?>>) readClass(in);
    mapperClass = (Class<? extends Mapper<?, ?, ?, ?>>) readClass(in);
    // Create an empty split of the recorded class, then let the Deserializer
    // chosen by SerializationFactory populate it from the stream.
    inputSplit = (InputSplit) ReflectionUtils.newInstance(inputSplitClass, conf);
    SerializationFactory factory = new SerializationFactory(conf);
    Deserializer deserializer = factory.getDeserializer(inputSplitClass);
    deserializer.open((DataInputStream) in);
    inputSplit = (InputSplit) deserializer.deserialize(inputSplit);
  }
View Full Code Here
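
The write-side counterpart of the readFields method above is not shown in the excerpt. A hedged sketch of what it amounts to (class and method names here are illustrative, not from the excerpt): look the Serializer up by the split's concrete class, open it on the destination stream, and serialize the split; closing is left to whoever owns the stream.

import java.io.IOException;
import java.io.OutputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.serializer.SerializationFactory;
import org.apache.hadoop.io.serializer.Serializer;
import org.apache.hadoop.mapreduce.InputSplit;

public class SplitWriteSketch {

  // Mirror image of the read side: SerializationFactory picks the Serializer
  // for the split's class based on the io.serializations setting.
  @SuppressWarnings({ "rawtypes", "unchecked" })
  public static void writeSplit(InputSplit split, OutputStream out, Configuration conf)
      throws IOException {
    SerializationFactory factory = new SerializationFactory(conf);
    Serializer serializer = factory.getSerializer(split.getClass());
    serializer.open(out);
    serializer.serialize(split);
    // Deliberately not closed: the caller may still need the stream.
  }
}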

        String splitClassName = is.readUTF();
        try {
            Class splitClass = conf.getClassByName(splitClassName);
            SerializationFactory sf = new SerializationFactory(conf);
            // The correct call sequence for a Deserializer is open() then deserialize();
            // we must not close it here, since that would close the underlying stream.
            Deserializer d = sf.getDeserializer(splitClass);
            d.open((InputStream) is);
            wrappedSplits = new InputSplit[splitLen];
            for (int i = 0; i < splitLen; i++)
            {
                wrappedSplits[i] = (InputSplit)ReflectionUtils.newInstance(splitClass, conf);
                d.deserialize(wrappedSplits[i]);
            }
        } catch (ClassNotFoundException e) {
            throw new IOException(e);
        }
       
View Full Code Here
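
The loop above illustrates the intended reuse pattern: open the Deserializer once, then call deserialize() once per object on the same stream. A self-contained sketch of that pattern with a Writable type (names invented for illustration):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.serializer.Deserializer;
import org.apache.hadoop.io.serializer.SerializationFactory;
import org.apache.hadoop.io.serializer.Serializer;

public class StreamOfObjectsSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    SerializationFactory factory = new SerializationFactory(conf);

    // Write three IntWritables back to back into one buffer.
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    Serializer<IntWritable> serializer = factory.getSerializer(IntWritable.class);
    serializer.open(buffer);
    for (int i = 0; i < 3; i++) {
      serializer.serialize(new IntWritable(i));
    }
    serializer.close();

    // Open once, deserialize once per object, just like the split loop above.
    Deserializer<IntWritable> deserializer = factory.getDeserializer(IntWritable.class);
    deserializer.open(new ByteArrayInputStream(buffer.toByteArray()));
    IntWritable value = new IntWritable();
    for (int i = 0; i < 3; i++) {
      value = deserializer.deserialize(value); // reuses the passed-in instance
      System.out.println(value.get());
    }
    deserializer.close(); // safe here because this code owns the stream
  }
}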

    }

    Class klazz = obj.getClass();
    Object out = null;
    Serializer s = serializationFactory.getSerializer(klazz);
    Deserializer ds = serializationFactory.getDeserializer(klazz);

    try {
      s.open(outBuffer);
      ds.open(inBuffer);

      // Serialize obj into the in-memory output buffer.
      outBuffer.reset();
      s.serialize(obj);

      // Point the input buffer at the freshly written bytes.
      byte [] data = outBuffer.getData();
      int len = outBuffer.getLength();
      inBuffer.reset(data, len);

      // out is null, so the Deserializer creates and returns a new instance.
      out = ds.deserialize(out);

      return out;
    } finally {
      try {
        s.close();
      } catch (IOException ioe) {
        // ignore this; we're closing.
      }

      try {
        ds.close();
      } catch (IOException ioe) {
        // ignore this; we're closing.
      }
    }
  }
View Full Code Here
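
The copy helper above relies on three fields that the excerpt does not show: outBuffer, inBuffer and serializationFactory. A plausible setup for them (field names carried over from the excerpt, everything else an assumption) uses Hadoop's resettable in-memory buffers:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.serializer.SerializationFactory;

// Hypothetical enclosing class for the copy helper shown above.
public class SerializationCopier {
  private final Configuration conf = new Configuration();

  // Reusable in-memory buffers: serialize into outBuffer, then point
  // inBuffer at the same bytes to read the copy back out.
  private final DataOutputBuffer outBuffer = new DataOutputBuffer();
  private final DataInputBuffer inBuffer = new DataInputBuffer();

  // Chooses a Serialization per class, driven by the io.serializations setting.
  private final SerializationFactory serializationFactory = new SerializationFactory(conf);
}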


    conf.readFields(in);
    inputFormatClass = (Class<? extends InputFormat<?, ?>>) readClass(in);
    Class<? extends InputSplit> inputSplitClass = (Class<? extends InputSplit>) readClass(in);
    inputSplit = (InputSplit) ReflectionUtils.newInstance(inputSplitClass, conf);
    SerializationFactory factory = new SerializationFactory(conf);
    Deserializer deserializer = factory.getDeserializer(inputSplitClass);
    deserializer.open((DataInputStream) in);
    inputSplit = (InputSplit) deserializer.deserialize(inputSplit);
  }
View Full Code Here


        if (writableVersionOfClazz != null) {
          clazz = writableVersionOfClazz;
        }

        // Now deserialize using the Hadoop Deserializer.
        Deserializer deserializer = getNewDeserializer(clazz);
        deserializer.open(new ByteArrayInputStream(bytes));
        Object o = clazz.newInstance();
        o = deserializer.deserialize(o);
        deserializer.close();

        if (writableVersionOfClazz != null) {
          return fromWritable(o);
        } else {
          return o;
View Full Code Here
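
getNewDeserializer(clazz) is not shown in this excerpt; in most codebases a helper like it boils down to a SerializationFactory lookup keyed by the class. A hedged sketch, folded into a one-shot bytes-to-object conversion (class and method names invented for illustration):

import java.io.ByteArrayInputStream;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.serializer.Deserializer;
import org.apache.hadoop.io.serializer.SerializationFactory;

public class BytesToObjectSketch {

  @SuppressWarnings({ "rawtypes", "unchecked" })
  public static Object fromBytes(byte[] bytes, Class clazz, Configuration conf)
      throws IOException {
    SerializationFactory factory = new SerializationFactory(conf);
    Deserializer deserializer = factory.getDeserializer(clazz);
    deserializer.open(new ByteArrayInputStream(bytes));
    try {
      // Passing null asks the Deserializer for a fresh instance, which plays
      // the same role as the clazz.newInstance() call in the excerpt above.
      return deserializer.deserialize(null);
    } finally {
      deserializer.close(); // safe: this code owns the ByteArrayInputStream
    }
  }
}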

    inputFormatFile = Text.readString(in);
    inputProcessorFile = Text.readString(in);
    inputSplit = (InputSplit) ReflectionUtils
       .newInstance(inputSplitClass, conf);
    SerializationFactory factory = new SerializationFactory(conf);
    Deserializer deserializer = factory.getDeserializer(inputSplitClass);
    deserializer.open((DataInputStream)in);
    inputSplit = (InputSplit)deserializer.deserialize(inputSplit);
  }
View Full Code Here

      }
    }

    // Field-by-field deserialization
    for(int index = 0; index < schema.getFields().size(); index++) {
      Deserializer customDeser = customDeserializers[index];
      Field field = schema.getField(index);

      // Null handling
      if(field.isNullable() && nullsAbsolute.flags[index]) {
        // Null field: nothing to deserialize.
View Full Code Here
