Package org.apache.hadoop.fs

Examples of org.apache.hadoop.fs.FSDataInputStream$Buffer
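The snippets below were collected because they exercise FSDataInputStream (or, for the two closing luaj excerpts, a class whose inner Buffer matches the $Buffer name). Most of the Hadoop examples follow the same open/seek/read/close pattern; as a baseline, here is a minimal sketch of that pattern (the path, the offset, and the surrounding Configuration named conf are hypothetical):

  Path file = new Path("/tmp/example.dat");  // hypothetical path
  FileSystem fs = file.getFileSystem(conf);
  FSDataInputStream in = fs.open(file);
  try {
    in.seek(128L);               // FSDataInputStream is Seekable
    long value = in.readLong();  // and implements DataInput
  } finally {
    in.close();                  // always release the underlying stream
  }

The first example, from Hadoop's MapReduce task code, reads a serialized input split back from a file: the split's class name is stored as a Text string at a known offset, followed by the serialized split itself.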


  @SuppressWarnings("unchecked")
  private <T> T getSplitDetails(Path file, long offset)
      throws IOException {
    FileSystem fs = file.getFileSystem(conf);
    FSDataInputStream inFile = fs.open(file);
    inFile.seek(offset);
    String className = Text.readString(inFile);
    Class<T> cls;
    try {
      cls = (Class<T>) conf.getClassByName(className);
    } catch (ClassNotFoundException ce) {
      IOException wrap = new IOException("Split class " + className +
                                          " not found");
      wrap.initCause(ce);
      throw wrap;
    }
    SerializationFactory factory = new SerializationFactory(conf);
    Deserializer<T> deserializer =
      (Deserializer<T>) factory.getDeserializer(cls);
    deserializer.open(inFile);
    T split = deserializer.deserialize(null);
    long pos = inFile.getPos();
    // Record how many bytes of split metadata were consumed.
    getCounters().findCounter(Task.Counter.SPLIT_RAW_BYTES)
        .increment(pos - offset);
    inFile.close();
    return split;
  }
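The next excerpt opens a text file that may or may not be compressed. CompressionCodecFactory picks a codec from the file's extension; if none matches, the raw FSDataInputStream is handed straight to a LineReader, otherwise a pooled decompressor wraps it first.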


  private LineReader maybeUncompressedPath(Path p)
      throws FileNotFoundException, IOException {
    CompressionCodecFactory codecs = new CompressionCodecFactory(getConf());
    inputCodec = codecs.getCodec(p);
    FileSystem fs = p.getFileSystem(getConf());
    FSDataInputStream fileIn = fs.open(p);

    if (inputCodec == null) {
      return new LineReader(fileIn, getConf());
    } else {
      inputDecompressor = CodecPool.getDecompressor(inputCodec);
      // Hypothetical completion of the truncated branch: wrap the raw
      // stream in the codec's decompressing stream before line-reading.
      return new LineReader(
          inputCodec.createInputStream(fileIn, inputDecompressor), getConf());
    }
  }
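The following fragment is from the Hadoop archives (har) tool: each source file is opened, its bytes are copied into a shared part file, and an index line recording the file's position and length is composed for later lookup.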

        reporter.progress();
      }
      else {
        FileSystem srcFs = srcPath.getFileSystem(conf);
        FileStatus srcStatus = srcFs.getFileStatus(srcPath);
        FSDataInputStream input = srcFs.open(srcStatus.getPath());
        reporter.setStatus("Copying file " + srcStatus.getPath() +
            " to archive.");
        copyData(srcStatus.getPath(), input, partStream, reporter);
        towrite = relPath.toString() + " file " + partname + " " + startPos
            + " " + srcStatus.getLen() + " ";
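This test excerpt verifies that merely opening and reading a file through DistributedFileSystem never starts the client's lease checker thread; leases are only needed for writes, so the checker should stay idle across open(), readLong(), and close().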

      {
        DistributedFileSystem dfs = (DistributedFileSystem)cluster.getFileSystem();
        assertFalse(dfs.dfs.isLeaseCheckerStarted());

        //open and check the file
        FSDataInputStream in = dfs.open(filepath);
        assertFalse(dfs.dfs.isLeaseCheckerStarted());
        assertEquals(millis, in.readLong());
        assertFalse(dfs.dfs.isLeaseCheckerStarted());
        in.close();
        assertFalse(dfs.dfs.isLeaseCheckerStarted());
        dfs.close();
      }
    }
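A small read-back helper from the HDFS tests follows: it opens the file and reads exactly fileLen bytes, closing the stream even if the read fails.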

  void readFile(FileSystem fs, Path path, int fileLen) throws IOException {
    byte [] arr = new byte[fileLen];
    FSDataInputStream in = fs.open(path);
    try {
      in.readFully(arr);  // read exactly fileLen bytes or throw EOFException
    } finally {
      in.close();         // close even on a failed read
    }
  }
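The next excerpt discovers a live JobTracker: the tracker's address is published in a small HDFS file (jtPath) guarded by a lease. Once the lease looks valid, the address is read back through a BufferedReader layered over the FSDataInputStream and used to initialize a JobClient.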

            Thread.sleep(2000);
          } catch (InterruptedException e) {
            // interrupted during backoff; ignored
          }
        } else {
          // Lease is valid; test and return the JobTracker in the jtPath file
          try {
            FSDataInputStream in = fs.open(jtPath);
            BufferedReader br = new BufferedReader(new InputStreamReader(in));
            String jtAddress = br.readLine().trim();
            in.close();
            // Try to connect to the JT listed in jtAddress
            JobConf clientConf = new JobConf(conf);
            clientConf.set("mapred.job.tracker", jtAddress);
            JobClient client = new JobClient();
            client.init(clientConf);
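Here a JSON input stream is prepared with optional decompression: when a codec is present, a decompressor is borrowed from CodecPool and the codec wraps the FSDataInputStream; the (possibly wrapped) stream then feeds Jackson's JsonParser. The borrowed decompressor should eventually be handed back with CodecPool.returnDecompressor().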

    InputStream input;
    if (codec == null) {
      input = fs.open(path);
      decompressor = null;
    } else {
      FSDataInputStream fsdis = fs.open(path);
      decompressor = CodecPool.getDecompressor(codec);
      input = codec.createInputStream(fsdis, decompressor);
    }
    jsonParser = mapper.getJsonFactory().createJsonParser(input);
  }
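The last Hadoop excerpt is a test that writes half a block, sync()s it so readers can see the data, and reads it back. Note the positional form in.readFully(0, buf): it reads at an absolute offset without moving the stream's current position, which FSDataInputStream supports via PositionedReadable.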

    int writeSize = blockSize / 2;
    out.write(new byte[writeSize]);
    out.sync();
   
    FSDataInputStream in = fileSystem.open(file1);
   
    byte[] buf = new byte[4096];
    in.readFully(0, buf);
    in.close();

    waitForBlocks(fileSystem, file1, 1, writeSize);
   
    int blockMapSize = cluster.getDataNodes().get(0).blockScanner.blockMap.size();
    // ... (assertion on blockMapSize elided in the original excerpt) ...
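The final two excerpts come from luaj, matched here because of its Buffer class rather than Hadoop's FSDataInputStream. The first implements Lua's string.format(): it scans the format string byte by byte, copies literal bytes into a Buffer, and dispatches each % conversion to a FormatDesc that formats the corresponding argument.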

  /** Implementation of Lua's string.format(). */
  static Varargs format( Varargs args ) {
    LuaString fmt = args.checkstring( 1 );
    final int n = fmt.length();
    Buffer result = new Buffer(n);
    int arg = 1;
    int c;
   
    for ( int i = 0; i < n; ) {
      switch ( c = fmt.luaByte( i++ ) ) {
      case '\n':
        result.append( "\n" );
        break;
      default:
        result.append( (byte) c );
        break;
      case L_ESC:
        if ( i < n ) {
          if ( ( c = fmt.luaByte( i ) ) == L_ESC ) {
            ++i;
            result.append( (byte)L_ESC );
          } else {
            arg++;
            FormatDesc fdsc = new FormatDesc(args, fmt, i );
            i += fdsc.length;
            switch ( fdsc.conversion ) {
            case 'c':
              fdsc.format( result, (byte)args.checkint( arg ) );
              break;
            case 'i':
            case 'd':
              fdsc.format( result, args.checkint( arg ) );
              break;
            case 'o':
            case 'u':
            case 'x':
            case 'X':
              fdsc.format( result, args.checklong( arg ) );
              break;
            case 'e':
            case 'E':
            case 'f':
            case 'g':
            case 'G':
              fdsc.format( result, args.checkdouble( arg ) );
              break;
            case 'q':
              addquoted( result, args.checkstring( arg ) );
              break;
            case 's': {
              LuaString s = args.checkstring( arg );
              if ( fdsc.precision == -1 && s.length() >= 100 ) {
                result.append( s );
              } else {
                fdsc.format( result, s );
              }
            }  break;
            default:
              error("invalid option '%"+(char)fdsc.conversion+"' to 'format'");
              break;
            }
          }
        }
      }
    }
   
    return result.tostring();
  }
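The second luaj excerpt is the core of string.gsub(): it repeatedly matches the pattern against the source string, appends each replacement (or the next unmatched byte) to a Buffer, and finally returns the rewritten string together with the match count. The opening lines below are a reconstruction, since the excerpt originally began at the second argument.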

  static Varargs gsub( Varargs args ) {
    LuaString src = args.checkstring( 1 );  // reconstructed: source string
    final int srclen = src.length();        // reconstructed: its length
    LuaString p = args.checkstring( 2 );
    LuaValue repl = args.arg( 3 );
    int max_s = args.optint( 4, srclen + 1 );
    final boolean anchor = p.length() > 0 && p.charAt( 0 ) == '^';
   
    Buffer lbuf = new Buffer( srclen );
    MatchState ms = new MatchState( args, src, p );
   
    int soffset = 0;
    int n = 0;
    while ( n < max_s ) {
      ms.reset();
      int res = ms.match( soffset, anchor ? 1 : 0 );
      if ( res != -1 ) {
        n++;
        ms.add_value( lbuf, soffset, res, repl );
      }
      if ( res != -1 && res > soffset )
        soffset = res;
      else if ( soffset < srclen )
        lbuf.append( (byte) src.luaByte( soffset++ ) );
      else
        break;
      if ( anchor )
        break;
    }
    lbuf.append( src.substring( soffset, srclen ) );
    return varargsOf(lbuf.tostring(), valueOf(n));
  }
