Package org.apache.hadoop.fs

Examples of org.apache.hadoop.fs.FSDataInputStream$Buffer


  public ReaderImpl(FileSystem fs, Path path, Configuration conf) throws IOException {
    this.fileSystem = fs;
    this.path = path;
    this.conf = conf;
    FSDataInputStream file = fs.open(path);
    long size = fs.getFileStatus(path).getLen();
    // Read the tail of the file, which holds the footer and the postscript.
    int readSize = (int) Math.min(size, DIRECTORY_SIZE_GUESS);
    ByteBuffer buffer = ByteBuffer.allocate(readSize);
    InStream.read(file, size - readSize, buffer.array(), buffer.arrayOffset() + buffer.position(),
        buffer.remaining());
    // The last byte is the postscript length; mask with 0xff since Java bytes are signed.
    int psLen = buffer.get(readSize - 1) & 0xff;
    int psOffset = readSize - 1 - psLen;
    CodedInputStream in = CodedInputStream.newInstance(buffer.array(),
      buffer.arrayOffset() + psOffset, psLen);
    OrcProto.PostScript ps = OrcProto.PostScript.parseFrom(in);
    int footerSize = (int) ps.getFooterLength();
    bufferSize = (int) ps.getCompressionBlockSize();
    switch (ps.getCompression()) {
      case NONE:
        compressionKind = CompressionKind.NONE;
        break;
      case ZLIB:
        compressionKind = CompressionKind.ZLIB;
        break;
      case SNAPPY:
        compressionKind = CompressionKind.SNAPPY;
        break;
      case LZO:
        compressionKind = CompressionKind.LZO;
        break;
      default:
        throw new IllegalArgumentException("Unknown compression");
    }
    codec = WriterImpl.createCodec(compressionKind);

    InputStream instream = InStream.create("footer", file, size - 1 - psLen - footerSize, footerSize,
        codec, bufferSize);
    footer = OrcProto.Footer.parseFrom(instream);
    inspector = new OrcLazyRowObjectInspector(0, footer.getTypesList());
    file.close();
  }
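FSDataInputStream implements PositionedReadable, so a tail read like the one above can also be written as a single positional readFully that never moves the stream's own offset. A minimal self-contained sketch (class and method names are illustrative, not part of the ORC reader):

  import java.io.IOException;
  import org.apache.hadoop.fs.FSDataInputStream;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;

  public class TailRead {
    // Reads the last tailSize bytes of a file (or the whole file if shorter).
    static byte[] readTail(FileSystem fs, Path path, int tailSize) throws IOException {
      long size = fs.getFileStatus(path).getLen();
      int toRead = (int) Math.min(size, tailSize);
      byte[] buf = new byte[toRead];
      try (FSDataInputStream in = fs.open(path)) {
        // Positional readFully: fills buf from the given offset or throws EOFException.
        in.readFully(size - toRead, buf, 0, toRead);
      }
      return buf;
    }
  }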


  /**
   * Checks whether the file at the given path looks like an ORC file.
   */
  private static void checkIfORC(FileSystem fs, Path path) throws IOException {
    // hardcoded to 40 because "SEQ-org.apache.hadoop.hive.ql.io.RCFile", the header, is of 40 chars
    final int buffLen = 40;
    final byte header[] = new byte[buffLen];
    final FSDataInputStream file = fs.open(path);
    final long fileLength = fs.getFileStatus(path).getLen();
    int sizeToBeRead = buffLen;
    if (buffLen > fileLength) {
      sizeToBeRead = (int)fileLength;
    }

    IOUtils.readFully(file, header, 0, sizeToBeRead);
    file.close();

    // Decode only the bytes actually read, with an explicit charset, so short
    // files and the platform default encoding cannot skew the comparison.
    final String headerString = new String(header, 0, sizeToBeRead, StandardCharsets.US_ASCII);
    if (headerString.startsWith("ORC")) {
      LOG.error("Error while parsing the footer of the file : " + path);
    } else {
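The fragment above is truncated, but the underlying pattern is a magic-byte probe at the head of the file. A self-contained sketch of that pattern, assuming the three-byte "ORC" magic string (helper names are illustrative):

  import java.io.IOException;
  import java.nio.charset.StandardCharsets;
  import java.util.Arrays;
  import org.apache.hadoop.fs.FSDataInputStream;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;

  public class MagicProbe {
    private static final byte[] ORC_MAGIC = "ORC".getBytes(StandardCharsets.US_ASCII);

    // Returns true if the file starts with the "ORC" magic bytes.
    static boolean startsWithOrcMagic(FileSystem fs, Path path) throws IOException {
      if (fs.getFileStatus(path).getLen() < ORC_MAGIC.length) {
        return false; // too short to contain the magic
      }
      byte[] buf = new byte[ORC_MAGIC.length];
      try (FSDataInputStream in = fs.open(path)) {
        in.readFully(0, buf); // positional read: whole buffer or EOFException
      }
      return Arrays.equals(buf, ORC_MAGIC);
    }
  }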

                        }
                    });

                    if(storeFiles != null && storeFiles.length > 0) {
                        Arrays.sort(storeFiles, new IndexFileLastComparator());
                        FSDataInputStream input = null;

                        for(FileStatus file: storeFiles) {
                            try {
                                input = outputFs.open(file.getPath());
                                byte[] fileCheckSum = new byte[CheckSum.checkSumLength(this.checkSumType)];
                                // read() may return fewer bytes than requested;
                                // readFully() fills the buffer or throws EOFException.
                                input.readFully(fileCheckSum);
                                logger.debug("Checksum for file " + file.getPath() + " - "
                                             + new String(Hex.encodeHex(fileCheckSum)));
                                checkSumGenerator.update(fileCheckSum);
                            } catch(Exception e) {
                                logger.error("Error while reading checksum file " + e.getMessage(),
                                             e);
                            } finally {
                                if(input != null)
                                    input.close();
                            }
                            outputFs.delete(file.getPath(), false);
                        }

                        metadata.add(ReadOnlyStorageMetadata.CHECKSUM_TYPE,
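The null-checked finally block above predates try-with-resources; since FSDataInputStream is Closeable, the same read can be written more compactly. A minimal sketch (names are illustrative, not Voldemort's API):

  import java.io.IOException;
  import org.apache.hadoop.fs.FSDataInputStream;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;

  public class ChecksumHeader {
    // Reads a fixed-length checksum from the head of a file; the stream is
    // closed even if readFully throws.
    static byte[] readChecksumHeader(FileSystem fs, Path path, int length) throws IOException {
      byte[] checksum = new byte[length];
      try (FSDataInputStream in = fs.open(path)) {
        in.readFully(checksum);
      }
      return checksum;
    }
  }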

    compressionCodecs = new CompressionCodecFactory(job);
    final CompressionCodec codec = compressionCodecs.getCodec(file);

    // open the file and seek to the start of the split
    FileSystem fs = file.getFileSystem(job);
    FSDataInputStream fileIn = fs.open(split.getPath());
    boolean skipFirstLine = false;
    if (codec != null) {
      // A compressed stream cannot be split: read it end to end.
      in = new LineReader(codec.createInputStream(fileIn), job);
      end = Long.MAX_VALUE;
    } else {
      if (start != 0) {
        // Back up one byte and discard the (possibly partial) first line;
        // the previous split emits the line that straddles the boundary.
        skipFirstLine = true;
        --start;
        fileIn.seek(start);
      }
      in = new LineReader(fileIn, job);
    }
    if (skipFirstLine) { // skip first line and re-establish "start".
      start += in.readLine(new Text(), 0,
          (int) Math.min((long) Integer.MAX_VALUE, end - start));
    }
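The branch above is the classic LineRecordReader dispatch: a codec-backed stream cannot seek to the split start, while a raw FSDataInputStream can. The same decision, isolated into a small self-contained helper (names are illustrative):

  import java.io.IOException;
  import java.io.InputStream;
  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.FSDataInputStream;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.io.compress.CompressionCodec;
  import org.apache.hadoop.io.compress.CompressionCodecFactory;

  public class OpenMaybeCompressed {
    // Opens a file, decompressing transparently when its suffix maps to a codec.
    static InputStream open(Configuration conf, Path path) throws IOException {
      FileSystem fs = path.getFileSystem(conf);
      FSDataInputStream raw = fs.open(path);
      CompressionCodec codec = new CompressionCodecFactory(conf).getCodec(path);
      return codec == null ? raw : codec.createInputStream(raw);
    }
  }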

    expectedOutput.add("/iDELIM9000");
    expectedOutput.add("/jDELIM10000");
    int count = 0;
    for (FileStatus fileStat: status) {
      logger.debug("File status is " + fileStat.getPath());
      FSDataInputStream in = outputFS.open(fileStat.getPath());
      String line = null;
      // DataInputStream.readLine() is deprecated but adequate for ASCII test fixtures.
      while ((line = in.readLine()) != null) {
        logger.debug("Output is " + line);
        assertTrue("Matched output " + line, expectedOutput.contains(line));
        expectedOutput.remove(line);
        count++;
      }
      in.close();
    }
    assertEquals(10, count);
  }

    expectedOutput.add("/iDELIM9000DELIM10");
    expectedOutput.add("/jDELIM10000DELIM10");
    int count = 0;
    for (FileStatus fileStat: status) {
      logger.debug("File status is " + fileStat.getPath());
      FSDataInputStream in = outputFS.open(fileStat.getPath());
      String line = null;
      // DataInputStream.readLine() is deprecated but adequate for ASCII test fixtures.
      while ((line = in.readLine()) != null) {
        logger.debug("Output is " + line);
        assertTrue("Matched output " + line, expectedOutput.contains(line));
        expectedOutput.remove(line);
        count++;
      }
      in.close();
    }
    assertEquals(10, count);
  }
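Both tests rely on the deprecated DataInputStream.readLine(), which assumes one byte per character. A charset-safe equivalent wraps the opened stream in a BufferedReader; a minimal sketch:

  import java.io.BufferedReader;
  import java.io.IOException;
  import java.io.InputStreamReader;
  import java.nio.charset.StandardCharsets;
  import java.util.ArrayList;
  import java.util.List;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;

  public class ReadLines {
    // Reads all lines of a file on any Hadoop FileSystem as UTF-8.
    static List<String> readLines(FileSystem fs, Path path) throws IOException {
      List<String> lines = new ArrayList<>();
      try (BufferedReader reader = new BufferedReader(
          new InputStreamReader(fs.open(path), StandardCharsets.UTF_8))) {
        String line;
        while ((line = reader.readLine()) != null) {
          lines.add(line);
        }
      }
      return lines;
    }
  }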

  @Override
  public FSDataInputStream open(Path f, int bufferSize) throws IOException {
    // Performs an HTTP GET against the Hoop server and wraps the response
    // body so callers get a standard FSDataInputStream.
    Map<String, String> params = new HashMap<String, String>();
    HttpURLConnection conn = getConnection("GET", params, f);
    validateResponse(conn, HttpURLConnection.HTTP_OK);
    return new FSDataInputStream(new HoopFSDataInputStream(conn.getInputStream(), bufferSize));
  }
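Callers never see the HTTP plumbing; they resolve the file system from a URI and get back a plain FSDataInputStream. A hedged usage sketch (the URI scheme, host, and path are illustrative):

  import java.io.IOException;
  import java.net.URI;
  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.FSDataInputStream;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;

  public class HoopClientSketch {
    // Opens a file through whatever FileSystem the URI scheme resolves to.
    static int readFirstByte(URI fsUri, String file) throws IOException {
      FileSystem fs = FileSystem.get(fsUri, new Configuration());
      try (FSDataInputStream in = fs.open(new Path(file), 4096)) {
        return in.read();
      }
    }
  }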

    if (fs.exists(vertify)) {
        int bytesRead = 0;               // plain int; no need for boxed Integer
        final int size = 10240;          // chunk size per read
        final int maxsize = 1024 * 1024; // stop after at most 1 MB
        byte[] buff = new byte[size];
        FSDataInputStream in = fs.open(vertify);

        while (true) {
            int num = in.read(buff, 0, size);
            if (num < 0) {
                break; // end of file
            }
            bytesRead += num;
            // NB: decoding per chunk can split a multi-byte character at a
            // chunk boundary; safe only for single-byte encodings.
            rtn.append(new String(buff, 0, num, ENCODE_NAME));
            if (bytesRead >= maxsize) {
                break;
            }
        }
        in.close();
    }
    return rtn.toString().trim();
    }
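Decoding each chunk separately, as above, can split a multi-byte character at a chunk boundary. A sketch that buffers the capped prefix and decodes it once (helper name and the cap are illustrative):

  import java.io.ByteArrayOutputStream;
  import java.io.IOException;
  import java.nio.charset.Charset;
  import org.apache.hadoop.fs.FSDataInputStream;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;

  public class CappedRead {
    // Reads at most maxBytes from the file and decodes them in a single pass.
    static String readPrefix(FileSystem fs, Path path, int maxBytes, Charset cs)
        throws IOException {
      ByteArrayOutputStream out = new ByteArrayOutputStream();
      byte[] buff = new byte[10240];
      try (FSDataInputStream in = fs.open(path)) {
        int remaining = maxBytes;
        while (remaining > 0) {
          int num = in.read(buff, 0, Math.min(buff.length, remaining));
          if (num < 0) {
            break; // end of file
          }
          out.write(buff, 0, num);
          remaining -= num;
        }
      }
      return new String(out.toByteArray(), cs); // one decode avoids split characters
    }
  }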

        int bytesRead = 0;
        final int size = 10240;
        final int maxsize = 1024 * 1024;
        byte[] buff = new byte[size];
        FSDataInputStream in = fs.open(vertify);

        while (true) {
            int num = in.read(buff, 0, size);
            if (num < 0) {
                break; // end of file
            }
            bytesRead += num;
            rtn.append(new String(buff, 0, num, ENCODE_NAME));
            if (bytesRead >= maxsize) {
                break;
            }
        }
        in.close();
        LOG.info("readReadTimeTs:" + rtn.toString() + ":" + file.toString());

        // trim() guards against stray whitespace breaking the parse
        return Long.parseLong(rtn.toString().trim());
    }
   

     * @param zipFilePath path of the zip archive on the source file system
     * @param destDir directory on the destination file system to extract into
     * @throws Exception
     */
    public static void unZip(FileSystem fs, String zipFilePath, FileSystem fs2, String destDir) throws Exception {
        FSDataInputStream in = fs.open(new Path(zipFilePath));
        long length = fs.getFileStatus(new Path(zipFilePath)).getLen();

        ZipFile zipFile = new ZipFile(in, length, CHINESE_CHARSET, true);
        Enumeration<?> emu = zipFile.getEntries();
        BufferedInputStream bis;
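The fragment stops before the extraction loop. A self-contained sketch of the same idea built on the standard java.util.zip.ZipInputStream, which streams entries instead of seeking and so needs no length-aware ZipFile wrapper (GBK stands in for the fragment's CHINESE_CHARSET, an assumption):

  import java.io.IOException;
  import java.io.OutputStream;
  import java.nio.charset.Charset;
  import java.util.zip.ZipEntry;
  import java.util.zip.ZipInputStream;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.io.IOUtils;

  public class UnzipSketch {
    // Extracts every entry of a zip archive on fs into destDir on fs2.
    static void unZip(FileSystem fs, Path zipPath, FileSystem fs2, Path destDir)
        throws IOException {
      // GBK is assumed here as the equivalent of CHINESE_CHARSET above.
      try (ZipInputStream zin = new ZipInputStream(fs.open(zipPath), Charset.forName("GBK"))) {
        ZipEntry entry;
        while ((entry = zin.getNextEntry()) != null) {
          // NB: production code should reject entry names containing ".." (zip slip).
          Path target = new Path(destDir, entry.getName());
          if (entry.isDirectory()) {
            fs2.mkdirs(target);
            continue;
          }
          try (OutputStream out = fs2.create(target)) {
            // Copies until the end of the current entry; keep zin open (close=false).
            IOUtils.copyBytes(zin, out, 4096, false);
          }
        }
      }
    }
  }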
