Package org.apache.hadoop.fs

Examples of org.apache.hadoop.fs.FileSystem.open()
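
The excerpts below all follow the same basic idiom: resolve a FileSystem for a Path, call open() to obtain an FSDataInputStream, read from it, and close it when done. As a minimal, self-contained sketch of that idiom (the path below is only an illustrative placeholder, not taken from any of the excerpts):

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class FileSystemOpenExample {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        Path path = new Path("/tmp/example.txt"); // hypothetical input file
        FileSystem fs = path.getFileSystem(conf); // resolve the FileSystem backing this path
        FSDataInputStream in = fs.open(path);     // open() returns a seekable FSDataInputStream
        try {
            BufferedReader reader = new BufferedReader(new InputStreamReader(in, "UTF-8"));
            String line;
            while ((line = reader.readLine()) != null) {
                System.out.println(line);
            }
        } finally {
            in.close(); // always release the underlying stream
        }
    }
}

The same try/finally shape recurs in most of the excerpts that follow.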


      throws IOException {
    List<Path> result = new ArrayList<Path>();
    FileSystem fs = srcList.getFileSystem(conf);
    BufferedReader input = null;
    try {
      // Open the source-list file and wrap it in a reader; each line names one Path.
      input = new BufferedReader(new InputStreamReader(fs.open(srcList)));
      String line = input.readLine();
      while (line != null) {
        result.add(new Path(line));
        line = input.readLine();
      }
View Full Code Here


    try {
      for (int pos = 0; pos < infile.getLen(); pos += rand.nextInt(flen / 8)) {
        // read from random positions, verifying that there exist two sequential
        // lines as written in writeSplitTestFile
        final SplitCompressionInputStream in =
          codec.createInputStream(fs.open(infile.getPath()), dcmp,
              pos, flen, SplittableCompressionCodec.READ_MODE.BYBLOCK);
        if (in.getAdjustedStart() >= flen) {
          break;
        }
        LOG.info("SAMPLE " + in.getAdjustedStart() + "," + in.getAdjustedEnd());
View Full Code Here

    // Pick the compression codec for the file and borrow a matching decompressor from the pool.
    CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
    CompressionCodec codec = ccf.getCodec(f);
    Decompressor decompressor = CodecPool.getDecompressor(codec);
    FileSystem fs = FileSystem.getLocal(conf);
    InputStream is = fs.open(f);
    is = codec.createInputStream(is, decompressor);
    BufferedReader br = new BufferedReader(new InputStreamReader(is));
    String line = br.readLine();
    assertEquals("Didn't get the same message back!", msg, line);
    br.close();
View Full Code Here
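
The excerpt above ends before the pooled decompressor is returned; a self-contained sketch of the complete read-back pattern, using a hypothetical gzipped local file, might look like this:

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.io.compress.Decompressor;

public class CompressedReadExample {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        Path f = new Path("/tmp/message.txt.gz");        // hypothetical compressed file
        CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
        CompressionCodec codec = ccf.getCodec(f);        // chosen from the file extension; null if unknown
        Decompressor decompressor = CodecPool.getDecompressor(codec);
        FileSystem fs = FileSystem.getLocal(conf);
        try {
            InputStream is = codec.createInputStream(fs.open(f), decompressor);
            BufferedReader br = new BufferedReader(new InputStreamReader(is, "UTF-8"));
            String line = br.readLine();
            System.out.println(line);
            br.close();
        } finally {
            CodecPool.returnDecompressor(decompressor);  // return the pooled decompressor in all cases
        }
    }
}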

        raFile.close();
      }
    }
    // Read the file to trigger reportBadBlocks
    try {
      IOUtils.copyBytes(fs.open(file1), new IOUtils.NullOutputStream(), conf,
                        true);
    } catch (IOException ie) {
      // Ignore exception
    }
View Full Code Here

            }
        } finally {
            output.close();
        }
        // Open the temporary file and stream its contents into the writer's next output.
        FileStatus status = fs.getFileStatus(new Path(temp.toURI()));
        FSDataInputStream src = fs.open(status.getPath());
        try {
            OutputStream dst = writer.openNext(status);
            byte[] buf = new byte[256];
            while (true) {
                int read = src.read(buf);
View Full Code Here

    // Collect every line (decoded as UTF-8) from all files that match the target pattern.
    private List<String> get(String target) throws IOException {
        FileSystem fs = FileSystem.get(tester.configuration());
        List<String> results = Lists.create();
        for (Path path : find(target)) {
            InputStream input = fs.open(path);
            try {
                Scanner s = new Scanner(new InputStreamReader(input, "UTF-8"));
                while (s.hasNextLine()) {
                    results.add(s.nextLine());
                }
View Full Code Here

                    "Opening temporary input: {0} (fs={1})",
                    path,
                    fs.getUri()));
        }
        // Writable model types are read through the temporary file format; anything else falls back to a SequenceFile.
        if (Writable.class.isAssignableFrom(dataType)) {
            return (ModelInput<V>) new TemporaryFileInput<Writable>(fs.open(path), 0);
        }
        SequenceFile.Reader reader = new SequenceFile.Reader(fs, path, conf);
        return (ModelInput<V>) new SequenceFileModelInput<Writable>(reader);
    }
View Full Code Here

                Path path,
                long offset,
                long fragmentSize,
                Counter counter) throws IOException, InterruptedException {
            FileSystem fs = FileSystem.get(path.toUri(), getConf());
            // Open the file; the fragment's starting offset is applied with seek() below.
            FSDataInputStream in = fs.open(path);
            boolean succeed = false;
            try {
                in.seek(offset);
                ModelInput<StringBuilder> result = format.createInput(
                        dataType,
View Full Code Here
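
The fragment above positions the stream at the fragment's offset immediately after opening it and closes the stream only if setup fails, since the caller takes ownership on success. A stripped-down sketch of that idiom, with a placeholder method name, path, and offset, could be:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class OpenAtOffsetExample {
    // Open 'path' and position the stream at 'offset'; close the stream if positioning fails.
    static FSDataInputStream openAt(Path path, long offset, Configuration conf) throws IOException {
        FileSystem fs = FileSystem.get(path.toUri(), conf);
        FSDataInputStream in = fs.open(path);
        boolean succeed = false;
        try {
            in.seek(offset);      // FSDataInputStream supports absolute positioning
            succeed = true;
            return in;            // on success the caller owns (and must close) the stream
        } finally {
            if (!succeed) {
                in.close();       // release the stream if seek() failed
            }
        }
    }
}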

                            "Finish creating transaction info: job={0}, path={1}",
                            jobContext.getJobID(),
                            fs.makeQualified(transactionInfo)));
                }
                if (LOG.isTraceEnabled()) {
                    // At trace level, dump the freshly written transaction info file back to the log.
                    FSDataInputStream input = fs.open(transactionInfo);
                    try {
                        Scanner scanner = new Scanner(
                                new InputStreamReader(input, HadoopDataSourceUtil.COMMENT_CHARSET));
                        while (scanner.hasNextLine()) {
                            String line = scanner.nextLine();
View Full Code Here

        List<String> comment = new ArrayList<String>();
        Path commitMarkPath = HadoopDataSourceUtil.getCommitMarkPath(getConf(), executionId);
        FileSystem fs = path.getFileSystem(getConf());
        boolean committed = fs.exists(commitMarkPath);
        try {
            // Read every line of the file as comment text.
            FSDataInputStream input = fs.open(path);
            try {
                Scanner scanner = new Scanner(new InputStreamReader(input, HadoopDataSourceUtil.COMMENT_CHARSET));
                while (scanner.hasNextLine()) {
                    comment.add(scanner.nextLine());
                }
View Full Code Here
