Package org.apache.hadoop.fs

Examples of org.apache.hadoop.fs.FileSystem.listPaths()
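
FileSystem.listPaths(Path) lists the entries of a directory as a Path[]. The method is deprecated; later releases replace it with FileSystem.listStatus(Path), which returns FileStatus[] and carries each entry's length and type alongside its path. A minimal sketch of the replacement (the command-line directory argument is a placeholder):

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class ListStatusExample {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        // listStatus is the non-deprecated replacement for listPaths:
        // each FileStatus bundles the path with its length and type.
        for (FileStatus status : fs.listStatus(new Path(args[0]))) {
          System.out.println(status.getPath() + "\t" + status.getLen());
        }
      }
    }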


    //pass a job.jar already included in the hadoop build
    conf.setJar("build/test/testjar/testjob.jar");
    JobClient.runJob(conf);
    StringBuffer result = new StringBuffer();

    Path[] fileList = fs.listPaths(outDir);
    for (int i = 0; i < fileList.length; ++i) {
      BufferedReader file = new BufferedReader(new InputStreamReader(
                                                                     fs.open(fileList[i])));
      String line = file.readLine();
      while (line != null) {
View Full Code Here
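
The loop above is truncated at the first readLine(); completed, the usual pattern appends every line of every output file to the buffer and closes each reader. A sketch, assuming fs, outDir, and result are in scope as in the snippet:

    Path[] fileList = fs.listPaths(outDir);
    for (int i = 0; i < fileList.length; ++i) {
      BufferedReader file = new BufferedReader(new InputStreamReader(
          fs.open(fileList[i])));
      String line = file.readLine();
      while (line != null) {
        result.append(line);   // accumulate the job output
        result.append("\n");
        line = file.readLine();
      }
      file.close();
    }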


      // The following are test cases for listPaths which is a deprecated
      // API. These tests should go away when the API is removed.

      // issue a listPaths on directory /test/mkdirs and verify that the
      // size of the files inside it are valid
      Path[] files = fs.listPaths(dir);
      assertTrue(dir + " should have two files", files.length == 2);
      for (int i = 0; i < files.length; i++) {
        DfsPath dfspath = (DfsPath) files[i];
        assertTrue(files[i] + " should be of size " + (blockSize/4),
                   blockSize/4 == dfspath.getContentsLength());
View Full Code Here

                   blockSize/4 == fs.getContentLength(dfspath));
      }

      // issue a listPath on directory /test and verify that the
      // size returned for /test/mkdirs directory is correct.
      Path[] dirs = fs.listPaths(parentDir);
      assertTrue(parentDir + " should have one sub directory",
                 dirs.length == 1);
      DfsPath dfsdir = (DfsPath) dirs[0];
      assertTrue(dirs[0] + " should be of size " + blockSize/2,
                 fs.getContentLength(dfsdir) == blockSize/2);
View Full Code Here
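
Both DfsPath.getContentsLength() and FileSystem.getContentLength() were later superseded along with listPaths. The same size checks can be written against FileSystem.getContentSummary(), which aggregates the bytes under a path; a sketch assuming fs, dir, and blockSize as in the test above (two files of blockSize/4 each):

    // getContentSummary replaces the older content-length calls:
    // getLength() is the total number of bytes under the path.
    ContentSummary summary = fs.getContentSummary(dir);
    assertTrue(dir + " should hold " + (blockSize / 2) + " bytes",
               summary.getLength() == blockSize / 2);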

  /** Open the output generated by this format. */
  public static SequenceFile.Reader[] getReaders(Configuration conf, Path dir)
    throws IOException {
    FileSystem fs = FileSystem.get(conf);
    Path[] names = fs.listPaths(dir);
   
    // sort names, so that hash partitioning works
    Arrays.sort(names);
   
    SequenceFile.Reader[] parts = new SequenceFile.Reader[names.length];
View Full Code Here
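
Because the part-file names sort into partition order, parts[i] holds the output of reduce partition i, so a key can be routed straight to its reader with the job's partitioner. A usage sketch (the HashPartitioner and the Text-typed key and value are assumptions, not the original code):

    // Route a key to the part file that contains it: after sorting,
    // part i is the output of reduce partition i, so the job's own
    // partitioning function picks the right reader.
    SequenceFile.Reader[] parts = getReaders(conf, dir);
    int partition =
      new HashPartitioner<Text, Text>().getPartition(key, value, parts.length);
    SequenceFile.Reader reader = parts[partition];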

    }
    List<Path> result = new ArrayList<Path>();
    for (Path p: dirs) {
      FileSystem fs = p.getFileSystem(job);
      Path[] matches =
        fs.listPaths(fs.globPaths(p, hiddenFileFilter), hiddenFileFilter);
      for (Path match: matches) {
        result.add(fs.makeQualified(match));
      }
    }
View Full Code Here
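
The same expansion can be written without listPaths: FileSystem.globStatus(Path, PathFilter) expands the pattern, and listStatus lists each match's contents under the same filter. A sketch assuming fs, p, hiddenFileFilter, and result are in scope as above:

    // Expand the glob, then list each match, filtering hidden files
    // at both stages; listStatus on a plain file returns the file
    // itself, so files and directories both come through.
    for (FileStatus dir : fs.globStatus(p, hiddenFileFilter)) {
      for (FileStatus match : fs.listStatus(dir.getPath(), hiddenFileFilter)) {
        result.add(fs.makeQualified(match.getPath()));
      }
    }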

    for (Path p: inputDirs) {
      FileSystem fs = p.getFileSystem(job);
      if (fs.exists(p)) {
        // make sure all paths are files to avoid exception
        // while generating splits
        for (Path subPath : fs.listPaths(p, hiddenFileFilter)) {
          FileSystem subFS = subPath.getFileSystem(job);
          if (!subFS.isFile(subPath)) {
            result.add(new IOException(
                "Input path is not a file : " + subPath));
          } else {
View Full Code Here
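
A FileStatus-based version of the same validation avoids the per-path isFile() round trip, since listStatus already returns the type of each entry. A sketch assuming job, inputDirs, hiddenFileFilter, and result as above:

    for (Path p : inputDirs) {
      FileSystem fs = p.getFileSystem(job);
      if (fs.exists(p)) {
        // listStatus carries the type bit with each entry, so no
        // extra isFile() call per sub-path is needed.
        for (FileStatus sub : fs.listStatus(p, hiddenFileFilter)) {
          if (sub.isDir()) {
            result.add(new IOException(
                "Input path is not a file : " + sub.getPath()));
          }
        }
      }
    }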

        file.close();
         
        job = new StreamJob(argv, mayExit);     
        job.go();
        String line = null;
        Path[] fileList = fileSys.listPaths(new Path(OUTPUT_DIR));
        for (int i = 0; i < fileList.length; i++){
          System.out.println(fileList[i].toString());
          BufferedReader bread =
            new BufferedReader(new InputStreamReader(fileSys.open(fileList[i])));
          line = bread.readLine();
View Full Code Here


    conf.setNumMapTasks(numMaps);
    conf.setNumReduceTasks(numReduces);
    JobClient.runJob(conf);
    StringBuffer result = new StringBuffer();
    {
      Path[] fileList = fs.listPaths(outDir);
      for(int i=0; i < fileList.length; ++i) {
        BufferedReader file =
          new BufferedReader(new InputStreamReader(fs.open(fileList[i])));
        String line = file.readLine();
        while (line != null) {
View Full Code Here

    ArrayList segs = new ArrayList();
    long sliceSize = 0;
    boolean filter = false;
    for (int i = 1; i < args.length; i++) {
      if (args[i].equals("-dir")) {
        Path[] files = fs.listPaths(new Path(args[++i]), new PathFilter() {
          public boolean accept(Path f) {
            try {
              if (fs.isDirectory(f)) return true;
            } catch (IOException e) {}
View Full Code Here
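
The anonymous PathFilter above is cut off mid-definition. A complete filter in the same shape accepts directories and excludes anything unreadable; the explicit false fallback is an assumption about the truncated remainder, not the original code:

    Path[] files = fs.listPaths(new Path(args[++i]), new PathFilter() {
      public boolean accept(Path f) {
        try {
          // keep segment directories, skip plain files
          return fs.isDirectory(f);
        } catch (IOException e) {
          // unreadable paths are silently excluded
          return false;
        }
      }
    });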
