Examples of HdfsFileStatus

HdfsFileStatus is the file status record that HDFS returns from namenode calls such as getFileInfo. The snippets below show how it is created inside the namenode, checked for null (a missing path), and read by clients, servlets, and fsck.

Examples of org.apache.hadoop.hdfs.protocol.HdfsFileStatus

      final String path, final HttpOpParam.Op op, final long openOffset
      ) throws IOException {
    if (op == GetOpParam.Op.OPEN
        || op == GetOpParam.Op.GETFILECHECKSUM
        || op == PostOpParam.Op.APPEND) {
      final HdfsFileStatus status = namenode.getFileInfo(path);
      if (status == null) {
        throw new FileNotFoundException("File " + path + " not found.");
      }
      final long len = status.getLen();
      if (op == GetOpParam.Op.OPEN && (openOffset < 0L || openOffset >= len)) {
        throw new IOException("Offset=" + openOffset + " out of the range [0, "
          + len + "); " + op + ", path=" + path);
      }
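
Below is a minimal, self-contained sketch of the same existence and offset check, assuming an already-constructed DFSClient; the class and method names (OpenOffsetCheck, validateOpenOffset) are hypothetical.

import java.io.FileNotFoundException;
import java.io.IOException;

import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;

public class OpenOffsetCheck {
  // Repeats the range check above: the path must exist and the
  // requested offset must fall inside [0, file length).
  static void validateOpenOffset(DFSClient dfs, String path, long openOffset)
      throws IOException {
    final HdfsFileStatus status = dfs.getFileInfo(path); // null when the path does not exist
    if (status == null) {
      throw new FileNotFoundException("File " + path + " not found.");
    }
    final long len = status.getLen();
    if (openOffset < 0L || openOffset >= len) {
      throw new IOException("Offset=" + openOffset
          + " out of the range [0, " + len + "); path=" + path);
    }
  }
}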

Examples of org.apache.hadoop.hdfs.protocol.HdfsFileStatus

      final String js = JsonUtil.toJsonString(locatedblocks);
      return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
    }
    case GETFILESTATUS:
    {
      final HdfsFileStatus status = namenode.getFileInfo(fullpath);
      if (status == null) {
        throw new FileNotFoundException("File does not exist: " + fullpath);
      }

      final String js = JsonUtil.toJsonString(status, true);
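
As a rough illustration of what the GETFILESTATUS branch returns, here is a hand-rolled sketch that serializes a few HdfsFileStatus getters into a WebHDFS-style JSON string. The real response is produced by JsonUtil.toJsonString; the field set shown here is an assumption and deliberately incomplete.

import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;

public class FileStatusJson {
  // Hand-rolled approximation of the WebHDFS FileStatus JSON; fields
  // beyond length/type/modificationTime are omitted for brevity.
  static String toJson(HdfsFileStatus status) {
    return "{\"FileStatus\":{"
        + "\"length\":" + status.getLen()
        + ",\"type\":\"" + (status.isDir() ? "DIRECTORY" : "FILE") + "\""
        + ",\"modificationTime\":" + status.getModificationTime()
        + "}}";
  }
}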

Examples of org.apache.hadoop.hdfs.protocol.HdfsFileStatus

      System.out.println("Path : \"" + path.toString() + "\"");
      assertTrue("/ should be a directory",
                 fs.getFileStatus(path).isDir() == true);
     
      // make sure getFileInfo returns null for files which do not exist
      HdfsFileStatus fileInfo = dfsClient.getFileInfo("/noSuchFile");
      assertTrue(fileInfo == null);

      // create a file in home directory
      //
      Path file1 = new Path("filestatus.dat");
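
The same behaviour can be observed through the public FileSystem API, which reports a missing path with an exception instead of a null status; a minimal sketch, assuming a reachable default filesystem:

import java.io.FileNotFoundException;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class MissingFileCheck {
  public static void main(String[] args) throws IOException {
    FileSystem fs = FileSystem.get(new Configuration());
    // FileSystem.getFileStatus throws for a missing path, whereas
    // DFSClient.getFileInfo (used in the test above) returns null.
    try {
      fs.getFileStatus(new Path("/noSuchFile"));
      System.out.println("unexpected: /noSuchFile exists");
    } catch (FileNotFoundException e) {
      System.out.println("/noSuchFile is absent, as expected");
    }
  }
}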

Examples of org.apache.hadoop.hdfs.protocol.HdfsFileStatus

                                  + "Mkdir operation.");
          }
          String s = FSImage.readString(in);
          String d = FSImage.readString(in);
          timestamp = readLong(in);
          HdfsFileStatus dinfo = fsDir.getFileInfo(d);
          fsDir.unprotectedRenameTo(s, d, timestamp);
          fsNamesys.changeLease(s, d, dinfo);
          break;
        }
        case OP_DELETE: {

Examples of org.apache.hadoop.hdfs.protocol.HdfsFileStatus

  /**
   * Returns the stat information about the file.
   * @throws FileNotFoundException if the file does not exist.
   */
  public FileStatus getFileStatus(Path f) throws IOException {
    HdfsFileStatus fi = dfs.getFileInfo(getPathName(f));
    if (fi != null) {
      return makeQualified(fi, f);
    } else {
      throw new FileNotFoundException("File does not exist: " + f);
    }
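
From an application's point of view, the method above surfaces as the ordinary FileSystem.getFileStatus call; a small usage sketch (the printed fields are just examples):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class StatExample {
  public static void main(String[] args) throws IOException {
    FileSystem fs = FileSystem.get(new Configuration());
    Path p = new Path(args.length > 0 ? args[0] : "/");
    // Either returns a qualified FileStatus or throws FileNotFoundException,
    // mirroring the null check in getFileStatus above.
    FileStatus st = fs.getFileStatus(p);
    System.out.println(st.getPath() + " len=" + st.getLen()
        + " dir=" + st.isDir() + " mtime=" + st.getModificationTime());
  }
}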

Examples of org.apache.hadoop.hdfs.protocol.HdfsFileStatus

          doc.startTag("listing");
          for (Map.Entry<String,String> m : root.entrySet()) {
            doc.attribute(m.getKey(), m.getValue());
          }

          HdfsFileStatus base = nn.getFileInfo(path);
          if ((base != null) && base.isDir()) {
            writeInfo(path, base, doc);
          }

          Stack<String> pathstack = new Stack<String>();
          pathstack.push(path);

Examples of org.apache.hadoop.hdfs.protocol.HdfsFileStatus

      INodeDirectory dirInode = (INodeDirectory)targetNode;
      List<INode> contents = dirInode.getChildren();
      int startChild = dirInode.nextChild(startAfter);
      int totalNumChildren = contents.size();
      int numOfListing = Math.min(totalNumChildren-startChild, this.lsLimit);
      HdfsFileStatus listing[] = new HdfsFileStatus[numOfListing];
      for (int i=0; i<numOfListing; i++) {
        INode cur = contents.get(startChild+i);
        listing[i] = createFileStatus(cur.name, cur);
      }
      return new DirectoryListing(
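
The core of the method above is the paging arithmetic: resume just after startAfter and return at most lsLimit entries. A plain-Java sketch of that arithmetic over a sorted name list (the helper is hypothetical and only illustrates the indexing):

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public class ListingSlice {
  // Skip everything up to and including startAfter, then return at most
  // lsLimit names -- the same slicing FSDirectory applies to INode children.
  static List<String> slice(List<String> sortedNames, String startAfter, int lsLimit) {
    int start = 0;
    if (startAfter != null && !startAfter.isEmpty()) {
      int pos = Collections.binarySearch(sortedNames, startAfter);
      start = pos >= 0 ? pos + 1 : -pos - 1;   // resume just after startAfter
    }
    int count = Math.min(sortedNames.size() - start, lsLimit);
    return sortedNames.subList(start, start + count);
  }

  public static void main(String[] args) {
    List<String> names = Arrays.asList("a", "b", "c", "d", "e");
    System.out.println(slice(names, "b", 2));   // prints [c, d]
  }
}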

Examples of org.apache.hadoop.hdfs.protocol.HdfsFileStatus

  /**
   * Create FileStatus by file INode
   */
   private static HdfsFileStatus createFileStatus(byte[] path, INode node) {
    // length is zero for directories
    return new HdfsFileStatus(
        node.isDirectory() ? 0 : ((INodeFile)node).computeContentSummary().getLength(),
        node.isDirectory(),
        node.isDirectory() ? 0 : ((INodeFile)node).getReplication(),
        node.isDirectory() ? 0 : ((INodeFile)node).getPreferredBlockSize(),
        node.getModificationTime(),

Examples of org.apache.hadoop.hdfs.protocol.HdfsFileStatus

                request.getPathInfo() != null ? request.getPathInfo() : "/";
             
              String delegationToken =
                request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME);
             
              HdfsFileStatus info = nn.getFileInfo(path);
              if ((info != null) && !info.isDir()) {
                try {
                  response.sendRedirect(createUri(path, info, ugi, nn,
                        request, delegationToken).toURL().toString());
                } catch (URISyntaxException e) {
                  response.getWriter().println(e.toString());

Examples of org.apache.hadoop.hdfs.protocol.HdfsFileStatus

   * Check files on DFS, starting from the indicated path.
   */
  public void fsck() {
    try {
      Result res = new Result(conf);
      final HdfsFileStatus file = namenode.getFileInfo(path);
      if (file != null) {
        check(path, file, res);

        out.println(res);
        out.println(" Number of data-nodes:\t\t" + totalDatanodes);