Package org.apache.hadoop.hdfs.protocol

Examples of org.apache.hadoop.hdfs.protocol.HdfsFileStatus
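HdfsFileStatus is the status object DFSClient hands back for an HDFS path; every excerpt below obtains one from getFileInfo() or Nfs3Utils.getFileStatus() and branches on its accessors. As a minimal, self-contained sketch of that pattern (the NameNode URI and the /tmp path are placeholders, not taken from the excerpts below):

import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;

public class HdfsFileStatusExample {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    // Placeholder NameNode address; point this at a real cluster.
    DFSClient dfsClient = new DFSClient(URI.create("hdfs://localhost:8020"), conf);
    try {
      // getFileInfo() returns null when the path does not exist, which is why
      // the excerpts below always check the result before using it.
      HdfsFileStatus status = dfsClient.getFileInfo("/tmp");
      if (status == null) {
        System.out.println("/tmp does not exist");
        return;
      }
      System.out.println("isDir: " + status.isDir());
      System.out.println("fileId: " + status.getFileId());
      System.out.println("mtime: " + status.getModificationTime());
      System.out.println("children: " + status.getChildrenNum());
    } finally {
      dfsClient.close();
    }
  }
}

The same accessors (isDir(), getFileId(), getModificationTime(), getChildrenNum()) drive the request validation in the NFS gateway and NameNode excerpts that follow.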


        LOG.info("Can't get path for dir fileId:" + dirHandle.getFileId());
        return new REMOVE3Response(Nfs3Status.NFS3ERR_STALE);
      }

      String fileIdPath = dirFileIdPath + "/" + fileName;
      HdfsFileStatus fstat = Nfs3Utils.getFileStatus(dfsClient, fileIdPath);
      if (fstat == null) {
        WccData dirWcc = new WccData(Nfs3Utils.getWccAttr(preOpDirAttr),
            preOpDirAttr);
        return new REMOVE3Response(Nfs3Status.NFS3ERR_NOENT, dirWcc);
      }
      if (fstat.isDir()) {
        WccData dirWcc = new WccData(Nfs3Utils.getWccAttr(preOpDirAttr),
            preOpDirAttr);
        return new REMOVE3Response(Nfs3Status.NFS3ERR_ISDIR, dirWcc);
      }
View Full Code Here


      if (!checkAccessPrivilege(client, AccessPrivilege.READ_WRITE)) {
        return new RMDIR3Response(Nfs3Status.NFS3ERR_ACCES, errWcc);
      }

      String fileIdPath = dirFileIdPath + "/" + fileName;
      HdfsFileStatus fstat = Nfs3Utils.getFileStatus(dfsClient, fileIdPath);
      if (fstat == null) {
        return new RMDIR3Response(Nfs3Status.NFS3ERR_NOENT, errWcc);
      }
      if (!fstat.isDir()) {
        return new RMDIR3Response(Nfs3Status.NFS3ERR_NOTDIR, errWcc);
      }
     
      if (fstat.getChildrenNum() > 0) {
        return new RMDIR3Response(Nfs3Status.NFS3ERR_NOTEMPTY, errWcc);
      }

      boolean result = dfsClient.delete(fileIdPath, false);
      WccData dirWcc = Nfs3Utils.createWccData(
View Full Code Here

      dfsClient.createSymlink(symData, linkIdPath, false);
      // Setting attributes on a symlink is treated as changing the attributes
      // of the target file, so there is no need to set the symlink's
      // attributes here after it is created.

      HdfsFileStatus linkstat = dfsClient.getFileLinkInfo(linkIdPath);
      Nfs3FileAttributes objAttr = Nfs3Utils.getNfs3FileAttrFromFileStatus(
          linkstat, iug);
      dirWcc
          .setPostOpAttr(Nfs3Utils.getFileAttr(dfsClient, linkDirIdPath, iug));
View Full Code Here

    if (LOG.isDebugEnabled()) {
      LOG.debug("NFS READDIR fileId: " + handle.getFileId() + " cookie: "
          + cookie + " count: " + count);
    }

    HdfsFileStatus dirStatus = null;
    DirectoryListing dlisting = null;
    Nfs3FileAttributes postOpAttr = null;
    long dotdotFileId = 0;
    try {
      String dirFileIdPath = Nfs3Utils.getFileIdPath(handle);
      dirStatus = dfsClient.getFileInfo(dirFileIdPath);
      if (dirStatus == null) {
        LOG.info("Can't get path for fileId:" + handle.getFileId());
        return new READDIR3Response(Nfs3Status.NFS3ERR_STALE);
      }
      if (!dirStatus.isDir()) {
        LOG.error("Can't readdir for regular file, fileId:"
            + handle.getFileId());
        return new READDIR3Response(Nfs3Status.NFS3ERR_NOTDIR);
      }
      long cookieVerf = request.getCookieVerf();
      if ((cookieVerf != 0) && (cookieVerf != dirStatus.getModificationTime())) {
        LOG.error("CookierVerf mismatch. request cookierVerf:" + cookieVerf
            + " dir cookieVerf:" + dirStatus.getModificationTime());
        return new READDIR3Response(Nfs3Status.NFS3ERR_BAD_COOKIE);
      }

      if (cookie == 0) {
        // Get dotdot fileId
        String dotdotFileIdPath = dirFileIdPath + "/..";
        HdfsFileStatus dotdotStatus = dfsClient.getFileInfo(dotdotFileIdPath);

        if (dotdotStatus == null) {
          // This should not happen
          throw new IOException("Can't get path for handle path:"
              + dotdotFileIdPath);
        }
        dotdotFileId = dotdotStatus.getFileId();
      }

      // Get the list from the resume point
      byte[] startAfter;
      if (cookie == 0) {
View Full Code Here

    if (LOG.isDebugEnabled()) {
      LOG.debug("NFS READDIRPLUS fileId: " + handle.getFileId() + " cookie: "
          + cookie + " dirCount: " + dirCount + " maxCount: " + maxCount);
    }

    HdfsFileStatus dirStatus;
    DirectoryListing dlisting = null;
    Nfs3FileAttributes postOpDirAttr = null;
    long dotdotFileId = 0;
    try {
      String dirFileIdPath = Nfs3Utils.getFileIdPath(handle);
      dirStatus = dfsClient.getFileInfo(dirFileIdPath);
      if (dirStatus == null) {
        LOG.info("Can't get path for fileId:" + handle.getFileId());
        return new READDIRPLUS3Response(Nfs3Status.NFS3ERR_STALE);
      }
      if (!dirStatus.isDir()) {
        LOG.error("Can't readdirplus for regular file, fileId:"
            + handle.getFileId());
        return new READDIRPLUS3Response(Nfs3Status.NFS3ERR_NOTDIR);
      }
      long cookieVerf = request.getCookieVerf();
      if ((cookieVerf != 0) && (cookieVerf != dirStatus.getModificationTime())) {
        LOG.error("CookierVerf mismatch. request cookierVerf:" + cookieVerf
            + " dir cookieVerf:" + dirStatus.getModificationTime());
        return new READDIRPLUS3Response(Nfs3Status.NFS3ERR_BAD_COOKIE);
      }

      if (cookie == 0) {
        // Get dotdot fileId
        String dotdotFileIdPath = dirFileIdPath + "/..";
        HdfsFileStatus dotdotStatus = dfsClient.getFileInfo(dotdotFileIdPath);

        if (dotdotStatus == null) {
          // This should not happen
          throw new IOException("Can't get path for handle path:"
              + dotdotFileIdPath);
        }
        dotdotFileId = dotdotStatus.getFileId();
      }

      // Get the list from the resume point
      byte[] startAfter;
      if (cookie == 0) {
View Full Code Here
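Both READDIR excerpts cut off right where the listing is fetched from the resume point. As a rough sketch of that paging pattern over HdfsFileStatus entries (a standalone helper under assumed arguments, not the upstream continuation; the caller supplies the DFSClient and a file-id path like dirFileIdPath above):

import java.io.IOException;

import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.protocol.DirectoryListing;
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;

public class ListDirectoryExample {
  // Walk a directory in pages, resuming each call from the last returned
  // name, the same idea the handlers above express with the client cookie.
  static void listAll(DFSClient dfsClient, String dirFileIdPath) throws IOException {
    byte[] startAfter = HdfsFileStatus.EMPTY_NAME;  // start from the first entry
    while (true) {
      DirectoryListing dlisting = dfsClient.listPaths(dirFileIdPath, startAfter);
      if (dlisting == null) {
        return;  // directory no longer exists
      }
      for (HdfsFileStatus entry : dlisting.getPartialListing()) {
        System.out.println(entry.getLocalName() + " fileId=" + entry.getFileId());
      }
      if (!dlisting.hasMore()) {
        return;
      }
      startAfter = dlisting.getLastName();  // resume point for the next page
    }
  }
}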

      return;
    }
   
    DFSClient dfs = getDFSClient(ugi, nnAddr, conf);
    String target = dir;
    final HdfsFileStatus targetStatus = dfs.getFileInfo(target);
    if (targetStatus == null) { // not exists
      out.print("<h3>File or directory : " + target + " does not exist</h3>");
      JspHelper.printGotoForm(out, namenodeInfoPort, tokenString, target,
          nnAddr);
    } else {
      if (!targetStatus.isDir()) { // a file
        List<LocatedBlock> blocks = dfs.getNamenode().getBlockLocations(dir, 0, 1)
            .getLocatedBlocks();

        LocatedBlock firstBlock = null;
        DatanodeInfo[] locations = null;
View Full Code Here

  }

  private void setPermissionInt(String src, FsPermission permission)
      throws AccessControlException, FileNotFoundException, SafeModeException,
      UnresolvedLinkException, IOException {
    HdfsFileStatus resultingStat = null;
    writeLock();
    try {
      checkOperation(OperationCategory.WRITE);

      if (isInSafeMode()) {
View Full Code Here

  }

  private void setOwnerInt(String src, String username, String group)
      throws AccessControlException, FileNotFoundException, SafeModeException,
      UnresolvedLinkException, IOException {
    HdfsFileStatus resultingStat = null;
    writeLock();
    try {
      checkOperation(OperationCategory.WRITE);

      if (isInSafeMode()) {
View Full Code Here
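The setPermissionInt/setOwnerInt excerpts above are the NameNode side of these operations. A small client-side sketch of the matching calls, re-reading the HdfsFileStatus afterwards (the path, user, group, and mode values are made-up placeholders):

import java.io.IOException;

import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;

public class ChangeOwnershipExample {
  // Change permission and owner, then re-read the HdfsFileStatus to confirm.
  static void chownAndVerify(DFSClient dfsClient, String src) throws IOException {
    dfsClient.setPermission(src, new FsPermission((short) 0640));
    dfsClient.setOwner(src, "hdfs", "supergroup");  // placeholder user and group
    HdfsFileStatus stat = dfsClient.getFileInfo(src);
    if (stat != null) {
      System.out.println(stat.getOwner() + ":" + stat.getGroup()
          + " " + stat.getPermission());
    }
  }
}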

        throw new IllegalArgumentException(
           "Sources and target are not in the same directory");
      }
    }

    HdfsFileStatus resultingStat = null;
    writeLock();
    try {
      checkOperation(OperationCategory.WRITE);
      if (isInSafeMode()) {
        throw new SafeModeException("Cannot concat " + target, safeMode);
View Full Code Here

      }
      INode inode = dir.getINode(src);
      if (inode != null) {
        dir.setTimes(src, inode, mtime, atime, true);
        if (auditLog.isInfoEnabled() && isExternalInvocation()) {
          final HdfsFileStatus stat = dir.getFileInfo(src, false);
          logAuditEvent(UserGroupInformation.getCurrentUser(),
                        getRemoteIp(),
                        "setTimes", src, null, stat);
        }
      } else {
View Full Code Here
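The last excerpt logs an audit event with the post-operation HdfsFileStatus after dir.setTimes(). From the client side, the corresponding call and a read-back of the new times might look like this sketch (the path is supplied by the caller; both timestamps are set to "now" purely for illustration):

import java.io.IOException;

import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;

public class SetTimesExample {
  // Set modification/access times and read them back via HdfsFileStatus.
  static void touch(DFSClient dfsClient, String src) throws IOException {
    long now = System.currentTimeMillis();
    dfsClient.setTimes(src, now, now);  // mtime and atime, in milliseconds
    HdfsFileStatus stat = dfsClient.getFileInfo(src);
    if (stat != null) {
      System.out.println("mtime=" + stat.getModificationTime()
          + " atime=" + stat.getAccessTime());
    }
  }
}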
