Package org.apache.hadoop.hdfs.protocol

Examples of org.apache.hadoop.hdfs.protocol.CachePoolInfo
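CachePoolInfo describes an HDFS cache pool: a required pool name plus optional owner, group, permission mode, byte limit, and maximum relative expiry for the directives in the pool. The excerpts below appear to come from the cacheadmin CLI, the PBHelper protobuf translation layer, the NameNode, and test code. As a starting point, here is a minimal sketch of creating a pool through DistributedFileSystem.addCachePool; the pool name and attribute values are illustrative, not taken from the excerpts.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.CachePoolInfo;

public class CreateCachePoolExample {
  public static void main(String[] args) throws Exception {
    // Assumes fs.defaultFS points at an HDFS cluster with centralized caching enabled.
    Configuration conf = new Configuration();
    DistributedFileSystem dfs = (DistributedFileSystem) FileSystem.get(conf);

    // Only the pool name is required; every other attribute is optional.
    CachePoolInfo info = new CachePoolInfo("example-pool");   // hypothetical pool name
    info.setOwnerName("hdfs");                                // hypothetical owner
    info.setGroupName("hadoop");                              // hypothetical group
    info.setMode(new FsPermission((short) 0755));
    info.setLimit(64L * 1024 * 1024);                         // 64 MB of cache space

    dfs.addCachePool(info);
  }
}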


      // Excerpt from the cacheadmin modify-pool command: build a CachePoolInfo
      // containing only the attributes the user supplied on the command line.
      if (!args.isEmpty()) {
        System.err.print("Can't understand arguments: " +
          Joiner.on(" ").join(args) + "\n");
        System.err.println("Usage is " + getShortUsage());
        return 1;
      }
      boolean changed = false;
      CachePoolInfo info = new CachePoolInfo(name);
      if (owner != null) {
        info.setOwnerName(owner);
        changed = true;
      }
      if (group != null) {
        info.setGroupName(group);
        changed = true;
      }
      if (mode != null) {
        info.setMode(new FsPermission(mode.shortValue()));
        changed = true;
      }
      if (limit != null) {
        info.setLimit(limit);
        changed = true;
      }
      if (maxTtl != null) {
        info.setMaxRelativeExpiryMs(maxTtl);
        changed = true;
      }
      if (!changed) {
        System.err.println("You must specify at least one attribute to " +
            "change in the cache pool.");
View Full Code Here
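The excerpt above sets only the attributes that were supplied and rejects a no-op modification. Outside the CLI, the same partial-update idiom looks roughly like the sketch below; the pool name and new owner are illustrative, and the method assumes an already-open DistributedFileSystem. modifyCachePool applies only the attributes that were set and leaves the rest unchanged.

import java.io.IOException;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.CachePoolInfo;

class CachePoolUpdates {
  /** Change only the owner of an existing pool; unset attributes keep their current values. */
  static void changePoolOwner(DistributedFileSystem dfs, String pool, String newOwner)
      throws IOException {
    CachePoolInfo update = new CachePoolInfo(pool);
    update.setOwnerName(newOwner);
    dfs.modifyCachePool(update);
  }
}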


      // Excerpt from the cacheadmin list-pools command: render each pool's attributes.
      int numResults = 0;
      try {
        RemoteIterator<CachePoolEntry> iter = dfs.listCachePools();
        while (iter.hasNext()) {
          CachePoolEntry entry = iter.next();
          CachePoolInfo info = entry.getInfo();
          LinkedList<String> row = new LinkedList<String>();
          if (name == null || info.getPoolName().equals(name)) {
            row.add(info.getPoolName());
            row.add(info.getOwnerName());
            row.add(info.getGroupName());
            row.add(info.getMode() != null ? info.getMode().toString() : null);
            Long limit = info.getLimit();
            String limitString;
            if (limit != null && limit.equals(CachePoolInfo.LIMIT_UNLIMITED)) {
              limitString = "unlimited";
            } else {
              limitString = "" + limit;
            }
            row.add(limitString);
            Long maxTtl = info.getMaxRelativeExpiryMs();
            String maxTtlString = null;

            if (maxTtl != null) {
              if (maxTtl.longValue() == CachePoolInfo.RELATIVE_EXPIRY_NEVER) {
                maxTtlString = "never";
View Full Code Here
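The listing excerpt above feeds a tabular formatter in the CLI. A stripped-down sketch of the same RemoteIterator loop, printing one line per pool, with the unlimited-limit handling mirroring the excerpt:

import java.io.IOException;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.CachePoolEntry;
import org.apache.hadoop.hdfs.protocol.CachePoolInfo;

class CachePoolListing {
  /** Print the name, owner, group, and limit of every cache pool. */
  static void printCachePools(DistributedFileSystem dfs) throws IOException {
    RemoteIterator<CachePoolEntry> iter = dfs.listCachePools();
    while (iter.hasNext()) {
      CachePoolInfo info = iter.next().getInfo();
      Long limit = info.getLimit();
      String limitString = (limit != null && limit.equals(CachePoolInfo.LIMIT_UNLIMITED))
          ? "unlimited" : String.valueOf(limit);
      System.out.println(info.getPoolName() + " owner=" + info.getOwnerName()
          + " group=" + info.getGroupName() + " limit=" + limitString);
    }
  }
}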

  }

  // PBHelper: convert the protobuf form received over RPC back into a CachePoolInfo.
  public static CachePoolInfo convert(CachePoolInfoProto proto) {
    // Pool name is a required field, the rest are optional
    String poolName = checkNotNull(proto.getPoolName());
    CachePoolInfo info = new CachePoolInfo(poolName);
    if (proto.hasOwnerName()) {
      info.setOwnerName(proto.getOwnerName());
    }
    if (proto.hasGroupName()) {
      info.setGroupName(proto.getGroupName());
    }
    if (proto.hasMode()) {
      info.setMode(new FsPermission((short)proto.getMode()));
    }
    if (proto.hasLimit()) {
      info.setLimit(proto.getLimit());
    }
    if (proto.hasMaxRelativeExpiry()) {
      info.setMaxRelativeExpiryMs(proto.getMaxRelativeExpiry());
    }
    return info;
  }
View Full Code Here
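PBHelper also converts in the opposite direction before a CachePoolInfo is sent over RPC. The sketch below is inferred from the has*/get* accessors used in the excerpt above and assumes standard protobuf builder naming (setPoolName, setOwnerName, and so on); treat it as an illustration rather than the exact Hadoop source.

  // Sketch of the reverse mapping (CachePoolInfo -> CachePoolInfoProto); the builder
  // setter names are assumptions based on the proto accessors shown above.
  public static CachePoolInfoProto convert(CachePoolInfo info) {
    CachePoolInfoProto.Builder builder = CachePoolInfoProto.newBuilder();
    builder.setPoolName(info.getPoolName());            // pool name is required
    if (info.getOwnerName() != null) {
      builder.setOwnerName(info.getOwnerName());
    }
    if (info.getGroupName() != null) {
      builder.setGroupName(info.getGroupName());
    }
    if (info.getMode() != null) {
      builder.setMode(info.getMode().toShort());        // FsPermission -> permission bits
    }
    if (info.getLimit() != null) {
      builder.setLimit(info.getLimit());
    }
    if (info.getMaxRelativeExpiryMs() != null) {
      builder.setMaxRelativeExpiry(info.getMaxRelativeExpiryMs());
    }
    return builder.build();
  }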

    builder.setStats(PBHelper.convert(entry.getStats()));
    return builder.build();
  }

  public static CachePoolEntry convert(CachePoolEntryProto proto) {
    CachePoolInfo info = PBHelper.convert(proto.getInfo());
    CachePoolStats stats = PBHelper.convert(proto.getStats());
    return new CachePoolEntry(info, stats);
  }
View Full Code Here

            "Cannot add cache pool " + req.getPoolName(), safeMode);
      }
      if (pc != null) {
        pc.checkSuperuserPrivilege();
      }
      CachePoolInfo info = cacheManager.addCachePool(req);
      getEditLog().logAddCachePool(info, cacheEntry != null);
      success = true;
    } finally {
      writeUnlock();
      if (isAuditEnabled() && isExternalInvocation()) {
View Full Code Here

      this.newReplication = newReplication;
    }

    @Override
    void prepare() throws Exception {
      dfs.addCachePool(new CachePoolInfo(directive.getPool()));
      id = client.addCacheDirective(directive, EnumSet.of(CacheFlag.FORCE));
    }
View Full Code Here

          build();
    }

    @Override
    void prepare() throws Exception {
      dfs.addCachePool(new CachePoolInfo(directive.getPool()));
      id = dfs.addCacheDirective(directive, EnumSet.of(CacheFlag.FORCE));
    }
View Full Code Here

    void prepare() throws Exception {
    }

    @Override
    void invoke() throws Exception {
      client.addCachePool(new CachePoolInfo(pool));
    }
View Full Code Here
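The three prepare()/invoke() excerpts above share one setup pattern: create the pool first, then attach a cache directive to it, passing CacheFlag.FORCE to bypass the pool's resource-limit check. A condensed sketch of that round trip; the pool name, path, and replication are illustrative.

import java.io.IOException;
import java.util.EnumSet;
import org.apache.hadoop.fs.CacheFlag;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;
import org.apache.hadoop.hdfs.protocol.CachePoolInfo;

class CacheSetup {
  /** Create a pool, then ask the NameNode to cache a path inside it. */
  static long cachePath(DistributedFileSystem dfs) throws IOException {
    dfs.addCachePool(new CachePoolInfo("example-pool"));        // hypothetical pool name
    CacheDirectiveInfo directive = new CacheDirectiveInfo.Builder()
        .setPool("example-pool")
        .setPath(new Path("/data/hot"))                         // hypothetical path
        .setReplication((short) 1)
        .build();
    // CacheFlag.FORCE skips the pool's resource-limit check, as in the test excerpts above.
    return dfs.addCacheDirective(directive, EnumSet.of(CacheFlag.FORCE));
  }
}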
