Package org.apache.hadoop.hdfs.protocol

Examples of org.apache.hadoop.hdfs.protocol.DatanodeInfo
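
Most of the snippets below build a DatanodeInfo by wrapping a DatanodeID whose name is a host:port string, using the pre-2.x HDFS constructors that appear throughout these examples. A minimal sketch of that recurring pattern, with a made-up host name and ports:

  import org.apache.hadoop.hdfs.protocol.DatanodeID;
  import org.apache.hadoop.hdfs.protocol.DatanodeInfo;

  public class DatanodeInfoSketch {
    public static void main(String[] args) {
      // host:port name plus an empty storage ID and port fields, mirroring the
      // new DatanodeInfo(new DatanodeID(name, "", -1, port)) calls in the snippets below.
      DatanodeID id = new DatanodeID("dn1.example.com:50010", "", -1, 50020);
      DatanodeInfo info = new DatanodeInfo(id);
      System.out.println(info.getName());   // prints the host:port name passed in
    }
  }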


                             one.getGenerationStamp());
    List<TDatanodeID> locs = tblk.location;
    DatanodeInfo[] dn = new DatanodeInfo[locs.size()];
    for (int j = 0; j < dn.length; j++) {
      String name = locs.get(j).name;
      dn[j] = new DatanodeInfo(new DatanodeID(name, "", -1, getPort(name)));
    }
    return new LocatedBlock(hblock, dn);
  }
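
The getPort helper used above is not part of the excerpt. A plausible sketch, assuming the Thrift datanode name is a host:port string as the DatanodeID construction suggests (this helper is hypothetical, not the original):

  // Hypothetical helper: pull the port out of a "host:port" datanode name.
  private static int getPort(String name) {
    int idx = name.lastIndexOf(':');
    return idx < 0 ? -1 : Integer.parseInt(name.substring(idx + 1));
  }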


        DatanodeInfo[] excludes = null;
        if (excludedNodes != null) {
          excludes = new DatanodeInfo[excludedNodes.size()];
          for (int i = 0; i < excludes.length; i++) {
            String name = excludedNodes.get(i).name;
            excludes[i] = new DatanodeInfo(
                   new DatanodeID(name, "", -1, getPort(name)));
          }
        }

        // initialize favoured nodes
        DatanodeInfo[] favoured = null;
        if (favouredNodes != null) {
          favoured = new DatanodeInfo[favouredNodes.size()];
          for (int i = 0; i < favoured.length; i++) {
            String name = favouredNodes.get(i).name;
            favoured[i] = new DatanodeInfo(
                   new DatanodeID(name, "", -1, getPort(name)));
          }
        }

        LocatedBlockWithMetaInfo val =  namenode.addBlockAndFetchMetaInfo(
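
The excluded-node and favoured-node loops above are identical apart from the source list. A hedged sketch of a shared helper (toDatanodeInfos is a hypothetical name, and the element type is assumed to be the same Thrift TDatanodeID as in the earlier snippet):

  // Hypothetical refactor of the two conversion loops above.
  private DatanodeInfo[] toDatanodeInfos(List<TDatanodeID> nodes) {
    if (nodes == null) {
      return null;
    }
    DatanodeInfo[] result = new DatanodeInfo[nodes.size()];
    for (int i = 0; i < result.length; i++) {
      String name = nodes.get(i).name;
      result[i] = new DatanodeInfo(new DatanodeID(name, "", -1, getPort(name)));
    }
    return result;
  }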

              skip = false;
              break;
            }
            int priority = numberOfNeighborBlocks;
            LocatedBlockWithMetaInfo lb = resolver.getLocatedBlock(block);
            DatanodeInfo datanode = resolver.getDatanodeInfo(node);
            Set<DatanodeInfo> excludedDatanodes = new HashSet<DatanodeInfo>();
            for (String name : excludedNodes) {
              excludedDatanodes.add(resolver.getDatanodeInfo(name));
            }
            if (lb != null) {
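
Keeping the exclusions in a HashSet of DatanodeInfo works because equality is inherited from DatanodeID, which in this HDFS line compares the host:port name and the storage ID. A small, hedged sketch of that behaviour (uses java.util.HashSet and java.util.Set):

  // Sketch: two DatanodeInfo objects built from equal DatanodeIDs compare equal,
  // so contains() below is expected to return true in this API line.
  Set<DatanodeInfo> excludedDatanodes = new HashSet<DatanodeInfo>();
  excludedDatanodes.add(new DatanodeInfo(new DatanodeID("dn1.example.com:50010", "", -1, 50020)));
  boolean alreadyExcluded = excludedDatanodes.contains(
      new DatanodeInfo(new DatanodeID("dn1.example.com:50010", "", -1, 50020)));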

      }
      if (liveNodes.length <= excluded.size()) {
        return liveNodes[rand.nextInt(liveNodes.length)];
      }
      for (;;) {
        DatanodeInfo target = liveNodes[rand.nextInt(liveNodes.length)];
        if (!excluded.contains(target)) {
          return target;
        }
      }
    }
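
The for (;;) loop above terminates only because of the earlier guard: when every live node could be excluded, a random live node is returned unconditionally. An alternative, hedged sketch that makes the candidate set explicit (pickLiveNode is a hypothetical name, not the original method; uses java.util.ArrayList, List, Random, and Set):

  // Sketch: choose uniformly among non-excluded live nodes, falling back to
  // any live node when all of them are excluded.
  static DatanodeInfo pickLiveNode(DatanodeInfo[] liveNodes,
                                   Set<DatanodeInfo> excluded,
                                   Random rand) {
    List<DatanodeInfo> candidates = new ArrayList<DatanodeInfo>();
    for (DatanodeInfo node : liveNodes) {
      if (!excluded.contains(node)) {
        candidates.add(node);
      }
    }
    if (candidates.isEmpty()) {
      return liveNodes[rand.nextInt(liveNodes.length)];
    }
    return candidates.get(rand.nextInt(candidates.size()));
  }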

    Socket s = null;
    BlockReader blockReader = null;
    Block block = lblock.getBlock();

    while (s == null) {
      DatanodeInfo chosenNode;
     
      try {
        chosenNode = bestNode(dfs, lblock.getLocations(), deadNodes);
        targetAddr = NetUtils.createSocketAddr(chosenNode.getName());
      } catch (IOException ie) {
        if (failures >= DFSClient.MAX_BLOCK_ACQUIRE_FAILURES) {
          throw new IOException("Could not obtain block " + lblock);
        }
        LOG.info("Could not obtain block from any node:  " + ie);

                                TreeSet<DatanodeInfo> deadNodes) throws IOException {
    if ((nodes == null) ||
        (nodes.length - deadNodes.size() < 1)) {
      throw new IOException("No live nodes contain current block");
    }
    DatanodeInfo chosenNode;
    do {
      chosenNode = nodes[r.nextInt(nodes.length)];
    } while (deadNodes.contains(chosenNode));
    return chosenNode;
  }
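
Despite its name, bestNode simply picks a random replica location that is not already in the dead set. A hedged usage sketch, reusing the objects already visible in the previous snippet (dfs, lblock, and the TreeSet of dead nodes):

  // Sketch: pick a replica location at random, skipping nodes already marked dead;
  // on a read failure the caller typically adds the failed node to deadNodes and retries.
  TreeSet<DatanodeInfo> deadNodes = new TreeSet<DatanodeInfo>();
  DatanodeInfo chosenNode = bestNode(dfs, lblock.getLocations(), deadNodes);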

  /* Shuffle datanode array */
  static private void shuffleArray(DatanodeInfo[] datanodes) {
    for (int i=datanodes.length; i>1; i--) {
      int randomIndex = rnd.nextInt(i);
      DatanodeInfo tmp = datanodes[randomIndex];
      datanodes[randomIndex] = datanodes[i-1];
      datanodes[i-1] = tmp;
    }
  }
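
The loop above is an in-place Fisher-Yates shuffle driven by the class-level rnd. A hedged usage sketch, assuming the array comes from a LocatedBlock as in the earlier snippets:

  // Sketch: randomize the replica order before iterating over it.
  DatanodeInfo[] datanodes = lblock.getLocations();
  shuffleArray(datanodes);   // replicas are now visited in random order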

  public Map<DatanodeInfo, Integer> getDatanodeErrors() {
    return Collections.unmodifiableMap(this.datanodeErrors);
  }

  private static void swap(int i, int j, DatanodeInfo[] arr) {
    DatanodeInfo tmp = arr[i];
    arr[i] = arr[j];
    arr[j] = tmp;
  }
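
getDatanodeErrors exposes the per-datanode error counts as a read-only view: reads work as usual, but writes to the returned map throw UnsupportedOperationException. A short, hedged usage sketch (client stands in for whatever object owns the method; uses java.util.Map):

  // Sketch: iterate the unmodifiable error-count map.
  for (Map.Entry<DatanodeInfo, Integer> e : client.getDatanodeErrors().entrySet()) {
    System.out.println(e.getKey().getName() + " -> " + e.getValue() + " errors");
  }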

    this.minReadSpeedBps = conf.getLong("dfs.min.read.speed.bps", -1);
    this.maxBlockAcquireFailures = getMaxBlockAcquireFailures(conf);
    this.localHost = InetAddress.getLocalHost();
   
    // fetch network location of localhost
    this.pseuDatanodeInfoForLocalhost = new DatanodeInfo(new DatanodeID(
        this.localHost.getHostAddress()));
    this.dnsToSwitchMapping = ReflectionUtils.newInstance(
        conf.getClass("topology.node.switch.mapping.impl", ScriptBasedMapping.class,
          DNSToSwitchMapping.class), conf);
    ArrayList<String> tempList = new ArrayList<String>();
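
The excerpt stops before tempList is used; presumably it is passed to dnsToSwitchMapping.resolve to look up the local host's rack. A hedged sketch of how DNSToSwitchMapping is normally queried (the exact continuation is not shown in the excerpt):

  // Sketch: DNSToSwitchMapping.resolve takes a list of host names/addresses and
  // returns the corresponding network locations (rack paths), one per input entry.
  tempList.add(this.localHost.getHostName());
  List<String> rackList = dnsToSwitchMapping.resolve(tempList);
  // rackList.get(0) would then be the network location of the local host.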

      // address and port of that node.
      DatanodeInfo[] favoredNodeInfos = null;
      if (favoredNodes != null) {
        favoredNodeInfos = new DatanodeInfo[favoredNodes.length];
        for (int i = 0; i < favoredNodes.length; i++) {
          favoredNodeInfos[i] = new DatanodeInfo(new DatanodeID(
              favoredNodes[i].getAddress().getHostAddress() + ":" +
              favoredNodes[i].getPort()));
        }
      }
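
The favoredNodes array above holds InetSocketAddress entries, which the loop rewrites into ip:port DatanodeInfo names. A hedged sketch of building such an array on the caller's side (addresses and ports are illustrative only):

  // Sketch: favoured nodes are plain socket addresses; the loop above converts
  // them to DatanodeInfo objects keyed by "ip:port".
  InetSocketAddress[] favoredNodes = new InetSocketAddress[] {
      new InetSocketAddress("10.0.0.11", 50010),
      new InetSocketAddress("10.0.0.12", 50010)
  };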


