Examples of seek()


Examples of org.apache.hadoop.hbase.util.CollectionBackedScanner.seek()

      encodeSeeker.seekToKeyInBlock(firstOnRow.getBuffer(),
          firstOnRow.getKeyOffset(), firstOnRow.getKeyLength(), false);
      boolean hasMoreOfEncodeScanner = encodeSeeker.next();
      CollectionBackedScanner collectionScanner = new CollectionBackedScanner(
          this.kvset);
      boolean hasMoreOfCollectionScanner = collectionScanner.seek(firstOnRow);
      if (hasMoreOfEncodeScanner != hasMoreOfCollectionScanner) {
        dumpInputKVSet();
        fail("Get error result after seeking " + firstOnRow);
      }
      if (hasMoreOfEncodeScanner) {
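A minimal, self-contained sketch of the same seek-and-check idea (the row keys, column family, and values below are hypothetical, and it assumes an older HBase API where CollectionBackedScanner and seek() operate on KeyValue rather than Cell):

// Hedged sketch: build a CollectionBackedScanner over a sorted in-memory
// list of KeyValues and seek to the first possible cell of a given row.
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CollectionBackedScanner;

public class CollectionBackedScannerSeekExample {
  public static void main(String[] args) throws Exception {
    List<KeyValue> kvs = new ArrayList<KeyValue>();
    kvs.add(new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("f"),
        Bytes.toBytes("q"), Bytes.toBytes("v1")));
    kvs.add(new KeyValue(Bytes.toBytes("row2"), Bytes.toBytes("f"),
        Bytes.toBytes("q"), Bytes.toBytes("v2")));

    CollectionBackedScanner scanner = new CollectionBackedScanner(kvs);

    // seek() returns true when a cell at or after the seek key exists
    // in the backing collection; peek() then shows the current cell.
    KeyValue firstOnRow = KeyValue.createFirstOnRow(Bytes.toBytes("row2"));
    boolean found = scanner.seek(firstOnRow);
    System.out.println("seek returned " + found + ", current = " + scanner.peek());
    scanner.close();
  }
}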

Examples of org.apache.hadoop.hdfs.ByteRangeInputStream.seek()

               o.getMsg());

    r.setMsg(null);
    r.setURL(new URL("http://resolvedurl/"));
   
    is.seek(100);
    is.read();

    assertEquals("Seek to 100 bytes made incorrectly",
                 "Connect: http://resolvedurl/, Range: bytes=100-",
                 r.getMsg());
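ByteRangeInputStream is normally not constructed directly; it backs the streams returned by the HFTP and WebHDFS file systems, and a seek() like the one above is translated into an HTTP request carrying a "Range: bytes=100-" header. A hedged sketch through the generic FileSystem API (the hftp URI and path are hypothetical, and hftp only exists in older Hadoop releases):

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class RangeSeekExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(URI.create("hftp://namenode:50070"), conf);

    FSDataInputStream in = fs.open(new Path("/data/file.bin"));
    in.seek(100);                 // the next read starts at byte 100 of the file
    int firstByte = in.read();    // fetched via a ranged HTTP request
    System.out.println("byte at offset 100 = " + firstByte);
    in.close();
    fs.close();
  }
}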

Examples of org.apache.hadoop.hdfs.DFSClient.DFSDataInputStream.seek()

      final DFSClient dfsclient = new DFSClient(nnRpcAddr, conf);
      DFSDataInputStream in = null;
      try {
        in = new DFSClient.DFSDataInputStream(
            dfsclient.open(fullpath, b, true, null));
        in.seek(offset.getValue());
      } catch(IOException ioe) {
        IOUtils.cleanup(LOG, in);
        IOUtils.cleanup(LOG, dfsclient);
        throw ioe;
      }
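A hedged sketch of the same open-seek-cleanup pattern outside the servlet context (the path and offset are hypothetical, and it assumes an older Hadoop release where DFSDataInputStream is still a nested class of DFSClient and DFSClient(Configuration) picks up the namenode address from the configuration):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSClient;

public class DfsDataInputStreamSeekExample {
  public static void main(String[] args) throws Exception {
    DFSClient dfsclient = new DFSClient(new Configuration());
    DFSClient.DFSDataInputStream in = null;
    try {
      in = new DFSClient.DFSDataInputStream(dfsclient.open("/data/file.bin"));
      in.seek(4096L);               // position the stream at byte 4096
      byte[] buf = new byte[1024];
      int n = in.read(buf);         // the read begins at the seek offset
      System.out.println("read " + n + " bytes starting at offset 4096");
    } finally {
      if (in != null) {
        in.close();
      }
      dfsclient.close();
    }
  }
}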

Examples of org.apache.hadoop.hdfs.DFSClient.DFSInputStream.seek()

        .when(spyNN).getBlockLocations(anyString(), anyLong(), anyLong());
      is.openInfo();
      // Seek to beginning forces a reopen of the BlockReader - otherwise it'll
      // just keep reading on the existing stream and the fact that we've poisoned
      // the block info won't do anything.
      is.seek(0);
      IOUtils.readFully(is, buf, 0, buf.length);

    } finally {
      cluster.shutdown();
    }
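The test above relies on seek(0) forcing the BlockReader to be reopened rather than continuing on the current stream. A hedged sketch of that rewind-and-reread idiom through the public FileSystem API (the path is hypothetical; on HDFS the returned stream is backed by a DFSInputStream):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class ReReadFromStartExample {
  public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.get(new Configuration());  // assumes HDFS as the default FS
    FSDataInputStream is = fs.open(new Path("/data/file.bin"));
    try {
      byte[] buf = new byte[512];
      IOUtils.readFully(is, buf, 0, buf.length);  // first pass over the data
      is.seek(0);                                 // rewind to the beginning
      IOUtils.readFully(is, buf, 0, buf.length);  // second pass re-reads the same bytes
    } finally {
      is.close();
      fs.close();
    }
  }
}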

Examples of org.apache.hadoop.hdfs.DFSInputStream.seek()

          .when(spyNN).openAndFetchMetaInfo(anyString(), anyLong(), anyLong());
        is.openInfo();
        // Seek to beginning forces a reopen of the BlockReader - otherwise it'll
        // just keep reading on the existing stream and the fact that we've poisoned
        // the block info won't do anything.
        is.seek(0);
        IOUtils.readFully(is, buf, 0, buf.length);
 
      } finally {
        if (null != cluster) {
          cluster.shutdown();
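This is the same test idiom as the previous section; in newer Hadoop releases DFSInputStream is a top-level class in org.apache.hadoop.hdfs rather than a nested class of DFSClient, but seeking back to 0 still forces the BlockReader to be reopened, so the sketch above applies unchanged.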

Examples of org.apache.hadoop.hdfs.client.HdfsDataInputStream.seek()

      final int b = bufferSize.getValue(conf);
      final DFSClient dfsclient = newDfsClient(nnId, conf);
      HdfsDataInputStream in = null;
      try {
        in = new HdfsDataInputStream(dfsclient.open(fullpath, b, true));
        in.seek(offset.getValue());
      } catch(IOException ioe) {
        IOUtils.cleanup(LOG, in);
        IOUtils.cleanup(LOG, dfsclient);
        throw ioe;
      }
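In current Hadoop releases the stream returned by DistributedFileSystem.open() is an HdfsDataInputStream, so client code can reach it with a cast instead of building it from a DFSClient as the servlet code above does. A hedged sketch (the path, offset, and buffer size are hypothetical, and it assumes the default file system is HDFS):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.client.HdfsDataInputStream;

public class HdfsDataInputStreamSeekExample {
  public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.get(new Configuration());
    // The cast only succeeds when fs is a DistributedFileSystem.
    HdfsDataInputStream in = (HdfsDataInputStream) fs.open(new Path("/data/file.bin"));
    try {
      in.seek(1024L);                // position at byte 1024
      byte[] buf = new byte[256];
      int n = in.read(buf);          // the read begins at the seek offset
      System.out.println("read " + n + " bytes, visible length = "
          + in.getVisibleLength());
    } finally {
      in.close();
      fs.close();
    }
  }
}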

Examples of org.apache.hadoop.hdfs.server.datanode.BlockDataFile.RandomAccessor.seek()

    try {
      // truncate blockFile
      ra.setLength(newlen);

      // read last chunk
      ra.seek(lastchunkoffset);
      ra.readFully(b, 0, lastchunksize);
    } finally {
      ra.close();
    }
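BlockDataFile.RandomAccessor is an internal datanode abstraction over a block file whose seek(), readFully(), and setLength() calls mirror java.io.RandomAccessFile. A hedged sketch of the same truncate-then-read-last-chunk pattern using the standard class (the file name, lengths, and offsets are hypothetical):

import java.io.File;
import java.io.RandomAccessFile;

public class TruncateAndReadLastChunk {
  public static void main(String[] args) throws Exception {
    File blockFile = new File("/tmp/blk_1234");
    long newlen = 4096;              // truncated block length
    int lastchunksize = 512;         // bytes in the final (partial) chunk
    long lastchunkoffset = newlen - lastchunksize;

    byte[] b = new byte[lastchunksize];
    RandomAccessFile ra = new RandomAccessFile(blockFile, "rw");
    try {
      ra.setLength(newlen);          // truncate the block file
      ra.seek(lastchunkoffset);      // jump to the start of the last chunk
      ra.readFully(b, 0, lastchunksize);
    } finally {
      ra.close();
    }
  }
}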

Examples of org.apache.hadoop.hdfs.web.WebHdfsFileSystem.OffsetUrlInputStream.seek()

    // No additional connections should have been made (no seek)

    rspy.setURL(new URL("http://resolvedurl/"));

    is.seek(100);
    is.read();

    assertEquals("getPos should be 101 after reading one byte", 101,
        is.getPos());
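OffsetUrlInputStream is likewise not created directly; it backs the stream returned by WebHdfsFileSystem.open(). A hedged sketch of the position accounting the test checks (the URI and path are hypothetical): after seek(100) and a one-byte read, getPos() reports 101:

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class WebHdfsSeekPosExample {
  public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.get(
        URI.create("webhdfs://namenode:50070"), new Configuration());
    FSDataInputStream is = fs.open(new Path("/data/file.bin"));
    try {
      is.seek(100);
      is.read();                                    // read a single byte
      System.out.println("pos = " + is.getPos());   // expected: 101
    } finally {
      is.close();
      fs.close();
    }
  }
}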

Examples of org.apache.hadoop.io.compress.CompressionInputStream.seek()

          return (InputStream)in;
        }
      }
    }
    FSDataInputStream in = fs.open(file);
    in.seek(offset);
    return (InputStream)in;
  }
 
  /**
   * Returns a {@link OutputStream} for a file that might need
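A hedged sketch of the codec-aware pattern the snippet is taken from (the path, offset, and helper name are hypothetical): look up a codec for the file, wrap the stream when one matches, and seek on the raw FSDataInputStream only when the file is uncompressed:

import java.io.InputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;

public class OpenPossiblyCompressed {
  static InputStream openAt(FileSystem fs, Path file, long offset, Configuration conf)
      throws Exception {
    CompressionCodec codec = new CompressionCodecFactory(conf).getCodec(file);
    FSDataInputStream in = fs.open(file);
    if (codec != null) {
      // Compressed data generally cannot be entered at an arbitrary byte offset,
      // so return a decompressing stream positioned at the start of the file.
      return codec.createInputStream(in);
    }
    in.seek(offset);    // uncompressed: jump straight to the requested offset
    return in;
  }
}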

Examples of org.apache.hadoop.yarn.server.utils.LeveldbIterator.seek()

    RecoveredDeletionServiceState state = new RecoveredDeletionServiceState();
    state.tasks = new ArrayList<DeletionServiceDeleteTaskProto>();
    LeveldbIterator iter = null;
    try {
      iter = new LeveldbIterator(db);
      iter.seek(bytes(DELETION_TASK_KEY_PREFIX));
      while (iter.hasNext()) {
        Entry<byte[], byte[]> entry = iter.next();
        String key = asString(entry.getKey());
        if (!key.startsWith(DELETION_TASK_KEY_PREFIX)) {
          break;
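A hedged sketch of the prefix-scan idiom above (the DB handle and key prefix come from the caller; bytes() and asString() are the leveldbjni helpers the original code imports statically):

import static org.fusesource.leveldbjni.JniDBFactory.asString;
import static org.fusesource.leveldbjni.JniDBFactory.bytes;

import java.util.Map.Entry;

import org.apache.hadoop.yarn.server.utils.LeveldbIterator;
import org.iq80.leveldb.DB;

public class PrefixScan {
  static void scan(DB db, String prefix) throws Exception {
    LeveldbIterator iter = new LeveldbIterator(db);
    try {
      iter.seek(bytes(prefix));                // jump to the first key >= prefix
      while (iter.hasNext()) {
        Entry<byte[], byte[]> entry = iter.next();
        String key = asString(entry.getKey());
        if (!key.startsWith(prefix)) {
          break;                               // past the prefixed key range
        }
        System.out.println(key + " -> " + entry.getValue().length + " value bytes");
      }
    } finally {
      iter.close();
    }
  }
}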