} while (hasmore);
} catch (EOFException e) {
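// Truncated archive: treat an unexpected EOF as the end of the data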
e.printStackTrace();
return hasmore = false;
} catch (IOException e) {
throw new AvroBaseException("Failed to read S3 object stream", e);
}
} while (!hasmore);
return hasmore;
}
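// Advances the scan through the archived S3 files, returning true when an archived row
// is available and falling back to the local iterator once the archives are exhausted.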
private boolean startArchived() {
// Scan S3 for the archive files (fetched only on the first call)
if (files == null) {
files = getArchives();
}
// If next() hasn't been called since the last check, return the cached answer
if (hasmore != null) return hasmore;
// Read the files in sequence, moving between them as they run out of data
if (firstdelegaterow != null) {
do {
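// Open the next archive file if needed; true means there are no more files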
if (assertCurrentStream()) return hasmore = false;
try {
do {
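// Each row is preceded by a boolean marker: true means another row follows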
hasmore = currentstream.readBoolean();
if (hasmore) {
if (nextRowInStream()) return hasmore = false;
if (firstdelegatekey != null) {
// skip archived rows we have already scanned
final int compare = bytesComparator.compare(firstdelegatekey, row);
if (compare >= 0) {
break;
} else {
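// Skip the length-prefixed value of a row we have already scanned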
currentstream.readFully(new byte[currentstream.readInt()]);
}
} else {
break;
}
} else {
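// End of this archive file; close it so the outer loop moves on to the next one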
currentstream.close();
currentstream = null;
}
} while (hasmore);
} catch (EOFException e) {
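// Truncated archive: treat an unexpected EOF as the end of the data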
e.printStackTrace();
return hasmore = false;
} catch (IOException e) {
throw new AvroBaseException("Failed to read S3 object stream", e);
}
} while (!hasmore);
} else {
hasmore = false;
}
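// No archived row found: honor the stop row, then fall back to the buffered
// delegate row or the local iterator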
if (!hasmore) {
if (stopRow != null && bytesComparator.compare(row, stopRow) >= 0) {
return false;
}
if (firstdelegaterow != null) {
firstrow = true;
return hasmore = true;
}
return hasmore = iterator.hasNext();
}
return hasmore;
}
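// Reads the next length-prefixed row key from the current archive stream; returns true
// if the stop row has been reached and the scan should end.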
private boolean nextRowInStream() throws IOException {
// We may be reading rows that are supposed to still be in the local store
// but have not been deleted from the archive yet
row = new byte[currentstream.readInt()];
currentstream.readFully(row);
if (stopRow != null) {
int compare = bytesComparator.compare(row, stopRow);
if (compare >= 0) {
currentstream.close();
currentstream = null;
return true;
}
}
return false;
}
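// Ensures an archive stream is open, pulling the next S3 object and reading its schema
// header; returns true when there are no more archive files to read.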
private boolean assertCurrentStream() {
if (currentstream == null) {
if (files.size() == 0) return true;
final S3Object nextFile = files.remove(0);
try {
currentstream = new DataInputStream(new GZIPInputStream(getInputStream(nextFile)));
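// Each archive file begins with its length-prefixed Avro schema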
final byte[] bytes = new byte[currentstream.readInt()];
currentstream.readFully(bytes);
schema = Schema.parse(new ByteArrayInputStream(bytes));
} catch (ServiceException e) {
throw new AvroBaseException("Failed to read input stream from S3: " + nextFile, e);
} catch (IOException e) {
throw new AvroBaseException("Failed to read schema", e);
}
}
return false;
}
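// Returns the next row, drawing from the buffered delegate row, the local iterator,
// or the current archive stream depending on the scan state.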
@Override
public synchronized Row<T, byte[]> next() {
// Grab the next local value
if (reverse) {
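// Hand back the buffered first delegate row exactly once, then clear it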
if (firstrow) {
try {
firstrow = false;
hasmore = null;
return firstdelegaterow;
} finally {
firstdelegaterow = null;
}
}
if (firstdelegaterow == null) {
return iterator.next();
}
} else {
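// Forward scan: while no delegate key boundary is set, read from the local iterator
// and remember the last row returned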
if (lastdelegatekey == null) return lastdelegaterow = iterator.next();
}
// Grab the next S3 value
if ((files.size() == 0 && currentstream == null) || (hasmore != null && !hasmore))
throw new NoSuchElementException();
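// Invalidate the cached hasNext() answer so the next call re-scans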
hasmore = null;
try {
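// The value is length-prefixed; decode it using the archived schema as the writer
// schema and actualSchema as the reader schema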
byte[] bytes = new byte[currentstream.readInt()];
currentstream.readFully(bytes);
SpecificDatumReader<T> sdr = new SpecificDatumReader<T>(schema, actualSchema);
T read = sdr.read(null, decoderFactory.binaryDecoder(bytes, null));
return new Row<T, byte[]>(read, row);
} catch (IOException e) {
throw new AvroBaseException("Invalid data in log", e);
}
}
@Override
public void remove() {