throw new RuntimeException("File record already has data; overwrite not allowed! fileName: " + fileName);
}
// Get the database currently open on this thread and declare a massive-insert intent,
// so OrientDB can optimize for bulk writes.
// TODO: is the thread-local-database assumption OK here?
final ODatabase database = ODatabaseRecordThreadLocal.INSTANCE.get();
database.declareIntent(new OIntentMassiveInsert());
// Insert File data.
final long fileSize = file.length();
final FileInputStream in = new FileInputStream(file);
try {
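// Stream the file into the database in fixed-size (80 KiB) chunks.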
final int CHUNK_SIZE = 81920;
int bufferedBytes;
final byte[] buffer = new byte[CHUNK_SIZE];
byte currentPercent = 0;
final int fullChunks = (int) (fileSize / CHUNK_SIZE);
// Use long arithmetic so this doesn't overflow int for files larger than ~2 GB.
final long fullChunksSize = (long) fullChunks * CHUNK_SIZE;
final int totalChunks;
if (fileSize > fullChunksSize) {
totalChunks = fullChunks + 1;
} else {
totalChunks = fullChunks;
}
final List<ORID> chunkRids = new ArrayList<ORID>(totalChunks);
// Make only one ORecordBytes instance and reuse it for every chunk,
// to reduce heap garbage.
final ORecordBytes chunk = new ORecordBytes();
// Handle the full chunks.
for (int page = 0; page < fullChunks; page++) {
// Read a full chunk of data from the file into a buffer.
bufferedBytes = 0;
while (bufferedBytes < buffer.length) {
final int bytesRead = in.read(buffer, bufferedBytes, buffer.length - bufferedBytes);
if (bytesRead == -1) {
throw new Exception("Reached end of file prematurely. (File changed while reading?) fileName=" + file.getAbsolutePath());
}
bufferedBytes += bytesRead;
}
// Save the chunk to the database.
final long saveStartTime = System.currentTimeMillis();
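// Re-point the reused record at this chunk's bytes; save() then stores it as a new record.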
chunk.reset(buffer);
chunk.save();
final long saveMs = System.currentTimeMillis() - saveStartTime;
// Log the amount of time taken by the save.
System.out.printf("Saved chunk %d in %d ms.\n", page, saveMs);
// Save the chunk's record ID in the list.
// Have to copy() the ORID or else every chunk in the list gets the same last ORID.
// This is because we are using the chunk.reset(); approach to reduce garbage objects.
chunkRids.add(chunk.getIdentity().copy());
// Only report progress if it has changed; use long math so the percentage calculation can't overflow.
final byte percent = (byte) ((page + 1) * 100L / totalChunks);
if (percent > currentPercent) {
System.out.printf("Progress: %d%%\n", percent);
currentPercent = percent;
}
}
// Handle the final partial chunk (if any).
if (fullChunks < totalChunks) {
final int remainder = (int) (fileSize - fullChunksSize);
// Read the remaining data from the file into a buffer.
bufferedBytes = 0;
while (bufferedBytes < remainder) {
final int bytesRead = in.read(buffer, bufferedBytes, remainder - bufferedBytes);
if (bytesRead == -1) {
throw new Exception("Reached end of file prematurely. (File changed while reading?) fileName=" + file.getAbsolutePath());
}
bufferedBytes += bytesRead;
}
// Save the chunk to the database.
final long saveStartTime = System.currentTimeMillis();
chunk.reset(Arrays.copyOf(buffer, remainder));
chunk.save();
final long saveMs = System.currentTimeMillis() - saveStartTime;
// Log the amount of time taken by the save.
System.out.printf("Saved partial chunk %d in %d ms.\n", fullChunks, saveMs);
// Save the chunk's record ID in the list.
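// No copy() needed here: the reused chunk record is not reset again, so its identity won't change.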
chunkRids.add(chunk.getIdentity());
}
// Should be no more data, so validate this.
final int b = in.read();
if (b != -1) {
throw new Exception("File changed while saving to database! fileName=" + file.getAbsolutePath());
}
// Report 100% progress if we haven't already.
if (currentPercent < 100) {
System.out.println("Progress: 100%");
}
// Save the list of chunk references.
final long saveChunkListStartTime = System.currentTimeMillis();
fileDoc.field("DataChunks", chunkRids);
fileDoc.save();
final long saveChunkListMs = System.currentTimeMillis() - saveChunkListStartTime;
// Log the amount of time taken to save the list of chunk RIDs.
System.out.printf("Saved list of %d chunk RIDs in %d ms.\n", chunkRids.size(), saveChunkListMs);
} finally {
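// Always clear the massive-insert intent and close the input stream, even if the copy failed.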
database.declareIntent(null);
in.close();
}
}