// Backup finished — begin loading the Datastore backup files into BigQuery.
log.warning("backup complete, starting bigquery ingestion");
log.warning("gsHandleOfBackup: " + gsHandleOfBackup);
// Authenticate as the App Engine app identity using the configured OAuth scopes.
AppIdentityCredential credential = new AppIdentityCredential(AnalysisConstants.SCOPES);
// Build a BigQuery client; HTTP_TRANSPORT / JSON_FACTORY are shared statics declared elsewhere in this file.
Bigquery bigquery = new Bigquery.Builder(HTTP_TRANSPORT, JSON_FACTORY, credential).setApplicationName("Streak Logs").build();
// Table-name suffix: the export timestamp when configured (each run writes
// fresh tables), empty otherwise (each run reuses fixed per-kind tables).
// The original code assigned "" in a dead else branch and then re-tested
// the same appendTimestampToDatatables() condition; folded into one if/else.
String datatableSuffix = "";
if (exporterConfig.appendTimestampToDatatables()) {
    datatableSuffix = Long.toString(timestamp);
} else {
    // We aren't appending timestamps, so the load jobs will target the
    // fixed per-kind table names — delete any existing tables first.
    for (String kind : exporterConfig.getEntityKindsToExport()) {
        boolean found = true;
        try {
            bigquery.tables().get(exporterConfig.getBigqueryProjectId(), exporterConfig.getBigqueryDatasetId(), kind).execute();
        } catch (IOException e) {
            // NOTE(review): any IOException — including transient network
            // failures, not just 404s — is treated as "table absent" and
            // skipped silently; confirm this best-effort behavior is intended.
            found = false;
        }
        if (found) {
            bigquery.tables().delete(exporterConfig.getBigqueryProjectId(), exporterConfig.getBigqueryDatasetId(), kind).execute();
        }
    }
}
// Submit one BigQuery load job per exported entity kind.
for (String kind : exporterConfig.getEntityKindsToExport()) {
    String sourceUri = convertHandleToUrl(gsHandleOfBackup, kind);
    log.warning("gsUrl: " + sourceUri);

    // Destination table: the entity kind plus the optional timestamp suffix.
    TableReference destination = new TableReference();
    destination.setProjectId(exporterConfig.getBigqueryProjectId());
    destination.setDatasetId(exporterConfig.getBigqueryDatasetId());
    destination.setTableId(kind + datatableSuffix);

    // Load configuration: ingest the Datastore backup file at sourceUri.
    JobConfigurationLoad loadConfig = new JobConfigurationLoad();
    loadConfig.setSourceUris(Arrays.asList(sourceUri));
    loadConfig.set("sourceFormat", "DATASTORE_BACKUP");
    loadConfig.set("allowQuotedNewlines", true);
    loadConfig.setDestinationTable(destination);

    JobConfiguration jobConfig = new JobConfiguration();
    jobConfig.setLoad(loadConfig);
    Job loadJob = new Job();
    loadJob.setConfiguration(jobConfig);

    // Insert the job and record its id for later status tracking.
    Insert insert = bigquery.jobs().insert(exporterConfig.getBigqueryProjectId(), loadJob);
    JobReference jr = insert.execute().getJobReference();
    log.warning("Uri: " + sourceUri + ", JobId: " + jr.getJobId());
}
}