// Submit the source cluster entity; executeWithURL returns 0 on success.
Assert.assertEquals(0, TestContext.executeWithURL("entity -submit -type cluster -file " + filePath));
// Materialize and submit the target (BCP) cluster entity from its template.
filePath = TestContext.overlayParametersOverTemplate("/table/bcp-cluster.xml", overlay);
Assert.assertEquals(0, TestContext.executeWithURL("entity -submit -type cluster -file " + filePath));
// Precondition: the partition to be replicated must already exist on the source metastore.
HCatPartition sourcePartition = HiveTestUtils.getPartition(
sourceMetastoreUrl, SOURCE_DATABASE_NAME, SOURCE_TABLE_NAME, "ds", PARTITION_VALUE);
Assert.assertNotNull(sourcePartition);
// Submit and schedule the replicating feed that copies the table partition source -> target.
filePath = TestContext.overlayParametersOverTemplate("/table/customer-table-replicating-feed.xml", overlay);
Assert.assertEquals(0, TestContext.executeWithURL("entity -submitAndSchedule -type feed -file " + filePath));
// wait until the workflow job completes
WorkflowJob jobInfo = OozieTestUtils.getWorkflowJob(targetContext.getCluster().getCluster(),
OozieClient.FILTER_NAME + "=FALCON_FEED_REPLICATION_" + feedName);
// NOTE(review): argument order here is (actual, expected), unlike the
// (expected, actual) order used above — swapping would give clearer failure messages.
Assert.assertEquals(jobInfo.getStatus(), WorkflowJob.Status.SUCCEEDED);
// verify if the partition on the target exists
HCatPartition targetPartition = HiveTestUtils.getPartition(
targetMetastoreUrl, TARGET_DATABASE_NAME, TARGET_TABLE_NAME, "ds", PARTITION_VALUE);
Assert.assertNotNull(targetPartition);
// Query the running-instances REST API for this feed on the target, passing the
// auth cookie; the builder chain continues past this chunk's visible lines.
InstancesResult response = targetContext.getService().path("api/instance/running/feed/" + feedName)
.header("Cookie", targetContext.getAuthenticationToken())