Package: org.apache.falcon.entity.v0.cluster

Usage examples of org.apache.falcon.entity.v0.cluster.Cluster


        StartupProperties.get().setProperty(CatalogServiceFactory.CATALOG_SERVICE, "thrift://localhost:9083");
        Assert.assertTrue(CatalogServiceFactory.isEnabled());

        InputStream stream = this.getClass().getResourceAsStream("/config/cluster/cluster-no-registry.xml");
        Cluster cluster = parser.parse(stream);

        Interface catalog = ClusterHelper.getInterface(cluster, Interfacetype.REGISTRY);
        Assert.assertNull(catalog);

        StartupProperties.get().remove(CatalogServiceFactory.CATALOG_SERVICE);
View Full Code Here


        // disable catalog service
        StartupProperties.get().remove(CatalogServiceFactory.CATALOG_SERVICE);
        Assert.assertFalse(CatalogServiceFactory.isEnabled());

        InputStream stream = this.getClass().getResourceAsStream("/config/cluster/cluster-bad-registry.xml");
        Cluster cluster = parser.parse(stream);

        Interface catalog = ClusterHelper.getInterface(cluster, Interfacetype.REGISTRY);
        Assert.assertEquals(catalog.getEndpoint(), "Hcat");
        Assert.assertEquals(catalog.getVersion(), "0.1");
    }
View Full Code Here

     * @throws FalconException
     */
    @Test
    public void testClusterTags() throws FalconException {
        InputStream stream = this.getClass().getResourceAsStream(CLUSTER_XML);
        Cluster cluster = parser.parse(stream);

        final String tags = cluster.getTags();
        Assert.assertEquals("consumer=consumer@xyz.com, owner=producer@xyz.com, department=forecasting", tags);

        final String[] keys = {"consumer", "owner", "department", };
        final String[] values = {"consumer@xyz.com", "producer@xyz.com", "forecasting", };

View Full Code Here

        Map<String, String> overlay = sourceContext.getUniqueOverlay();
        String sourceFilePath = sourceContext.overlayParametersOverTemplate("/table/primary-cluster.xml", overlay);
        sourceContext.setCluster(sourceFilePath);

        final Cluster sourceCluster = sourceContext.getCluster().getCluster();
        String sourceStorageUrl = ClusterHelper.getStorageUrl(sourceCluster);

        // copyTestDataToHDFS
        final String sourcePath = sourceStorageUrl + "/falcon/test/input/" + PARTITION_VALUE;
        FSUtils.copyResourceToHDFS("/apps/data/data.txt", "data.txt", sourcePath);

        sourceMetastoreUrl = ClusterHelper.getInterface(sourceCluster, Interfacetype.REGISTRY).getEndpoint();
        setupHiveMetastore(sourceMetastoreUrl, SOURCE_DATABASE_NAME, SOURCE_TABLE_NAME);
        HiveTestUtils.loadData(sourceMetastoreUrl, SOURCE_DATABASE_NAME, SOURCE_TABLE_NAME, sourcePath,
                PARTITION_VALUE);

        String targetFilePath = targetContext.overlayParametersOverTemplate("/table/bcp-cluster.xml", overlay);
        targetContext.setCluster(targetFilePath);

        final Cluster targetCluster = targetContext.getCluster().getCluster();
        targetMetastoreUrl = ClusterHelper.getInterface(targetCluster, Interfacetype.REGISTRY).getEndpoint();
        setupHiveMetastore(targetMetastoreUrl, TARGET_DATABASE_NAME, TARGET_TABLE_NAME);

        copyLibsToHDFS(targetCluster);
    }
View Full Code Here

        TestContext.executeWithURL("entity -delete -type cluster -name primary-cluster");
        TestContext.executeWithURL("entity -delete -type cluster -name bcp-cluster");
    }

    private void addPartitionToTarget() throws Exception {
        final Cluster targetCluster = targetContext.getCluster().getCluster();
        String targetStorageUrl = ClusterHelper.getStorageUrl(targetCluster);

        // copyTestDataToHDFS
        final String targetPath = targetStorageUrl + "/falcon/test/input/" + PARTITION_VALUE;
        FSUtils.copyResourceToHDFS("/apps/data/data.txt", "data.txt", targetPath);
View Full Code Here

        Unmarshaller unmarshaller = type.getUnmarshaller();
        ConfigurationStore store = ConfigurationStore.get();
        store.remove(type, name);
        switch (type) {
        case CLUSTER:
            Cluster cluster = (Cluster) unmarshaller.unmarshal(this.getClass().getResource(CLUSTER_XML));
            cluster.setName(name);
            ClusterHelper.getInterface(cluster, Interfacetype.WRITE).setEndpoint(conf.get("fs.default.name"));
            store.publish(type, cluster);
            break;

        case FEED:
View Full Code Here

        String filePath = context.overlayParametersOverTemplate(
                TestContext.CLUSTER_TEMPLATE, context.getUniqueOverlay());
        context.setCluster(filePath);

        Cluster cluster = context.getCluster().getCluster();
        fs = FileSystem.get(ClusterHelper.getConfiguration(cluster));
        storageUrl = ClusterHelper.getStorageUrl(cluster);
        metastoreUrl = ClusterHelper.getInterface(cluster, Interfacetype.REGISTRY).getEndpoint();

        copyDataAndScriptsToHDFS();
View Full Code Here

    public FileSystem getFileSystem() throws IOException {
        return FileSystem.get(conf);
    }

    private void buildClusterObject(String name) {
        clusterEntity = new Cluster();
        clusterEntity.setName(name);
        clusterEntity.setColo("local");
        clusterEntity.setDescription("Embeded cluster: " + name);

        Interfaces interfaces = new Interfaces();
View Full Code Here

        Map<String, String> overlay = sourceContext.getUniqueOverlay();
        String sourceFilePath = TestContext.overlayParametersOverTemplate("/table/primary-cluster.xml", overlay);
        sourceContext.setCluster(sourceFilePath);

        final Cluster sourceCluster = sourceContext.getCluster().getCluster();
        String sourceStorageUrl = ClusterHelper.getStorageUrl(sourceCluster);

        // copyTestDataToHDFS
        final String sourcePath = sourceStorageUrl + "/falcon/test/input/" + PARTITION_VALUE;
        FSUtils.copyResourceToHDFS("/apps/data/data.txt", "data.txt", sourcePath);

        sourceMetastoreUrl = ClusterHelper.getInterface(sourceCluster, Interfacetype.REGISTRY).getEndpoint();
        setupHiveMetastore(sourceMetastoreUrl, SOURCE_DATABASE_NAME, SOURCE_TABLE_NAME);
        HiveTestUtils.loadData(sourceMetastoreUrl, SOURCE_DATABASE_NAME, SOURCE_TABLE_NAME, sourcePath,
                PARTITION_VALUE);

        String targetFilePath = TestContext.overlayParametersOverTemplate("/table/bcp-cluster.xml", overlay);
        targetContext.setCluster(targetFilePath);

        final Cluster targetCluster = targetContext.getCluster().getCluster();
        targetMetastoreUrl = ClusterHelper.getInterface(targetCluster, Interfacetype.REGISTRY).getEndpoint();
        setupHiveMetastore(targetMetastoreUrl, TARGET_DATABASE_NAME, TARGET_TABLE_NAME);

        copyLibsToHDFS(targetCluster);
    }
View Full Code Here

        TestContext.executeWithURL("entity -delete -type cluster -name primary-cluster");
        TestContext.executeWithURL("entity -delete -type cluster -name bcp-cluster");
    }

    private void addPartitionToTarget() throws Exception {
        final Cluster targetCluster = targetContext.getCluster().getCluster();
        String targetStorageUrl = ClusterHelper.getStorageUrl(targetCluster);

        // copyTestDataToHDFS
        final String targetPath = targetStorageUrl + "/falcon/test/input/" + PARTITION_VALUE;
        FSUtils.copyResourceToHDFS("/apps/data/data.txt", "data.txt", targetPath);
View Full Code Here

TOP

Related Classes of org.apache.falcon.entity.v0.cluster.Cluster

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., which is owned by Oracle Inc. Contact: coftware@gmail.com.