Package org.apache.falcon.entity.v0.process

Examples of org.apache.falcon.entity.v0.process.Cluster
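The fragments below are taken from Falcon's test code and share one pattern: unmarshal a cluster definition, resolve its interfaces through ClusterHelper, and publish entities to the ConfigurationStore so that parsers and mappers can look them up by name. A minimal sketch of that pattern, assuming the cluster-entity flavour of Cluster (org.apache.falcon.entity.v0.cluster.Cluster) and a placeholder resource path:

        // Sketch only: the resource path and variable names are placeholders, not taken from the examples below.
        InputStream stream = getClass().getResourceAsStream("/config/cluster/cluster-template.xml");
        Cluster cluster = (Cluster) EntityType.CLUSTER.getUnmarshaller().unmarshal(stream);
        stream.close();

        // ClusterHelper resolves the endpoints declared in the cluster definition XML.
        String storageUrl = ClusterHelper.getStorageUrl(cluster);
        String registryUrl = ClusterHelper.getInterface(cluster, Interfacetype.REGISTRY).getEndpoint();

        // Publish the entity so feed/process parsers can resolve it by name (e.g. "corp").
        ConfigurationStore.get().publish(EntityType.CLUSTER, cluster);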


        overlay = context.getUniqueOverlay();

        String filePath = context.overlayParametersOverTemplate(CLUSTER_TEMPLATE, overlay);
        context.setCluster(filePath);

        final Cluster cluster = context.getCluster().getCluster();
        final String storageUrl = ClusterHelper.getStorageUrl(cluster);

        copyDataAndScriptsToHDFS(storageUrl);
        copyLibsToHDFS(cluster, storageUrl);
    }
View Full Code Here


        overlay = context.getUniqueOverlay();
        String filePath = context.overlayParametersOverTemplate(CLUSTER_TEMPLATE, overlay);
        context.setCluster(filePath);

        final Cluster cluster = context.getCluster().getCluster();
        final String storageUrl = ClusterHelper.getStorageUrl(cluster);
        metastoreUrl = ClusterHelper.getInterface(cluster, Interfacetype.REGISTRY).getEndpoint();

        copyDataAndScriptsToHDFS(storageUrl);
        copyLibsToHDFS(cluster, storageUrl);
View Full Code Here

    @BeforeMethod
    public void setUp() throws Exception {
        super.setup();

        ConfigurationStore store = ConfigurationStore.get();
        Cluster cluster = store.get(EntityType.CLUSTER, "corp");
        ClusterHelper.getInterface(cluster, Interfacetype.WRITE).setEndpoint(hdfsUrl);
        ClusterHelper.getInterface(cluster, Interfacetype.REGISTRY).setEndpoint("thrift://localhost:49083");
        fs = new Path(hdfsUrl).getFileSystem(new Configuration());
        fs.create(new Path(ClusterHelper.getLocation(cluster, "working"), "libext/PROCESS/ext.jar")).close();
View Full Code Here

        resource = this.getClass().getResource("/config/process/hive-process.xml");
        Process process = (Process) EntityType.PROCESS.getUnmarshaller().unmarshal(resource);
        ConfigurationStore.get().publish(EntityType.PROCESS, process);

        Cluster cluster = ConfigurationStore.get().get(EntityType.CLUSTER, "corp");
        OozieProcessMapper mapper = new OozieProcessMapper(process);
        Path bundlePath = new Path("/tmp/seetharam", EntityUtil.getStagingPath(process));
        mapper.map(cluster, bundlePath);
        assertTrue(fs.exists(bundlePath));
View Full Code Here

        resource = this.getClass().getResource("/config/process/pig-process-table.xml");
        Process process = (Process) EntityType.PROCESS.getUnmarshaller().unmarshal(resource);
        ConfigurationStore.get().publish(EntityType.PROCESS, process);

        Cluster cluster = ConfigurationStore.get().get(EntityType.CLUSTER, "corp");
        OozieProcessMapper mapper = new OozieProcessMapper(process);
        Path bundlePath = new Path("/", EntityUtil.getStagingPath(process));
        mapper.map(cluster, bundlePath);
        assertTrue(fs.exists(bundlePath));
View Full Code Here

        }
    }

    private WORKFLOWAPP initializeProcessMapper(Process process, String throttle, String timeout)
        throws Exception {
        Cluster cluster = ConfigurationStore.get().get(EntityType.CLUSTER, "corp");
        OozieProcessMapper mapper = new OozieProcessMapper(process);
        Path bundlePath = new Path("/", EntityUtil.getStagingPath(process));
        mapper.map(cluster, bundlePath);
        assertTrue(fs.exists(bundlePath));
View Full Code Here

        InputStream stream = new FileInputStream(filePath);
        Cluster cluster = (Cluster) EntityType.CLUSTER.getUnmarshaller().unmarshal(stream);
        Assert.assertNotNull(cluster);
        cluster.setColo("default")// validations will be ignored if not default & tests fail

        Interface anInterface = ClusterHelper.getInterface(cluster, interfacetype);
        anInterface.setEndpoint(endpoint);

        File tmpFile = context.getTempFile();
        EntityType.CLUSTER.getMarshaller().marshal(cluster, tmpFile);
        ClientResponse response = context.submitFileToFalcon(EntityType.CLUSTER, tmpFile.getAbsolutePath());
        context.assertFailure(response);
View Full Code Here

    @BeforeClass
    public void setup() throws Exception {
        InputStream stream = this.getClass().getResourceAsStream(CLUSTER_XML);
        clusterEntity = clusterParser.parse(stream);
        stream.close();
        Interface registry = ClusterHelper.getInterface(clusterEntity, Interfacetype.REGISTRY);
        registry.setEndpoint("thrift://localhost:9083");
        ConfigurationStore.get().publish(EntityType.CLUSTER, clusterEntity);

        stream = this.getClass().getResourceAsStream(FS_FEED_UNIFORM);
        fsFeedWithUniformStorage = feedParser.parse(stream);
        stream.close();
View Full Code Here

    @Test (dataProvider = "tableDataProvider")
    public void testGetTable(Feed feed, String dataPath) {
        org.apache.falcon.entity.v0.feed.Cluster feedCluster =
                FeedHelper.getCluster(feed, clusterEntity.getName());
        CatalogTable table = FeedHelper.getTable(feedCluster, feed);
        Assert.assertEquals(table.getUri(), dataPath);
    }
View Full Code Here

        overlay.put("tableUri", TABLE_URI);

        String filePath = context.overlayParametersOverTemplate("/hive-table-feed.xml", overlay);
        InputStream stream = new FileInputStream(filePath);
        FeedEntityParser parser = (FeedEntityParser) EntityParserFactory.getParser(EntityType.FEED);
        Feed feed = parser.parse(stream);
        Assert.assertNotNull(feed);

        final LateArrival lateArrival = new LateArrival();
        lateArrival.setCutOff(new Frequency("4", Frequency.TimeUnit.hours));
        feed.setLateArrival(lateArrival);

        StringWriter stringWriter = new StringWriter();
        Marshaller marshaller = EntityType.FEED.getMarshaller();
        marshaller.marshal(feed, stringWriter);
        System.out.println(stringWriter.toString());
View Full Code Here
