Package org.apache.falcon.entity.v0.feed

Examples of org.apache.falcon.entity.v0.feed.Feed
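The snippets on this page show the Feed entity being unmarshalled from XML, published to the ConfigurationStore, and inspected during Oozie coordinator mapping and cross-entity validation. As a minimal, self-contained sketch of the basic loading pattern (the resource path and the import locations are assumptions based on the surrounding snippets, not a definitive recipe):

import java.net.URL;

import org.apache.falcon.entity.v0.EntityType;
import org.apache.falcon.entity.v0.feed.Feed;

public class FeedLoadSketch {
    public static void main(String[] args) throws Exception {
        // Any valid feed definition works; this path is borrowed from the tests below.
        URL resource = FeedLoadSketch.class.getResource("/config/feed/hive-table-feed.xml");

        // EntityType.FEED exposes a JAXB unmarshaller for feed definitions.
        Feed feed = (Feed) EntityType.FEED.getUnmarshaller().unmarshal(resource);

        // Basic metadata that the examples below rely on.
        System.out.println(feed.getName());
        System.out.println(feed.getFrequency());        // e.g. hours(1)
        System.out.println(feed.getTimezone().getID()); // e.g. UTC
    }
}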


                coord.getOutputEvents().getDataOut().get(0).getDataset());

        assertEquals(6, coord.getDatasets().getDatasetOrAsyncDataset().size());

        ConfigurationStore store = ConfigurationStore.get();
        Feed feed = store.get(EntityType.FEED, process.getInputs().getInputs().get(0).getFeed());
        SYNCDATASET ds = (SYNCDATASET) coord.getDatasets().getDatasetOrAsyncDataset().get(0);

        final org.apache.falcon.entity.v0.feed.Cluster feedCluster = feed.getClusters().getClusters().get(0);
        assertEquals(SchemaHelper.formatDateUTC(feedCluster.getValidity().getStart()), ds.getInitialInstance());
        assertEquals(feed.getTimezone().getID(), ds.getTimezone());
        assertEquals("${coord:" + feed.getFrequency().toString() + "}", ds.getFrequency());
        assertEquals("", ds.getDoneFlag());
        assertEquals(ds.getUriTemplate(),
                FeedHelper.createStorage(feedCluster, feed).getUriTemplate(LocationType.DATA));

        HashMap<String, String> props = new HashMap<String, String>();
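The assertions above compare a generated coordinator SYNCDATASET against the source feed. A minimal sketch of how those expected values are derived from a Feed, assuming the usual Falcon helper packages for FeedHelper and SchemaHelper (the method name is illustrative):

import org.apache.falcon.entity.FeedHelper;
import org.apache.falcon.entity.v0.SchemaHelper;
import org.apache.falcon.entity.v0.feed.Cluster;
import org.apache.falcon.entity.v0.feed.Feed;
import org.apache.falcon.entity.v0.feed.LocationType;

public class DatasetAttributeSketch {
    // Derives the values the assertions above compare against the coordinator dataset.
    static void printDatasetAttributes(Feed feed) throws Exception {
        Cluster feedCluster = feed.getClusters().getClusters().get(0);

        // Initial instance is the cluster validity start, formatted as UTC.
        System.out.println(SchemaHelper.formatDateUTC(feedCluster.getValidity().getStart()));

        // Dataset frequency is the feed frequency wrapped in a coord EL expression.
        System.out.println("${coord:" + feed.getFrequency().toString() + "}");

        // Timezone and URI template come straight from the feed and its storage.
        System.out.println(feed.getTimezone().getID());
        System.out.println(FeedHelper.createStorage(feedCluster, feed).getUriTemplate(LocationType.DATA));
    }
}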


    @Test (dataProvider = "secureOptions")
    public void testHiveProcessMapper(String secureOption) throws Exception {
        StartupProperties.get().setProperty(SecurityUtil.AUTHENTICATION_TYPE, secureOption);

        URL resource = this.getClass().getResource("/config/feed/hive-table-feed.xml");
        Feed inFeed = (Feed) EntityType.FEED.getUnmarshaller().unmarshal(resource);
        ConfigurationStore.get().publish(EntityType.FEED, inFeed);

        resource = this.getClass().getResource("/config/feed/hive-table-feed-out.xml");
        Feed outFeed = (Feed) EntityType.FEED.getUnmarshaller().unmarshal(resource);
        ConfigurationStore.get().publish(EntityType.FEED, outFeed);

        resource = this.getClass().getResource("/config/process/hive-process.xml");
        Process process = (Process) EntityType.PROCESS.getUnmarshaller().unmarshal(resource);
        ConfigurationStore.get().publish(EntityType.PROCESS, process);

    @Test (dataProvider = "secureOptions")
    public void testProcessMapperForTableStorage(String secureOption) throws Exception {
        StartupProperties.get().setProperty(SecurityUtil.AUTHENTICATION_TYPE, secureOption);

        URL resource = this.getClass().getResource("/config/feed/hive-table-feed.xml");
        Feed inFeed = (Feed) EntityType.FEED.getUnmarshaller().unmarshal(resource);
        ConfigurationStore.get().publish(EntityType.FEED, inFeed);

        resource = this.getClass().getResource("/config/feed/hive-table-feed-out.xml");
        Feed outFeed = (Feed) EntityType.FEED.getUnmarshaller().unmarshal(resource);
        ConfigurationStore.get().publish(EntityType.FEED, outFeed);

        resource = this.getClass().getResource("/config/process/pig-process-table.xml");
        Process process = (Process) EntityType.PROCESS.getUnmarshaller().unmarshal(resource);
        ConfigurationStore.get().publish(EntityType.PROCESS, process);
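Both tests above follow the same setup: unmarshal the feed and process definitions and publish them to the singleton ConfigurationStore so that later lookups by name resolve. A minimal sketch of that round trip, reusing only calls that appear in the snippets (the helper name is illustrative):

import java.net.URL;

import org.apache.falcon.entity.store.ConfigurationStore;
import org.apache.falcon.entity.v0.EntityType;
import org.apache.falcon.entity.v0.feed.Feed;

public class StoreRoundTripSketch {
    static Feed publishAndFetch(URL feedXml) throws Exception {
        Feed feed = (Feed) EntityType.FEED.getUnmarshaller().unmarshal(feedXml);

        // publish() makes the entity resolvable by (type, name) for the rest of the test.
        ConfigurationStore store = ConfigurationStore.get();
        store.publish(EntityType.FEED, feed);

        // Mappers and validators later fetch the same definition by name.
        return store.get(EntityType.FEED, feed.getName());
    }
}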

        if (process.getInputs() == null) {
            return storageType;
        }

        for (Input input : process.getInputs().getInputs()) {
            Feed feed = EntityUtil.getEntity(EntityType.FEED, input.getFeed());
            storageType = FeedHelper.getStorageType(feed, cluster);
            if (Storage.TYPE.TABLE == storageType) {
                break;
            }
        }

            validateHDFSPaths(process, clusterName);

            if (process.getInputs() != null) {
                for (Input input : process.getInputs().getInputs()) {
                    validateEntityExists(EntityType.FEED, input.getFeed());
                    Feed feed = ConfigurationStore.get().get(EntityType.FEED, input.getFeed());
                    CrossEntityValidations.validateFeedDefinedForCluster(feed, clusterName);
                    CrossEntityValidations.validateFeedRetentionPeriod(input.getStart(), feed, clusterName);
                    CrossEntityValidations.validateInstanceRange(process, input, feed);
                    validateInputPartition(input, feed);
                    validateOptionalInputsForTableStorage(feed, input);
                }
            }

            if (process.getOutputs() != null) {
                for (Output output : process.getOutputs().getOutputs()) {
                    validateEntityExists(EntityType.FEED, output.getFeed());
                    Feed feed = ConfigurationStore.get().get(EntityType.FEED, output.getFeed());
                    CrossEntityValidations.validateFeedDefinedForCluster(feed, clusterName);
                    CrossEntityValidations.validateInstance(process, output, feed);
                }
            }
        }
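The loops above delegate the actual checks to CrossEntityValidations. As a simplified sketch of the first check only (whether the feed is defined for the processing cluster), not the real helper implementation:

import org.apache.falcon.FalconException;
import org.apache.falcon.entity.store.ConfigurationStore;
import org.apache.falcon.entity.v0.EntityType;
import org.apache.falcon.entity.v0.feed.Cluster;
import org.apache.falcon.entity.v0.feed.Feed;

public class FeedClusterCheckSketch {
    // Throws if the named feed has no cluster entry matching clusterName.
    static void checkFeedDefinedForCluster(String feedName, String clusterName) throws FalconException {
        Feed feed = ConfigurationStore.get().get(EntityType.FEED, feedName);
        for (Cluster cluster : feed.getClusters().getClusters()) {
            if (cluster.getName().equals(clusterName)) {
                return;
            }
        }
        throw new FalconException("Feed " + feedName + " is not defined for cluster " + clusterName);
    }
}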

            if (!feeds.containsKey(lp.getInput())) {
                throw new ValidationException("Late Input: " + lp.getInput() + " is not specified in the inputs");
            }

            try {
                Feed feed = ConfigurationStore.get().get(EntityType.FEED, feeds.get(lp.getInput()));
                if (feed.getLateArrival() == null) {
                    throw new ValidationException(
                            "Late Input feed: " + lp.getInput() + " is not configured with late arrival cut-off");
                }
            } catch (FalconException e) {
                throw new ValidationException(e);

        switch (entity.getEntityType()) {
        case CLUSTER:
            return entity;

        case FEED:
            Feed feed = (Feed) entity.copy();
            Cluster feedCluster = FeedHelper.getCluster(feed, clusterName);
            Iterator<Cluster> itr = feed.getClusters().getClusters().iterator();
            while (itr.hasNext()) {
                Cluster cluster = itr.next();
                // In addition to retaining the required cluster, retain the source clusters if this is the
                // target cluster.
                //1. Retain cluster if cluster n
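The FEED branch above is cut off mid-comment. As a simplified sketch of the pruning it performs, keeping only the requested cluster and ignoring the source/target subtlety the comment refers to (so this is not the complete Falcon behaviour):

import java.util.Iterator;

import org.apache.falcon.entity.v0.feed.Cluster;
import org.apache.falcon.entity.v0.feed.Feed;

public class FeedClusterPruneSketch {
    // Returns a copy of the feed with every cluster other than clusterName removed.
    static Feed pruneToCluster(Feed feed, String clusterName) {
        Feed copy = (Feed) feed.copy();
        Iterator<Cluster> itr = copy.getClusters().getClusters().iterator();
        while (itr.hasNext()) {
            if (!itr.next().getName().equals(clusterName)) {
                itr.remove();
            }
        }
        return copy;
    }
}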

        case CLUSTER:
            clusters.add(entity.getName());
            break;

        case FEED:
            Feed feed = (Feed) entity;
            for (Cluster cluster : feed.getClusters().getClusters()) {
                clusters.add(cluster.getName());
            }
            break;

        case PROCESS:

        return propertiesMap;
    }

    private void addOptionalInputProperties(Properties properties, Input in, String clusterName)
        throws FalconException {
        Feed feed = EntityUtil.getEntity(EntityType.FEED, in.getFeed());
        org.apache.falcon.entity.v0.feed.Cluster cluster = FeedHelper.getCluster(feed, clusterName);
        String inName = in.getName();
        properties.put(inName + ".frequency", String.valueOf(feed.getFrequency().getFrequency()));
        properties.put(inName + ".freq_timeunit", mapToCoordTimeUnit(feed.getFrequency().getTimeUnit()).name());
        properties.put(inName + ".timezone", feed.getTimezone().getID());
        properties.put(inName + ".end_of_duration", Timeunit.NONE.name());
        properties.put(inName + ".initial-instance", SchemaHelper.formatDateUTC(cluster.getValidity().getStart()));
        properties.put(inName + ".done-flag", "notused");

        String locPath = FeedHelper.createStorage(clusterName, feed)

        List<String> inputFeeds = new ArrayList<String>();
        List<String> inputPaths = new ArrayList<String>();
        List<String> inputFeedStorageTypes = new ArrayList<String>();
        for (Input input : process.getInputs().getInputs()) {
            Feed feed = EntityUtil.getEntity(EntityType.FEED, input.getFeed());
            Storage storage = FeedHelper.createStorage(cluster, feed);

            if (!input.isOptional()) {
                if (coord.getDatasets() == null) {
                    coord.setDatasets(new DATASETS());
                }
                if (coord.getInputEvents() == null) {
                    coord.setInputEvents(new INPUTEVENTS());
                }

                SYNCDATASET syncdataset = createDataSet(feed, cluster, storage, input.getName(), LocationType.DATA);
                coord.getDatasets().getDatasetOrAsyncDataset().add(syncdataset);

                DATAIN datain = createDataIn(input);
                coord.getInputEvents().getDataIn().add(datain);
            }

            String inputExpr = null;
            if (storage.getType() == Storage.TYPE.FILESYSTEM) {
                inputExpr = getELExpression("dataIn('" + input.getName() + "', '" + input.getPartition() + "')");
                props.put(input.getName(), inputExpr);
            } else if (storage.getType() == Storage.TYPE.TABLE) {
                inputExpr = "${coord:dataIn('" + input.getName() + "')}";
                propagateCatalogTableProperties(input, (CatalogStorage) storage, props);
            }

            inputFeeds.add(feed.getName());
            inputPaths.add(inputExpr);
            inputFeedStorageTypes.add(storage.getType().name());
        }

        propagateLateDataProperties(inputFeeds, inputPaths, inputFeedStorageTypes, props);
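The mapper above resolves each input feed's storage to decide between filesystem and table handling. A minimal sketch of that resolution, using the feed-cluster overload of FeedHelper.createStorage shown in the first snippet (package locations are assumed; error handling omitted):

import org.apache.falcon.entity.FeedHelper;
import org.apache.falcon.entity.Storage;
import org.apache.falcon.entity.v0.feed.Cluster;
import org.apache.falcon.entity.v0.feed.Feed;
import org.apache.falcon.entity.v0.feed.LocationType;

public class StorageResolutionSketch {
    static void describeStorage(Feed feed, Cluster feedCluster) throws Exception {
        Storage storage = FeedHelper.createStorage(feedCluster, feed);

        if (storage.getType() == Storage.TYPE.FILESYSTEM) {
            // Filesystem feeds resolve to a path template for the DATA location.
            System.out.println("fs template: " + storage.getUriTemplate(LocationType.DATA));
        } else if (storage.getType() == Storage.TYPE.TABLE) {
            // Table feeds resolve to a Hive catalog URI template rather than a raw path.
            System.out.println("table template: " + storage.getUriTemplate(LocationType.DATA));
        }
    }
}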


