Package: com.asakusafw.runtime.directio

Examples of com.asakusafw.runtime.directio.Counter


        if (description == null) {
            throw new IllegalArgumentException("description must not be null"); //$NON-NLS-1$
        }
        DataFormat<T> format = createFormat(dataType, description.getFormat());
        final DataDefinition<T> definition = SimpleDataDefinition.newInstance(dataType, format);
        final Counter counter = new Counter();
        try {
            FilePattern pattern = toInputPattern(description.getResourcePattern());
            LOG.info("Opening {}/{} for output (id={}, description={})", new Object[] {
                    fullPath,
                    pattern,
View Full Code Here


    }

    private OutputAttemptContext createOutputContext() {
        String tx = UUID.randomUUID().toString();
        String attempt = UUID.randomUUID().toString();
        return new OutputAttemptContext(tx, attempt, id, new Counter());
    }
View Full Code Here

            repository = HadoopDataSourceUtil.loadRepository(getConf());
        }
        String basePath = repository.getComponentPath(path);
        DirectDataSource source = repository.getRelatedDataSource(path);
        for (FilePattern pattern : patterns) {
            source.delete(basePath, pattern, recursive, new Counter());
        }
        return 0;
    }
View Full Code Here

            repository = HadoopDataSourceUtil.loadRepository(getConf());
        }
        String basePath = repository.getComponentPath(path);
        DirectDataSource source = repository.getRelatedDataSource(path);
        for (FilePattern pattern : patterns) {
            List<ResourceInfo> list = source.list(basePath, pattern, new Counter());
            for (ResourceInfo info : list) {
                System.out.println(info.getPath());
            }
        }
        return 0;
View Full Code Here

        HadoopFileFormat<Object> unsafe = unsafe(support);

        model.set("value", new Text("Hello, world!"));

        ModelOutput<Object> writer = unsafe.createOutput(model.unwrap().getClass(),  fs, path, new Counter());
        try {
            writer.write(model.unwrap());
        } finally {
            writer.close();
        }

        ModelInput<Object> reader = unsafe.createInput(
                model.unwrap().getClass(), fs, path, 0, fs.getFileStatus(path).getLen(), new Counter());
        try {
            Object buffer = loaded.newModel("Simple").unwrap();
            assertThat(reader.readTo(buffer), is(true));
            assertThat(buffer, is(buffer));
            assertThat(reader.readTo(buffer), is(false));
View Full Code Here

        }
        if (datasourceId == null) {
            throw new IllegalArgumentException("datasourceId must not be null"); //$NON-NLS-1$
        }
        String transactionId = getTransactionId(executionId);
        return new OutputTransactionContext(transactionId, datasourceId, new Counter());
    }
View Full Code Here

        if (context instanceof Progressable) {
            return new ProgressableCounter((Progressable) context);
        } else if (context instanceof org.apache.hadoop.mapred.JobContext) {
            return new ProgressableCounter(((org.apache.hadoop.mapred.JobContext) context).getProgressible());
        } else {
            return new Counter();
        }
    }
View Full Code Here

                configure(context, group.getFormat()));
        DirectDataSource datasource = repository.getRelatedDataSource(path);
        String basePath = repository.getComponentPath(path);
        String resourcePath = variables.parse(group.getResourcePath());

        Counter counter = new Counter();
        ModelOutput output = datasource.openOutput(outputContext, definition, basePath, resourcePath, counter);

        long records = 0;
        try {
            for (Union union : values) {
                Object object = union.getObject();
                output.write(object);
                records++;
            }
        } finally {
            output.close();
        }
        recordCounter.increment(records);
        context.getCounter(COUNTER_GROUP, id + ".files").increment(1);
        context.getCounter(COUNTER_GROUP, id + ".records").increment(records);
        context.getCounter(COUNTER_GROUP, id + ".size").increment(counter.get());
    }
View Full Code Here

    @Override
    public void setupAttemptOutput(OutputAttemptContext context) throws IOException, InterruptedException {
        if (context == null) {
            throw new IllegalArgumentException("context must not be null"); //$NON-NLS-1$
        }
        Counter counter = context.getCounter();
        heartbeat.register(counter);
        try {
            entity.setupAttemptOutput(context);
        } finally {
            heartbeat.unregister(counter);
View Full Code Here

    @Override
    public void commitAttemptOutput(OutputAttemptContext context) throws IOException, InterruptedException {
        if (context == null) {
            throw new IllegalArgumentException("context must not be null"); //$NON-NLS-1$
        }
        Counter counter = context.getCounter();
        heartbeat.register(counter);
        try {
            entity.commitAttemptOutput(context);
        } finally {
            heartbeat.unregister(counter);
View Full Code Here

TOP

Related Classes of com.asakusafw.runtime.directio.Counter

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact coftware#gmail.com.