Package com.asakusafw.runtime.directio

Examples of com.asakusafw.runtime.directio.OutputAttemptContext
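
An OutputAttemptContext identifies a single task attempt's output within an output transaction. Its four-argument constructor, used throughout the examples below, takes the transaction ID, the attempt ID, the ID of the target output (data source), and a Counter that accumulates output statistics.

A minimal construction sketch (the literal IDs are placeholders, not values taken from any example):

        OutputAttemptContext context = new OutputAttemptContext(
                "transaction-id", // ID of the enclosing output transaction
                "attempt-id",     // ID of this task attempt
                "output-id",      // ID of the target output (data source)
                new Counter());   // accumulates output statistics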


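Creating one attempt context per data source in a repository: the transaction and output IDs come from a shared OutputTransactionContext, and the loop index serves as the attempt ID (excerpt):
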
        int index = 0;
        for (String path : repo.getContainerPaths()) {
            String id = repo.getRelatedId(path);
            DirectDataSource ds = repo.getRelatedDataSource(path);
            OutputTransactionContext txContext = HadoopDataSourceUtil.createContext(executionId, id);
            OutputAttemptContext aContext = new OutputAttemptContext(
                    txContext.getTransactionId(),
                    String.valueOf(index),
                    txContext.getOutputId(),
                    new Counter());
            // ... rest of the loop body elided in this excerpt ...
        }


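Opening a ModelOutput against a data source and wrapping it so that close() commits and cleans up both the attempt and the enclosing transaction (excerpt):
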
            throw new IllegalArgumentException("dataType must not be null"); //$NON-NLS-1$
        }
        if (description == null) {
            throw new IllegalArgumentException("description must not be null"); //$NON-NLS-1$
        }
        final OutputAttemptContext outputContext = createOutputContext();
        DataFormat<T> format = createFormat(dataType, description.getFormat());
        String outputPath = toOutputName(description.getResourcePattern());
        LOG.info("Opening {}/{} for output (id={}, description={})", new Object[] {
                fullPath,
                outputPath,
                id,
                description.getClass().getName(),
        });
        DataDefinition<T> definition = SimpleDataDefinition.newInstance(dataType, format);
        try {
            dataSource.setupTransactionOutput(outputContext.getTransactionContext());
            dataSource.setupAttemptOutput(outputContext);
            Counter counter = new Counter();
            final ModelOutput<T> output = dataSource.openOutput(
                    outputContext, definition, basePath, outputPath, counter);
            return new ModelOutput<T>() {
                @Override
                public void write(T model) throws IOException {
                    output.write(model);
                }
                @Override
                public void close() throws IOException {
                    output.close();
                    try {
                        // commit the attempt first, then the enclosing transaction,
                        // cleaning up each stage after its commit
                        dataSource.commitAttemptOutput(outputContext);
                        dataSource.cleanupAttemptOutput(outputContext);
                        dataSource.commitTransactionOutput(outputContext.getTransactionContext());
                        dataSource.cleanupTransactionOutput(outputContext.getTransactionContext());
                    } catch (InterruptedException e) {
                        throw (IOException) new InterruptedIOException("interrupted").initCause(e);
                    }
                }
            };
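
A hypothetical call site for a method like the one above. The helper name openModelOutput, the Record model class, and the records collection are illustrative assumptions; only the write/close behavior comes from the wrapper shown above:

        ModelOutput<Record> output = openModelOutput(Record.class, description); // hypothetical helper
        try {
            for (Record record : records) {
                output.write(record); // delegates to the wrapped output
            }
        } finally {
            output.close(); // flushes, then commits the attempt and the transaction
        }

Note that the wrapper's close() commits unconditionally, even after a failed write.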

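Building a context outside any running task, with freshly generated UUIDs as the transaction and attempt IDs:
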
    private OutputAttemptContext createOutputContext() {
        // fresh, unique IDs: this context is not tied to any running task
        String tx = UUID.randomUUID().toString();
        String attempt = UUID.randomUUID().toString();
        return new OutputAttemptContext(tx, attempt, id, new Counter());
    }

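Deriving a context from a running task: the transaction and attempt IDs are resolved from the task attempt for the given data source ID (excerpt; judging by its call sites elsewhere on this page, this is HadoopDataSourceUtil.createContext):
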
        if (datasourceId == null) {
            throw new IllegalArgumentException("datasourceId must not be null"); //$NON-NLS-1$
        }
        String transactionId = getTransactionId(context, datasourceId);
        String attemptId = getAttemptId(context, datasourceId);
        return new OutputAttemptContext(transactionId, attemptId, datasourceId, createCounter(context));
    }

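A mapper that writes its input directly to a data source: the base path and resource pattern are resolved through a VariableTable (with * replaced by the zero-padded task ID), records are written in a do/while loop, and file, record, and size statistics are published as counters (excerpt):
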
            VariableTable variables = new VariableTable(VariableTable.RedefineStrategy.IGNORE);
            variables.defineVariables(arguments);

            String path = variables.parse(rawBasePath, false);
            String id = repository.getRelatedId(path);
            OutputAttemptContext outputContext = HadoopDataSourceUtil.createContext(context, id);
            DataFormat<? super T> format = ReflectionUtils.newInstance(dataFormatClass, context.getConfiguration());
            DirectDataSource datasource = repository.getRelatedDataSource(path);
            String basePath = repository.getComponentPath(path);
            String unresolvedResourcePath = rawResourcePath.replaceAll(
                    Pattern.quote("*"),
                    String.format("%04d", context.getTaskAttemptID().getTaskID().getId()));
            String resourcePath = variables.parse(unresolvedResourcePath);
            DataDefinition<? super T> definition = SimpleDataDefinition.newInstance(dataType, format);

            if (log.isDebugEnabled()) {
                log.debug(MessageFormat.format(
                        "Open mapper output (id={0}, basePath={1}, resourcePath={2})",
                        id,
                        basePath,
                        resourcePath));
            }

            int records = 0;
            ModelOutput<? super T> output = datasource.openOutput(
                    outputContext,
                    definition,
                    basePath,
                    resourcePath,
                    outputContext.getCounter());
            try {
                do {
                    output.write(context.getCurrentValue());
                    records++;
                } while (context.nextKeyValue());
            } finally {
                if (log.isDebugEnabled()) {
                    log.debug(MessageFormat.format(
                            "Start cleanup directly output Mapper {0}@{1}",
                            getClass().getName(),
                            context.getTaskAttemptID()));
                }
                output.close();
            }
            // publish statistics: the task output record counter, plus
            // per-datasource file/record/size counters
            org.apache.hadoop.mapreduce.Counter recordCounter = JobCompatibility.getTaskOutputRecordCounter(context);
            recordCounter.increment(records);
            context.getCounter(COUNTER_GROUP, id + ".files").increment(1);
            context.getCounter(COUNTER_GROUP, id + ".records").increment(records);
            context.getCounter(COUNTER_GROUP, id + ".size").increment(outputContext.getCounter().get());
        }
    }

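The reducer-side counterpart: the output group carried by the key supplies the path, data type, and format for the context and data definition. The opening of the signature is reconstructed here, and the key type AbstractDirectOutputKey is an assumption inferred from the value type's naming (excerpt):
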
            Iterable<AbstractDirectOutputValue> values,
            Context context) throws IOException , InterruptedException {
        DirectOutputGroup group = (DirectOutputGroup) key.getGroupObject().getObject();
        String path = variables.parse(group.getPath(), false);
        String id = repository.getRelatedId(path);
        OutputAttemptContext outputContext = HadoopDataSourceUtil.createContext(context, id);
        DataDefinition definition = SimpleDataDefinition.newInstance(
                group.getDataType(),
                configure(context, group.getFormat()));
        DirectDataSource datasource = repository.getRelatedDataSource(path);
        String basePath = repository.getComponentPath(path);

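Test setup with fixed transaction and attempt IDs ("tx", "atmpt") and a profile-supplied data source ID (excerpt):
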
                conf,
                "testing",
                "testing",
                new Path(mapping.toURI()),
                new Path(temporary.toURI()));
        context = new OutputAttemptContext("tx", "atmpt", profile.getId(), new Counter());
    }

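Task setup: for each container path an attempt context is created and the related data source prepares its attempt output; interruption is converted into an InterruptedIOException that names the transaction and attempt IDs (excerpt):
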
                            "Start directio task setup for datasource: datasource={0} job={1}, task={2}",
                            id,
                            taskContext.getJobID(),
                            taskContext.getTaskAttemptID()));
                }
                OutputAttemptContext context = HadoopDataSourceUtil.createContext(taskContext, id);
                try {
                    DirectDataSource repo = repository.getRelatedDataSource(containerPath);
                    repo.setupAttemptOutput(context);
                } catch (IOException e) {
                    LOG.error(MessageFormat.format(
                            "Failed directio task setup: datasource={0} (job={1}, task={2})",
                            id,
                            taskContext.getJobID(),
                            taskContext.getTaskAttemptID()), e);
                    throw e;
                } catch (InterruptedException e) {
                    throw (IOException) new InterruptedIOException(MessageFormat.format(
                            "Interrupted while setup attempt: {0}, {1} (path={2})",
                            context.getTransactionId(),
                            context.getAttemptId(),
                            containerPath)).initCause(e);
                }
                context.getCounter().add(1);
            }
            if (LOG.isDebugEnabled()) {
                LOG.debug(MessageFormat.format(
                        "Finish directio task setup: job={0}, task={1}",
                        taskContext.getJobID(),
                        taskContext.getTaskAttemptID()));
            }

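Task commit: the same pattern, but committing each attempt output, with runtime failures logged as fatal before rethrowing (excerpt):
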
                            "Start directio task commit for datasource: datasource={0} job={1}, task={2}",
                            id,
                            taskContext.getJobID(),
                            taskContext.getTaskAttemptID()));
                }
                OutputAttemptContext context = HadoopDataSourceUtil.createContext(taskContext, id);
                try {
                    DirectDataSource repo = repository.getRelatedDataSource(containerPath);
                    repo.commitAttemptOutput(context);
                } catch (IOException e) {
                    LOG.error(MessageFormat.format(
                            "Failed directio task commit: datasource={0} (job={1}, task={2})",
                            id,
                            taskContext.getJobID(),
                            taskContext.getTaskAttemptID()), e);
                    throw e;
                } catch (InterruptedException e) {
                    throw (IOException) new InterruptedIOException(MessageFormat.format(
                            "Interrupted while commit attempt: {0}, {1} (path={2})",
                            context.getTransactionId(),
                            context.getAttemptId(),
                            containerPath)).initCause(e);
                } catch (RuntimeException e) {
                    LOG.fatal("TASK COMMIT FAILED", e);
                    throw e;
                }
                context.getCounter().add(1);
            }
            doCleanupTask(taskContext);
            if (LOG.isDebugEnabled()) {
                LOG.debug(MessageFormat.format(
                        "Finish directio task commit: job={0}, task={1}",
                        taskContext.getJobID(),
                        taskContext.getTaskAttemptID()));
            }
