Package com.asakusafw.runtime.directio

Examples of com.asakusafw.runtime.directio.OutputTransactionContext
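These snippets, taken from the Asakusa Framework runtime and its tests, show how an OutputTransactionContext is created and threaded through the Direct I/O transactional output lifecycle: setup, rollforward (commit), cleanup, and abort.

As orientation, here is a minimal sketch of constructing the two context types by hand, using only the constructors and getters that appear in the snippets below; the ID values are placeholders, and the imports assume the asakusafw runtime is on the classpath.

    import com.asakusafw.runtime.directio.Counter;
    import com.asakusafw.runtime.directio.OutputAttemptContext;
    import com.asakusafw.runtime.directio.OutputTransactionContext;

    public class ContextSketch {
        public static void main(String[] args) {
            // transaction-scoped context: (transactionId, outputId, counter)
            OutputTransactionContext txContext =
                    new OutputTransactionContext("tx-0001", "datasource-1", new Counter());

            // the attempt-scoped context reuses the transaction/output IDs
            // and adds its own attempt ID (the task index in the snippets below)
            OutputAttemptContext aContext = new OutputAttemptContext(
                    txContext.getTransactionId(),
                    "0",
                    txContext.getOutputId(),
                    new Counter());

            System.out.println(txContext.getTransactionId() + "/" + txContext.getOutputId());
        }
    }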


        fs.create(cmPath).close(); // touch the (empty) commit mark file
        int index = 0;
        for (String path : repo.getContainerPaths()) {
            String id = repo.getRelatedId(path);
            DirectDataSource ds = repo.getRelatedDataSource(path);
            // transaction context derived from the execution ID and datasource ID
            OutputTransactionContext txContext = HadoopDataSourceUtil.createContext(executionId, id);
            // the attempt context shares the transaction/output IDs and adds its own attempt ID
            OutputAttemptContext aContext = new OutputAttemptContext(
                    txContext.getTransactionId(),
                    String.valueOf(index),
                    txContext.getOutputId(),
                    new Counter());

            ds.setupTransactionOutput(txContext);
            ds.setupAttemptOutput(aContext);
            ModelOutput<StringBuilder> output = ds.openOutput(
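The snippet above is truncated at openOutput; the rest of the lifecycle appears piecemeal in the snippets that follow (commitTransactionOutput, cleanupTransactionOutput). A minimal sketch of the complete happy path, assuming commitAttemptOutput is the attempt-level counterpart of the transaction-level commit shown below (the snippets on this page do not show the attempt-level commit themselves):

    // A hedged sketch; ds, txContext, and aContext are prepared as in the
    // snippet above, and error handling/retries are elided.
    static void writeLifecycle(DirectDataSource ds,
            OutputTransactionContext txContext,
            OutputAttemptContext aContext) throws IOException, InterruptedException {
        ds.setupTransactionOutput(txContext);   // prepare transaction-scoped staging
        ds.setupAttemptOutput(aContext);        // prepare attempt-scoped staging
        // ... open a ModelOutput via ds.openOutput(...) and write records ...
        ds.commitAttemptOutput(aContext);       // publish this attempt (assumed method)
        ds.commitTransactionOutput(txContext);  // roll the transaction forward
        ds.cleanupTransactionOutput(txContext); // discard leftover temporaries
    }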


        }
        if (datasourceId == null) {
            throw new IllegalArgumentException("datasourceId must not be null"); //$NON-NLS-1$
        }
        String transactionId = getTransactionId(context, datasourceId);
        return new OutputTransactionContext(transactionId, datasourceId, createCounter(context));
    }

        }
        if (datasourceId == null) {
            throw new IllegalArgumentException("datasourceId must not be null"); //$NON-NLS-1$
        }
        String transactionId = getTransactionId(executionId);
        return new OutputTransactionContext(transactionId, datasourceId, new Counter());
    }
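Both overloads above converge on the same shape: resolve a transaction ID (from a JobContext or an execution ID), then pair it with the datasource ID and a Counter. A usage sketch with placeholder IDs:

    // "exec-0001" and "datasource-1" are placeholder IDs
    OutputTransactionContext context =
            HadoopDataSourceUtil.createContext("exec-0001", "datasource-1");
    String transactionId = context.getTransactionId(); // derived from the execution ID
    String outputId = context.getOutputId();           // the datasource ID passed in
    context.getCounter().add(1);                       // progress accounting, as in the snippets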

                    LOG.debug(MessageFormat.format(
                            "Start directio job setup: datasource={0} (job={1})",
                            id,
                            jobContext.getJobID()));
                }
                OutputTransactionContext context = HadoopDataSourceUtil.createContext(jobContext, id);
                try {
                    DirectDataSource repo = repository.getRelatedDataSource(containerPath);
                    repo.setupTransactionOutput(context);
                } catch (IOException e) {
                    LOG.error(MessageFormat.format(
                            "Failed directio job setup: datasource={0} (job={1})",
                            id,
                            jobContext.getJobID()), e);
                    throw e;
                } catch (InterruptedException e) {
                    throw (IOException) new InterruptedIOException(MessageFormat.format(
                            "Interrupted while setup transaction: {0}, (path={1})",
                            context.getTransactionId(),
                            containerPath)).initCause(e);
                }
                context.getCounter().add(1);
            }
            if (LOG.isDebugEnabled()) {
                LOG.debug(MessageFormat.format(
                        "Finish directio job setup: job={0}",
                        jobContext.getJobID()));
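Every phase in these committer snippets translates InterruptedException into an InterruptedIOException, since the Hadoop committer API only declares IOException. A small helper capturing that repeated pattern (the message text here is illustrative, not the framework's exact wording):

    // imports: java.io.InterruptedIOException, java.text.MessageFormat
    static IOException asInterruptedIo(
            OutputTransactionContext context, String path, InterruptedException e) {
        // preserve the interruption as the cause, exactly as the snippets do
        return (IOException) new InterruptedIOException(MessageFormat.format(
                "Interrupted while processing transaction: {0}, (path={1})",
                context.getTransactionId(),
                path)).initCause(e);
    }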

            for (OutputSpec spec : outputSpecs) {
                if (spec.deletePatterns.isEmpty()) {
                    continue;
                }
                String id = repository.getRelatedId(spec.basePath);
                OutputTransactionContext context = HadoopDataSourceUtil.createContext(jobContext, id);
                try {
                    DirectDataSource repo = repository.getRelatedDataSource(spec.basePath);
                    String basePath = repository.getComponentPath(spec.basePath);
                    for (String pattern : spec.deletePatterns) {
                        FilePattern resources = FilePattern.compile(pattern);
                        if (LOG.isInfoEnabled()) {
                            LOG.info(MessageFormat.format(
                                    "Deleting output: datasource={0}, basePath={1}, pattern={2}",
                                    id,
                                    basePath,
                                    pattern));
                        }
                        boolean succeed = repo.delete(basePath, resources, true, context.getCounter());
                        if (LOG.isDebugEnabled()) {
                            LOG.debug(MessageFormat.format(
                                    "Deleted output (succeed={3}): datasource={0}, basePath={1}, pattern={2}",
                                    id,
                                    basePath,
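The deletion snippet compiles each pattern into a FilePattern and delegates the recursive delete to the datasource, charging progress to the transaction context's Counter. Reduced to its core, and assuming repo, basePath, deletePatterns, and context are prepared as above:

    for (String pattern : deletePatterns) {
        FilePattern resources = FilePattern.compile(pattern);
        // the third argument (true) requests recursive deletion, as in the snippet
        boolean succeed = repo.delete(basePath, resources, true, context.getCounter());
        // the original logs the outcome at DEBUG level here
    }

The next two snippets apply the same per-datasource loop to the rollforward phase (commitTransactionOutput) and the cleanup phase (cleanupTransactionOutput).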

                    LOG.debug(MessageFormat.format(
                            "Start directio job rollforward: datasource={0} (job={1})",
                            id,
                            jobContext.getJobID()));
                }
                OutputTransactionContext context = HadoopDataSourceUtil.createContext(jobContext, id);
                try {
                    DirectDataSource repo = repository.getRelatedDataSource(containerPath);
                    repo.commitTransactionOutput(context);
                } catch (IOException e) {
                    LOG.error(MessageFormat.format(
                            "Failed directio job rollforward: datasource={0} (job={1})",
                            id,
                            jobContext.getJobID()), e);
                    throw e;
                } catch (InterruptedException e) {
                    throw (IOException) new InterruptedIOException(MessageFormat.format(
                            "Interrupted while commit transaction: {0}, (path={1})",
                            context.getTransactionId(),
                            containerPath)).initCause(e);
                }
                context.getCounter().add(1);
            }
        }

                    LOG.debug(MessageFormat.format(
                            "Start directio job cleanup: datasource={0} (job={1})",
                            id,
                            jobContext.getJobID()));
                }
                OutputTransactionContext context = HadoopDataSourceUtil.createContext(jobContext, id);
                try {
                    DirectDataSource repo = repository.getRelatedDataSource(containerPath);
                    repo.cleanupTransactionOutput(context);
                } catch (IOException e) {
                    LOG.error(MessageFormat.format(
                            "Failed directio job cleanup: datasource={0} (job={1})",
                            id,
                            jobContext.getJobID()), e);
                    throw e;
                } catch (InterruptedException e) {
                    throw (IOException) new InterruptedIOException(MessageFormat.format(
                            "Interrupted while cleanup transaction: {0}, (path={1})",
                            context.getTransactionId(),
                            containerPath)).initCause(e);
                }
                context.getCounter().add(1);
            }
        }

        DirectDataSourceRepository repo = getRepository();
        for (String containerPath : repo.getContainerPaths()) {
            String datasourceId = repo.getRelatedId(containerPath);
            try {
                DirectDataSource datasource = repo.getRelatedDataSource(containerPath);
                OutputTransactionContext context = HadoopDataSourceUtil.createContext(executionId, datasourceId);
                datasource.commitTransactionOutput(context);
                datasource.cleanupTransactionOutput(context);
            } catch (IOException e) {
                succeed = false;
                LOG.error(MessageFormat.format(

        DirectDataSourceRepository repo = getRepository();
        for (String containerPath : repo.getContainerPaths()) {
            String datasourceId = repo.getRelatedId(containerPath);
            try {
                DirectDataSource datasource = repo.getRelatedDataSource(containerPath);
                OutputTransactionContext context = HadoopDataSourceUtil.createContext(executionId, datasourceId);
                datasource.cleanupTransactionOutput(context);
            } catch (IOException e) {
                succeed = false;
                LOG.error(MessageFormat.format(
                        "Failed to abort transaction (datastoreId={0}, executionId={1})",
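Taken together, the last two snippets encode the finalization policy: a successful execution is rolled forward (commitTransactionOutput) and then cleaned up, while an abort skips the commit and only cleans up. A combined sketch, mirroring the two loops above and catching only IOException as they do:

    static boolean finishTransaction(
            DirectDataSourceRepository repo, String executionId, boolean committed)
            throws InterruptedException {
        boolean succeed = true;
        for (String containerPath : repo.getContainerPaths()) {
            String datasourceId = repo.getRelatedId(containerPath);
            try {
                DirectDataSource datasource = repo.getRelatedDataSource(containerPath);
                OutputTransactionContext context =
                        HadoopDataSourceUtil.createContext(executionId, datasourceId);
                if (committed) {
                    datasource.commitTransactionOutput(context); // roll forward
                }
                datasource.cleanupTransactionOutput(context);    // always clean up
            } catch (IOException e) {
                succeed = false; // record the failure and keep going, as the snippets do
            }
        }
        return succeed;
    }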
