Package: com.asakusafw.runtime.directio

Examples of com.asakusafw.runtime.directio.DirectDataSource


                            taskContext.getJobID(),
                            taskContext.getTaskAttemptID()));
                }
                OutputAttemptContext context = HadoopDataSourceUtil.createContext(taskContext, id);
                try {
                    DirectDataSource repo = repository.getRelatedDataSource(containerPath);
                    repo.cleanupAttemptOutput(context);
                } catch (IOException e) {
                    LOG.error(MessageFormat.format(
                            "Failed directio task cleanup: datasource={0} (job={1}, task={2})",
                            id,
                            taskContext.getJobID(),
View Full Code Here


                            id,
                            jobContext.getJobID()));
                }
                OutputTransactionContext context = HadoopDataSourceUtil.createContext(jobContext, id);
                try {
                    DirectDataSource repo = repository.getRelatedDataSource(containerPath);
                    repo.setupTransactionOutput(context);
                } catch (IOException e) {
                    LOG.error(MessageFormat.format(
                            "Failed directio job setup: datasource={0} (job={1})",
                            id,
                            jobContext.getJobID()), e);
View Full Code Here

                    continue;
                }
                String id = repository.getRelatedId(spec.basePath);
                OutputTransactionContext context = HadoopDataSourceUtil.createContext(jobContext, id);
                try {
                    DirectDataSource repo = repository.getRelatedDataSource(spec.basePath);
                    String basePath = repository.getComponentPath(spec.basePath);
                    for (String pattern : spec.deletePatterns) {
                        FilePattern resources = FilePattern.compile(pattern);
                        if (LOG.isInfoEnabled()) {
                            LOG.info(MessageFormat.format(
                                    "Deleting output: datasource={0}, basePath={1}, pattern={2}",
                                    id,
                                    basePath,
                                    pattern));
                        }
                        boolean succeed = repo.delete(basePath, resources, true, context.getCounter());
                        if (LOG.isDebugEnabled()) {
                            LOG.debug(MessageFormat.format(
                                    "Deleted output (succeed={3}): datasource={0}, basePath={1}, pattern={2}",
                                    id,
                                    basePath,
View Full Code Here

                            id,
                            jobContext.getJobID()));
                }
                OutputTransactionContext context = HadoopDataSourceUtil.createContext(jobContext, id);
                try {
                    DirectDataSource repo = repository.getRelatedDataSource(containerPath);
                    repo.commitTransactionOutput(context);
                } catch (IOException e) {
                    LOG.error(MessageFormat.format(
                            "Failed directio job rollforward: datasource={0} (job={1})",
                            id,
                            jobContext.getJobID()), e);
View Full Code Here

                            id,
                            jobContext.getJobID()));
                }
                OutputTransactionContext context = HadoopDataSourceUtil.createContext(jobContext, id);
                try {
                    DirectDataSource repo = repository.getRelatedDataSource(containerPath);
                    repo.cleanupTransactionOutput(context);
                } catch (IOException e) {
                    LOG.error(MessageFormat.format(
                            "Failed directio job cleanup: datasource={0} (job={1})",
                            id,
                            jobContext.getJobID()), e);
View Full Code Here

        Configuration conf = new Configuration();
        conf.set(key("testing"), MockHadoopDataSource.class.getName());
        conf.set(key("testing", "path"), "testing");
        conf.set(key("testing", "hello"), "world");
        DirectDataSourceRepository repo = HadoopDataSourceUtil.loadRepository(conf);
        DirectDataSource ds = repo.getRelatedDataSource("testing");
        assertThat(ds, instanceOf(MockHadoopDataSource.class));
        MockHadoopDataSource mock = (MockHadoopDataSource) ds;
        assertThat(mock.conf, is(notNullValue()));
        assertThat(mock.profile.getPath(), is("testing"));
    }
View Full Code Here

        }
        DirectDataSourceRepository repo = getRepository();
        for (String containerPath : repo.getContainerPaths()) {
            String datasourceId = repo.getRelatedId(containerPath);
            try {
                DirectDataSource datasource = repo.getRelatedDataSource(containerPath);
                OutputTransactionContext context = HadoopDataSourceUtil.createContext(executionId, datasourceId);
                datasource.commitTransactionOutput(context);
                datasource.cleanupTransactionOutput(context);
            } catch (IOException e) {
                succeed = false;
                LOG.error(MessageFormat.format(
                        "Failed to apply transaction (datastoreId={0}, executionId={1})",
                        datasourceId,
View Full Code Here

        }
        DirectDataSourceRepository repo = getRepository();
        for (String containerPath : repo.getContainerPaths()) {
            String datasourceId = repo.getRelatedId(containerPath);
            try {
                DirectDataSource datasource = repo.getRelatedDataSource(containerPath);
                OutputTransactionContext context = HadoopDataSourceUtil.createContext(executionId, datasourceId);
                datasource.cleanupTransactionOutput(context);
            } catch (IOException e) {
                succeed = false;
                LOG.error(MessageFormat.format(
                        "Failed to abort transaction (datastoreId={0}, executionId={1})",
                        datasourceId,
View Full Code Here

TOP

Related Classes of com.asakusafw.runtime.directio.DirectDataSource

Copyright © 2018 www.massapicom. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., owned by Oracle Inc. Contact coftware#gmail.com.