Package com.asakusafw.runtime.directio

Examples of com.asakusafw.runtime.directio.DirectDataSourceRepository
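A quick orientation before the snippets: DirectDataSourceRepository maps "container paths" to DirectDataSource instances, and HadoopDataSourceUtil.loadRepository(Configuration) builds one from Hadoop configuration entries (the test case further down shows this with a mock data source). The following is a minimal sketch of that lookup flow, not framework code; the helper name listDataSources and the import of HadoopDataSourceUtil from com.asakusafw.runtime.directio.hadoop are assumptions based on the calls that appear in the snippets.

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;

    import com.asakusafw.runtime.directio.DirectDataSource;
    import com.asakusafw.runtime.directio.DirectDataSourceRepository;
    import com.asakusafw.runtime.directio.hadoop.HadoopDataSourceUtil;

    public final class ListDataSources {

        // Hypothetical helper (not part of the framework): enumerate every registered
        // data source together with the container path it is responsible for.
        static void listDataSources(Configuration conf) throws IOException, InterruptedException {
            // build the repository from the configuration, as the loadRepository test below does
            DirectDataSourceRepository repo = HadoopDataSourceUtil.loadRepository(conf);
            for (String containerPath : repo.getContainerPaths()) {
                // resolve the data source that handles paths under this container
                String id = repo.getRelatedId(containerPath);
                DirectDataSource source = repo.getRelatedDataSource(containerPath);
                System.out.printf("container=%s id=%s type=%s%n",
                        containerPath, id, source.getClass().getName());
            }
        }
    }

The fragments below appear to come from the framework's transaction apply/abort handling and from a HadoopDataSourceUtil test.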


        boolean succeed = true;
        if (fs.exists(commitMark) == false) {
            // FIXME cleanup
            // no commit mark for this execution, so there is no committed output to apply
            return false;
        }
        DirectDataSourceRepository repo = getRepository();
        for (String containerPath : repo.getContainerPaths()) {
            String datasourceId = repo.getRelatedId(containerPath);
            try {
                DirectDataSource datasource = repo.getRelatedDataSource(containerPath);
                OutputTransactionContext context = HadoopDataSourceUtil.createContext(executionId, datasourceId);
                // make the staged output visible, then remove the per-transaction staging area
                datasource.commitTransactionOutput(context);
                datasource.cleanupTransactionOutput(context);
            } catch (IOException e) {
                succeed = false;
                // truncated in the original snippet: the remainder logs the failure for this
                // data source and continues with the next container path
            }
        }


    public void loadRepository() throws Exception {
        // register a mock data source under the profile name "testing"
        Configuration conf = new Configuration();
        conf.set(key("testing"), MockHadoopDataSource.class.getName());
        conf.set(key("testing", "path"), "testing");
        conf.set(key("testing", "hello"), "world");

        // loadRepository() builds the repository from the configuration entries above
        DirectDataSourceRepository repo = HadoopDataSourceUtil.loadRepository(conf);
        DirectDataSource ds = repo.getRelatedDataSource("testing");
        assertThat(ds, instanceOf(MockHadoopDataSource.class));

        // the mock should have received both the configuration and its profile
        MockHadoopDataSource mock = (MockHadoopDataSource) ds;
        assertThat(mock.conf, is(notNullValue()));
        assertThat(mock.profile.getPath(), is("testing"));
    }

                        "Failed to delete commit mark (executionId={0}, path={1})",
                        executionId,
                        commitMark));
            }
        }
        DirectDataSourceRepository repo = getRepository();
        for (String containerPath : repo.getContainerPaths()) {
            String datasourceId = repo.getRelatedId(containerPath);
            try {
                DirectDataSource datasource = repo.getRelatedDataSource(containerPath);
                OutputTransactionContext context = HadoopDataSourceUtil.createContext(executionId, datasourceId);
                datasource.cleanupTransactionOutput(context);
            } catch (IOException e) {
                succeed = false;
                LOG.error(MessageFormat.format(
View Full Code Here
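Taken together, the fragments follow one pattern: obtain the repository, then visit every container path and hand the related data source an OutputTransactionContext to commit and/or clean up. Below is a consolidated sketch under the same assumptions as the sketch at the top (hypothetical helper name finalizeTransaction; not the framework's own transaction editor); the abort variant simply skips commitTransactionOutput, as in the last fragment above.

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;

    import com.asakusafw.runtime.directio.DirectDataSource;
    import com.asakusafw.runtime.directio.DirectDataSourceRepository;
    import com.asakusafw.runtime.directio.OutputTransactionContext;
    import com.asakusafw.runtime.directio.hadoop.HadoopDataSourceUtil;

    public final class FinalizeTransaction {

        // Hypothetical helper (not part of the framework): commit and then clean up the
        // output of one transaction across all registered data sources, returning false
        // if any of them failed.
        static boolean finalizeTransaction(Configuration conf, String executionId)
                throws IOException, InterruptedException {
            DirectDataSourceRepository repo = HadoopDataSourceUtil.loadRepository(conf);
            boolean succeed = true;
            for (String containerPath : repo.getContainerPaths()) {
                String datasourceId = repo.getRelatedId(containerPath);
                try {
                    DirectDataSource datasource = repo.getRelatedDataSource(containerPath);
                    OutputTransactionContext context =
                            HadoopDataSourceUtil.createContext(executionId, datasourceId);
                    datasource.commitTransactionOutput(context);   // make staged output visible
                    datasource.cleanupTransactionOutput(context);  // then drop the staging area
                } catch (IOException e) {
                    // keep going so a single failing data source does not block the rest
                    succeed = false;
                }
            }
            return succeed;
        }
    }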
