Package: com.asakusafw.bulkloader.bean

Examples of com.asakusafw.bulkloader.bean.ImportTargetTableBean


    @Test
    public void contents() throws Exception {
        ImportBean bean = createBean();
        Map<String, ImportTargetTableBean> targetTable = new HashMap<String, ImportTargetTableBean>();

        ImportTargetTableBean tb1 = new ImportTargetTableBean();
        tb1.setCacheId(null);
        tb1.setDfsFilePath("tb1");
        tb1.setImportTargetType(ImportTarget1.class);
        tb1.setImportTargetColumns(Arrays.asList("A"));
        tb1.setSearchCondition("");
        targetTable.put("__TG_TEST1", tb1);

        bean.setTargetTable(targetTable);
        ImportProtocolDecide service = new ImportProtocolDecide();
        service.execute(bean);

        assertThat(tb1.getImportProtocol().getKind(), is(FileProtocol.Kind.CONTENT));
        assertThat(tb1.getStartTimestamp(), is(nullValue()));
    }
View Full Code Here


    public void create_cache() throws Exception {
        ImportBean bean = createBean();

        Map<String, ImportTargetTableBean> targetTable = new HashMap<String, ImportTargetTableBean>();

        ImportTargetTableBean tb1 = new ImportTargetTableBean();
        tb1.setCacheId("tb1");
        tb1.setDfsFilePath("tb1");
        tb1.setImportTargetType(ImportTarget1.class);
        tb1.setImportTargetColumns(Arrays.asList("A"));
        tb1.setSearchCondition("");
        targetTable.put("__TG_TEST1", tb1);

        bean.setTargetTable(targetTable);
        ImportProtocolDecide service = new ImportProtocolDecide() {
            @Override
            protected Map<String, CacheInfo> collectRemoteCacheInfo(ImportBean _)
                    throws BulkLoaderSystemException {
                return Collections.emptyMap();
            }
        };
        service.execute(bean);

        assertThat(tb1.getImportProtocol().getKind(), is(FileProtocol.Kind.CREATE_CACHE));
        assertThat(tb1.getImportProtocol().getLocation(), is(tb1.getDfsFilePath()));
        assertThat(tb1.getStartTimestamp(), is(nullValue()));
        CacheInfo info = tb1.getImportProtocol().getInfo();
        assertThat(info, is(notNullValue()));

        assertThat(info.getId(), is("tb1"));
        assertThat(info.getFeatureVersion(), is(CacheInfo.FEATURE_VERSION));
        assertThat(info.getTimestamp(), is(not(nullValue())));
        assertThat(info.getTableName(), is("__TG_TEST1"));
        assertThat(info.getColumnNames(), is((Object) new HashSet<String>(tb1.getImportTargetColumns())));
        assertThat(info.getModelClassName(), is(ImportTarget1.class.getName()));
        assertThat(info.getModelClassVersion(), is(new ImportTarget1().__tgc__DataModelVersion()));
    }
View Full Code Here

    public void update_cache() throws Exception {
        ImportBean bean = createBean();

        Map<String, ImportTargetTableBean> targetTable = new HashMap<String, ImportTargetTableBean>();

        final ImportTargetTableBean tb1 = new ImportTargetTableBean();
        tb1.setCacheId("tb1");
        tb1.setDfsFilePath("tb1");
        tb1.setImportTargetType(ImportTarget1.class);
        tb1.setImportTargetColumns(Arrays.asList("A"));
        tb1.setSearchCondition("");
        targetTable.put("__TG_TEST1", tb1);

        Connection conn = DBConnection.getConnection();
        try {
            LocalCacheInfoRepository repo = new LocalCacheInfoRepository(conn);
            repo.putCacheInfo(new LocalCacheInfo(
                    tb1.getCacheId(),
                    null,
                    null,
                    "__TG_TEST1",
                    tb1.getDfsFilePath()));

        } finally {
            DBConnection.closeConn(conn);
        }

        bean.setTargetTable(targetTable);
        ImportProtocolDecide service = new ImportProtocolDecide() {
            @Override
            protected Map<String, CacheInfo> collectRemoteCacheInfo(ImportBean _)
                    throws BulkLoaderSystemException {
                return Collections.singletonMap("tb1", new CacheInfo(
                        CacheInfo.FEATURE_VERSION,
                        tb1.getCacheId(),
                        offset(-1),
                        "__TG_TEST1",
                        tb1.getImportTargetColumns(),
                        tb1.getImportTargetType().getName(),
                        new ImportTarget1().__tgc__DataModelVersion()));
            }
        };
        service.execute(bean);

        assertThat(tb1.getImportProtocol().getKind(), is(FileProtocol.Kind.UPDATE_CACHE));
        assertThat(tb1.getImportProtocol().getLocation(), is(tb1.getDfsFilePath()));
        assertThat(tb1.getStartTimestamp(), is(notNullValue()));
        CacheInfo info = tb1.getImportProtocol().getInfo();
        assertThat(info, is(notNullValue()));

        assertThat(info.getId(), is("tb1"));
        assertThat(info.getFeatureVersion(), is(CacheInfo.FEATURE_VERSION));
        assertThat(info.getTimestamp(), is(not(nullValue())));
        assertThat(info.getTableName(), is("__TG_TEST1"));
        assertThat(info.getColumnNames(), is((Object) new HashSet<String>(tb1.getImportTargetColumns())));
        assertThat(info.getModelClassName(), is(ImportTarget1.class.getName()));
        assertThat(info.getModelClassVersion(), is(new ImportTarget1().__tgc__DataModelVersion()));
    }
View Full Code Here

        // テストデータの指定
//        String pattern = "patternC01";

        // ImportBeanを生成
        Map<String, ImportTargetTableBean> targetTable = new LinkedHashMap<String, ImportTargetTableBean>();
        ImportTargetTableBean tableBean = new ImportTargetTableBean();
        tableBean.setImportTargetColumns(Arrays.asList(new String[]{"TEXTDATA1", "INTDATA1", "DATEDATA1"}));
        tableBean.setSearchCondition(null);
        tableBean.setUseCache(false);
        tableBean.setLockType(ImportTableLockType.TABLE);
        tableBean.setLockedOperation(ImportTableLockedOperation.ERROR);
        tableBean.setImportTargetType(null);
        tableBean.setDfsFilePath(null);
        targetTable.put("IMPORT_TARGET1", tableBean);
        ImportBean bean = createBean(new String[]{jobflowId, executionId, "20101021221015", "3", "5"}, targetTable);

        String jobflowSid = "1";
View Full Code Here

        // テストデータをセット
        util.storeToDatabase(false);

        // ImportBeanを生成
        Map<String, ImportTargetTableBean> targetTable = new LinkedHashMap<String, ImportTargetTableBean>();
        ImportTargetTableBean tableBean = new ImportTargetTableBean();
        tableBean.setImportTargetColumns(Arrays.asList(new String[]{"TEXTDATA1", "INTDATA1", "DATEDATA1"}));
        tableBean.setSearchCondition(null);
        tableBean.setUseCache(false);
        tableBean.setLockType(ImportTableLockType.TABLE);
        tableBean.setLockedOperation(ImportTableLockedOperation.ERROR);
        tableBean.setImportTargetType(null);
        tableBean.setDfsFilePath(null);
        targetTable.put("IMPORT_TARGET1", tableBean);
        ImportBean bean = createBean(new String[]{jobflowId, "JOB_FLOW01-002", "20101021221015", "3", "5"}, targetTable);

        String jobflowSid = "9";
View Full Code Here

//        // テストデータの指定
//        String pattern = "patternC02";

        // ImportBeanを生成
        Map<String, ImportTargetTableBean> targetTable = new LinkedHashMap<String, ImportTargetTableBean>();
        ImportTargetTableBean tableBean1 = new ImportTargetTableBean();
        tableBean1.setImportTargetColumns(Arrays.asList(new String[]{"TEXTDATA1", "INTDATA1", "DATEDATA1"}));
        tableBean1.setSearchCondition("INTDATA1=11");
        tableBean1.setUseCache(false);
        tableBean1.setLockType(ImportTableLockType.NONE);
        tableBean1.setLockedOperation(ImportTableLockedOperation.FORCE);
        tableBean1.setImportTargetType(null);
        tableBean1.setDfsFilePath(null);
        targetTable.put("IMPORT_TARGET1", tableBean1);

        ImportTargetTableBean tableBean2 = new ImportTargetTableBean();
        tableBean2.setImportTargetColumns(Arrays.asList(new String[]{"TEXTDATA2", "INTDATA2"}));
        tableBean2.setSearchCondition("TEXTDATA2='testdata2-3'");
        tableBean2.setUseCache(false);
        tableBean2.setLockType(ImportTableLockType.RECORD);
        tableBean2.setLockedOperation(ImportTableLockedOperation.OFF);
        tableBean2.setImportTargetType(null);
        tableBean2.setDfsFilePath(null);
        targetTable.put("IMPORT_TARGET2", tableBean2);
        ImportBean bean = createBean(new String[]{jobflowId, "JOB_FLOW01-003", "20101021221015", "3", "5"}, targetTable);

        String jobflowSid = "2";
View Full Code Here

        Map<String, ImportTargetTableBean> importTargetTable = loder.getImportTargetTables();

        // 実行結果を検証
        assertTrue(result);
        // テーブルXXXの検証
        ImportTargetTableBean table1 = importTargetTable.get("XXX");
        // Import対象カラム
        assertEquals(3, table1.getImportTargetColumns().size());
        assertEquals("columnA", table1.getImportTargetColumns().get(0));
        assertEquals("columnB", table1.getImportTargetColumns().get(1));
        assertEquals("columnC", table1.getImportTargetColumns().get(2));
        // 検索条件
        assertEquals("columnA='1' and columnB='2'", table1.getSearchCondition());
        // ロック取得タイプ
        assertEquals(ImportTableLockType.find("1"), table1.getLockType());
        // ロック済みの場合の取り扱い
        assertEquals(ImportTableLockedOperation.find("3"), table1.getLockedOperation());
        // JavaBeansクラス名
        assertEquals("com.asakusafw.bulkloader.bean.ImportTargetTableBean", table1.getImportTargetType().getName());
        // HDFS上の出力パス
        assertEquals("/${user}/${execution_id}/import_target1", table1.getDfsFilePath());

        // テーブルYYYの検証
        ImportTargetTableBean table2 = importTargetTable.get("YYY");
        // Import対象カラム
        assertEquals(1, table2.getImportTargetColumns().size());
        assertEquals("columnA", table2.getImportTargetColumns().get(0));
        // 検索条件
        assertEquals("columnA='1' or columnB=(select columnB from tableA where column='3')", table2.getSearchCondition());
        // ロック取得タイプ
        assertEquals(ImportTableLockType.find("2"), table2.getLockType());
        // ロック済みの場合の取り扱い
        assertEquals(ImportTableLockedOperation.find("1"), table2.getLockedOperation());
        // JavaBeansクラス名
        assertEquals("com.asakusafw.bulkloader.bean.ImportBean", table2.getImportTargetType().getName());
        // HDFS上の出力パス
        assertEquals("/asakusa/import/11/YYY", table2.getDfsFilePath());

        // テーブルZZZの検証
        ImportTargetTableBean table3 = importTargetTable.get("ZZZ");
        // Import対象カラム
        assertEquals(2, table3.getImportTargetColumns().size());
        assertEquals("columnA", table3.getImportTargetColumns().get(0));
        assertEquals("columnB", table3.getImportTargetColumns().get(1));
        // 検索条件
        assertNull(table3.getSearchCondition());
        // ロック取得タイプ
        assertEquals(ImportTableLockType.find("3"), table3.getLockType());
        // ロック済みの場合の取り扱い
        assertEquals(ImportTableLockedOperation.find("2"), table3.getLockedOperation());
        // JavaBeansクラス名
        assertEquals("com.asakusafw.bulkloader.bean.ExporterBean", table3.getImportTargetType().getName());
        // HDFS上の出力パス
        assertEquals("/asakusa/import/11/ZZZ", table3.getDfsFilePath());
    }
View Full Code Here

//        // テストデータの指定
//        String pattern = "patternC03";

        // ImportBeanを生成
        Map<String, ImportTargetTableBean> targetTable = new LinkedHashMap<String, ImportTargetTableBean>();
        ImportTargetTableBean tableBean1 = new ImportTargetTableBean();
        tableBean1.setImportTargetColumns(Arrays.asList(new String[]{"INTDATA1", "TEXTDATA1"}));
        tableBean1.setSearchCondition(null);
        tableBean1.setUseCache(false);
        tableBean1.setLockType(ImportTableLockType.RECORD);
        tableBean1.setLockedOperation(ImportTableLockedOperation.ERROR);
        tableBean1.setImportTargetType(null);
        tableBean1.setDfsFilePath(null);
        targetTable.put("IMPORT_TARGET1", tableBean1);
        ImportBean bean = createBean(new String[]{jobflowId, "JOB_FLOW01-004", "20101021221015", "3", "5"}, targetTable);

        String jobflowSid = "3";
View Full Code Here

            fos = new FileOutputStream(file[1]);
            fos.write(123);

            // ImportBeanを生成
            Map<String, ImportTargetTableBean> targetTable = new LinkedHashMap<String, ImportTargetTableBean>();
            ImportTargetTableBean tableBean1 = new ImportTargetTableBean();
            tableBean1.setImportTargetColumns(Arrays.asList(new String[]{"INTDATA1", "TEXTDATA1"}));
            tableBean1.setSearchCondition(null);
            tableBean1.setUseCache(false);
            tableBean1.setLockType(ImportTableLockType.RECORD);
            tableBean1.setLockedOperation(ImportTableLockedOperation.ERROR);
            tableBean1.setImportTargetType(null);
            tableBean1.setDfsFilePath(null);
            targetTable.put("IMPORT_TARGET1", tableBean1);
            ImportBean bean = createBean(new String[]{jobflowId, executionId, "20101021221015", "3", "5"}, targetTable);

            // テスト対象クラス実行
            ImportFileCreate create = new ImportFileCreate();
View Full Code Here

//        // テストデータの指定
//        String pattern = "patternC01";

        // ImportBeanを生成
        Map<String, ImportTargetTableBean> targetTable = new LinkedHashMap<String, ImportTargetTableBean>();
        ImportTargetTableBean tableBean = new ImportTargetTableBean();
        tableBean.setImportTargetColumns(Arrays.asList(new String[]{"TEXTDATA1", "INTDATA1", "DATEDATA1"}));
        tableBean.setSearchCondition(null);
        tableBean.setUseCache(false);
        tableBean.setLockType(ImportTableLockType.TABLE);
        tableBean.setLockedOperation(ImportTableLockedOperation.ERROR);
        tableBean.setImportTargetType(null);
        tableBean.setDfsFilePath(null);
        targetTable.put("IMPORT_TARGET3", tableBean);
        ImportBean bean = createBean(new String[]{jobflowId, executionId, "20101021221015", "3", "5"}, targetTable);

        String jobflowSid = "5";
View Full Code Here

TOP

Related Classes of com.asakusafw.bulkloader.bean.ImportTargetTableBean

Copyright © 2018 www.massapicom. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.