Package com.asakusafw.bulkloader.bean

Examples of com.asakusafw.bulkloader.bean.ImportTargetTableBean


        }
        LOG.info("TG-IMPORTER-11001",
                bean.getTargetName(), bean.getBatchId(), bean.getJobflowId(), bean.getExecutionId());
        boolean findCache = false;
        for (String tableName : bean.getImportTargetTableList()) {
            ImportTargetTableBean table = bean.getTargetTable(tableName);
            if (table.getCacheId() == null) {
                setContentProtocol(tableName, table);
            } else {
                findCache = true;
            }
        }
View Full Code Here


        if (bean == null) {
            throw new IllegalArgumentException("bean must not be null"); //$NON-NLS-1$
        }
        boolean findCache = false;
        for (String tableName : bean.getImportTargetTableList()) {
            ImportTargetTableBean table = bean.getTargetTable(tableName);
            if (table.getCacheId() != null) {
                findCache = true;
            }
        }
        if (findCache == false) {
            return;
View Full Code Here

            LOG.info("TG-IMPORTER-11006",
                    bean.getTargetName(), bean.getBatchId(), bean.getJobflowId(), bean.getExecutionId());
            Map<String, CacheInfo> map = collectRemoteCacheInfo(bean);

            for (String tableName : bean.getImportTargetTableList()) {
                ImportTargetTableBean tableInfo = bean.getTargetTable(tableName);
                String cacheId = tableInfo.getCacheId();
                if (cacheId == null) {
                    assert tableInfo.getImportProtocol() != null;
                    continue;
                }

                CacheInfo currentRemoteInfo = map.get(tableInfo.getDfsFilePath());
                Calendar startTimestamp = computeStartTimestamp(currentRemoteInfo, repository, tableName, tableInfo);

                tableInfo.setStartTimestamp(startTimestamp);
                LocalCacheInfo nextLocalInfo = new LocalCacheInfo(
                        cacheId,
                        null,
                        startTimestamp,
                        tableName,
                        tableInfo.getDfsFilePath());

                ThunderGateCacheSupport model = createDataModelObject(tableName, tableInfo);
                Calendar nextTimestamp = repository.putCacheInfo(nextLocalInfo);
                CacheInfo nextRemoteInfo = new CacheInfo(
                        CacheInfo.FEATURE_VERSION,
                        cacheId,
                        nextTimestamp,
                        tableName,
                        tableInfo.getImportTargetColumns(),
                        model.getClass().getName(),
                        model.__tgc__DataModelVersion());

                FileProtocol.Kind kind = startTimestamp == null
                    ? FileProtocol.Kind.CREATE_CACHE
                    : FileProtocol.Kind.UPDATE_CACHE;
                FileProtocol protocol = new FileProtocol(kind, tableInfo.getDfsFilePath(), nextRemoteInfo);
                tableInfo.setImportProtocol(protocol);
            }
            succeed = true;
        } finally {
            if (succeed == false) {
                repository.releaseLock(bean.getExecutionId());
View Full Code Here

            ImportBean bean,
            LocalCacheInfoRepository repository) throws BulkLoaderSystemException, BulkLoaderReRunnableException {
        assert bean != null;
        assert repository != null;
        for (String tableName : bean.getImportTargetTableList()) {
            ImportTargetTableBean tableInfo = bean.getTargetTable(tableName);
            if (tableInfo.getCacheId() == null) {
                assert tableInfo.getImportProtocol() != null;
                continue;
            }
            boolean locked = repository.tryLock(bean.getExecutionId(), tableInfo.getCacheId(), tableName);
            if (locked == false) {
                throw new BulkLoaderReRunnableException(getClass(), "TG-IMPORTER-11005",
                        tableName,
                        tableInfo.getCacheId());
            }
        }
    }
View Full Code Here

    }

    private boolean hasCacheUser(ImportBean bean) {
        assert bean != null;
        for (String tableName : bean.getImportTargetTableList()) {
            ImportTargetTableBean table = bean.getTargetTable(tableName);
            if (table.getCacheId() != null) {
                return true;
            }
        }
        return false;
    }
View Full Code Here

            @Override
            public Void call() throws IOException {
                FileList.Writer writer = provider.openWriter(false);
                try {
                    for (String tableName : bean.getImportTargetTableList()) {
                        ImportTargetTableBean table = bean.getTargetTable(tableName);
                        if (table.getCacheId() == null || table.getDfsFilePath() == null) {
                            continue;
                        }
                        FileProtocol protocol = new FileProtocol(
                                FileProtocol.Kind.GET_CACHE_INFO,
                                table.getDfsFilePath(),
                                null);

                        // send only header
                        writer.openNext(protocol).close();
                    }
View Full Code Here

            // Import対象テーブル毎にファイルの読み込み・書き出しの処理を行う
            List<String> list = arrangeSendOrder(bean);
            for (String tableName : list) {
                long tableStartTime = System.currentTimeMillis();
                ImportTargetTableBean targetTable = bean.getTargetTable(tableName);
                LOG.info("TG-IMPORTER-04004",
                        tableName,
                        targetTable.getImportFile().getAbsolutePath(),
                        compType.getSymbol());
                long dumpFileSize = sendTableFile(writer, tableName, targetTable);
                LOG.info("TG-PROFILE-02003",
                        bean.getTargetName(),
                        bean.getBatchId(),
                        bean.getJobflowId(),
                        bean.getExecutionId(),
                        tableName,
                        dumpFileSize,
                        System.currentTimeMillis() - tableStartTime);
                LOG.info("TG-IMPORTER-04005",
                        tableName,
                        targetTable.getImportFile().getAbsolutePath(),
                        compType.getSymbol());
            }
            writer.close();
            provider.waitForComplete();
            LOG.info("TG-PROFILE-02001",
View Full Code Here

        assert bean != null;
        final Map<String, ImportTargetTableBean> tables = new HashMap<String, ImportTargetTableBean>();
        final Map<String, Long> sizes = new HashMap<String, Long>();
        List<String> tableNames = new ArrayList<String>(bean.getImportTargetTableList());
        for (String tableName : tableNames) {
            ImportTargetTableBean tableBean = bean.getTargetTable(tableName);
            tables.put(tableName, tableBean);
            sizes.put(tableName, tableBean.getImportFile().length());
        }
        Collections.sort(tableNames, new Comparator<String>() {
            @Override
            public int compare(String o1, String o2) {
                ImportTargetTableBean t1 = tables.get(o1);
                ImportTargetTableBean t2 = tables.get(o2);

                // put cached table on top
                if (t1.getCacheId() != null && t2.getCacheId() == null) {
                    return -1;
                } else if (t1.getCacheId() == null && t2.getCacheId() != null) {
                    return +1;
                }

                // put large file on top
                long s1 = sizes.get(o1);
View Full Code Here

//        // テストデータの指定
//        String pattern = "patternL01";

        // ImportBeanを生成
        Map<String, ImportTargetTableBean> targetTable = new LinkedHashMap<String, ImportTargetTableBean>();
        ImportTargetTableBean tableBean = new ImportTargetTableBean();
        tableBean.setImportTargetColumns(Arrays.asList(new String[]{"TEXTDATA1", "INTDATA1", "DATEDATA1"}));
        tableBean.setSearchCondition(null);
        tableBean.setUseCache(false);
        tableBean.setLockType(ImportTableLockType.TABLE);
        tableBean.setLockedOperation(ImportTableLockedOperation.ERROR);
        tableBean.setImportTargetType(null);
        tableBean.setDfsFilePath(null);
        targetTable.put("IMPORT_TARGET1", tableBean);
        ImportBean bean = createBean(new String[]{jobflowId, executionId, "20101021221015", "3", "5"}, targetTable);

        // テスト対象クラス実行
        TargetDataLock lock = new TargetDataLock();
View Full Code Here

        // テストデータの指定
//        String pattern = "patternL02";

        // ImportBeanを生成
        Map<String, ImportTargetTableBean> targetTable = new LinkedHashMap<String, ImportTargetTableBean>();
        ImportTargetTableBean tableBean1 = new ImportTargetTableBean();
        tableBean1.setImportTargetColumns(Arrays.asList(new String[]{"TEXTDATA1", "INTDATA1", "DATEDATA1"}));
        tableBean1.setSearchCondition("INTDATA1=11");
        tableBean1.setUseCache(false);
        tableBean1.setLockType(ImportTableLockType.NONE);
        tableBean1.setLockedOperation(ImportTableLockedOperation.FORCE);
        tableBean1.setImportTargetType(null);
        tableBean1.setDfsFilePath(null);
        targetTable.put("IMPORT_TARGET1", tableBean1);

        ImportTargetTableBean tableBean2 = new ImportTargetTableBean();
        tableBean2.setImportTargetColumns(Arrays.asList(new String[]{"TEXTDATA2"}));
        tableBean2.setSearchCondition("TEXTDATA2='testdata2-3'");
        tableBean2.setUseCache(false);
        tableBean2.setLockType(ImportTableLockType.RECORD);
        tableBean2.setLockedOperation(ImportTableLockedOperation.OFF);
        tableBean2.setImportTargetType(null);
        tableBean2.setDfsFilePath(null);
        targetTable.put("IMPORT_TARGET2", tableBean2);
        ImportBean bean = createBean(new String[]{jobflowId, executionId, "20101021221015", "3", "5"}, targetTable);

        // テスト対象クラス実行
        TargetDataLock lock = new TargetDataLock();
View Full Code Here

TOP

Related Classes of com.asakusafw.bulkloader.bean.ImportTargetTableBean

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact coftware#gmail.com.