Examples of ImportTargetTableBean
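The snippets below are drawn from the Asakusa Framework ThunderGate bulk loader and its tests. As a primer, here is a minimal sketch of configuring a single import target table, using only the setters the snippets themselves exercise; every value, and the ImportTarget1 model class, is illustrative rather than canonical.

    // Per-table import settings for the ThunderGate importer (illustrative values)
    Map<String, ImportTargetTableBean> targetTable = new LinkedHashMap<String, ImportTargetTableBean>();
    ImportTargetTableBean table = new ImportTargetTableBean();
    table.setImportTargetColumns(Arrays.asList("TEXTDATA1", "INTDATA1")); // columns to extract
    table.setSearchCondition("INTDATA1=11");        // WHERE clause applied at extract time
    table.setUseCache(false);                       // bypass the ThunderGate cache
    table.setLockType(ImportTableLockType.find("1"));               // lock type, resolved by code
    table.setLockedOperation(ImportTableLockedOperation.find("1")); // behavior when rows are locked
    table.setImportTargetType(ImportTarget1.class); // data model class, as in the tests below
    table.setDfsFilePath("${execution_id}/import/XXX"); // HDFS destination template
    targetTable.put("IMPORT_TARGET1", table);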


Examples of com.asakusafw.bulkloader.bean.ImportTargetTableBean

    public void checkImportParamTest12() throws Exception {
        JobFlowParamLoader loader = new JobFlowParamLoader();

        // set up the import target table
        Map<String, ImportTargetTableBean> targetTable = new LinkedHashMap<String, ImportTargetTableBean>();
        ImportTargetTableBean tableBean1 = new ImportTargetTableBean();
        tableBean1.setImportTargetColumns(Arrays.asList("TEXTDATA1", "INTDATA1", "DATEDATA1"));
        tableBean1.setSearchCondition("INTDATA1=11");
        tableBean1.setUseCache(false);
        tableBean1.setLockType(ImportTableLockType.find("2"));
        tableBean1.setLockedOperation(ImportTableLockedOperation.find("2"));
        tableBean1.setImportTargetType(this.getClass());
        tableBean1.setDfsFilePath("hdfs://localhost/user/asakusa/import/11/XXX_1");
        targetTable.put("IMPORT_TARGET1", tableBean1);

        // run the test
        boolean result = loader.checkImportParam(targetTable, targetName, "1", "dummyFileName", false);
        assertFalse(result);

        tableBean1.setLockType(ImportTableLockType.find("1"));
        result = loader.checkImportParam(targetTable, targetName, "1", "dummyFileName", false);
        assertFalse(result);
    }

Examples of com.asakusafw.bulkloader.bean.ImportTargetTableBean

    public void checkImportParamTest13() throws Exception {
        JobFlowParamLoader loader = new JobFlowParamLoader();

        // set up the import target table
        Map<String, ImportTargetTableBean> targetTable = new LinkedHashMap<String, ImportTargetTableBean>();
        ImportTargetTableBean tableBean1 = new ImportTargetTableBean();
        tableBean1.setImportTargetColumns(Arrays.asList("TEXTDATA1", "INTDATA1", "DATEDATA1"));
        tableBean1.setSearchCondition("INTDATA1=11");
        tableBean1.setUseCache(false);
        tableBean1.setLockType(ImportTableLockType.find("3"));
        tableBean1.setLockedOperation(ImportTableLockedOperation.find("3"));
        tableBean1.setImportTargetType(this.getClass());
        tableBean1.setDfsFilePath("hdfs://localhost/user/asakusa/import/11/XXX_1");
        targetTable.put("IMPORT_TARGET1", tableBean1);

        // run the test
        boolean result = loader.checkImportParam(targetTable, targetName, "1", "dummyFileName", false);
        assertFalse(result);

        tableBean1.setLockedOperation(ImportTableLockedOperation.find("1"));
        result = loader.checkImportParam(targetTable, targetName, "1", "dummyFileName", false);
        assertFalse(result);
    }
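
Both tests above feed JobFlowParamLoader.checkImportParam a lock-type / locked-operation combination that the loader rejects, so assertFalse passes. For contrast, a sketch of the same call on the happy path; the "target", "1", and "dummyFileName" arguments simply mirror the tests, and which combinations are accepted is left to the loader.

    JobFlowParamLoader loader = new JobFlowParamLoader();
    boolean valid = loader.checkImportParam(targetTable, "target", "1", "dummyFileName", false);
    // valid == false indicates an inconsistent table configuration, for example
    // a locked operation that cannot be combined with the chosen lock type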

Examples of com.asakusafw.bulkloader.bean.ImportTargetTableBean

    @Test
    public void importFileTest01() throws Exception {
        // build the ImportBean
        Map<String, ImportTargetTableBean> targetTable = new LinkedHashMap<String, ImportTargetTableBean>();
        ImportTargetTableBean tableBean1 = new ImportTargetTableBean();
        tableBean1.setDfsFilePath("${execution_id}/import/XXX");
        tableBean1.setImportTargetType(this.getClass());
        targetTable.put("IMPORT_TARGET1", tableBean1);
        ImportTargetTableBean tableBean2 = new ImportTargetTableBean();
        tableBean2.setDfsFilePath("asakusa/import/XXX");
        tableBean2.setImportTargetType(this.getClass());
        targetTable.put("IMPORT_TARGET2", tableBean2);
        ImportBean bean = new ImportBean();
        bean.setTargetTable(targetTable);
        bean.setExecutionId(executionId);


Examples of com.asakusafw.bulkloader.bean.ImportTargetTableBean

    @Test
    public void importFileTest02() throws Exception {
        // build the ImportBean
        Map<String, ImportTargetTableBean> targetTable = new LinkedHashMap<String, ImportTargetTableBean>();
        ImportTargetTableBean tableBean1 = new ImportTargetTableBean();
        tableBean1.setDfsFilePath("Dummy");
        tableBean1.setImportTargetType(this.getClass());
        targetTable.put("IMPORT_TARGET1", tableBean1);
        ImportTargetTableBean tableBean2 = new ImportTargetTableBean();
        tableBean2.setDfsFilePath("Dummy");
        tableBean2.setImportTargetType(this.getClass());
        targetTable.put("IMPORT_TARGET2", tableBean2);
        ImportBean bean = new ImportBean();
        bean.setTargetTable(targetTable);
        bean.setExecutionId(executionId);


Examples of com.asakusafw.bulkloader.bean.ImportTargetTableBean

    @Test
    public void create_cache() throws Exception {
        // build the ImportBean
        Map<String, ImportTargetTableBean> targetTable = new LinkedHashMap<String, ImportTargetTableBean>();

        ImportTargetTableBean tableBean1 = new ImportTargetTableBean();
        tableBean1.setDfsFilePath("/${user}/${execution_id}/import/c1");
        tableBean1.setImportTargetType(ImportTarget1.class);
        tableBean1.setCacheId("c1");
        targetTable.put("IMPORT_TARGET1", tableBean1);

        ImportTargetTableBean tableBean2 = new ImportTargetTableBean();
        tableBean2.setDfsFilePath("/${user}/${execution_id}/import/c2");
        tableBean2.setImportTargetType(ImportTarget1.class);
        tableBean2.setCacheId("c2");
        targetTable.put("IMPORT_TARGET2", tableBean2);

        ImportBean bean = new ImportBean();
        bean.setTargetTable(targetTable);
        bean.setExecutionId(executionId);

        final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        FileList.Writer writer = FileList.createWriter(buffer, true);
        final CacheInfo info = new CacheInfo("a", "c1", Calendar.getInstance(), "IMPORT_TARGET1", Arrays.asList("a", "b"), "X", 0);
        writer.openNext(new FileProtocol(
                FileProtocol.Kind.CREATE_CACHE,
                tableBean1.getDfsFilePath(),
                info)
        ).close();
        writer.openNext(new FileProtocol(
                FileProtocol.Kind.CONTENT,
                FileNameUtil.createSendImportFileName("IMPORT_TARGET2"),
                null)
        ).close();
        writer.close();
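
        // The FileList now holds two entries: a CREATE_CACHE record carrying
        // the CacheInfo for cache "c1", and a plain CONTENT record with
        // ordinary import data for IMPORT_TARGET2.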

        final File output = folder.newFolder("output");
        final List<String> files = new ArrayList<String>();
        final List<String> builders = new ArrayList<String>();

        // run the class under test
        DummyHdfsFileImport fileImport = new DummyHdfsFileImport(0) {
            @Override
            protected InputStream getInputStream() {
                return new ByteArrayInputStream(buffer.toByteArray());
            }
            @Override
            protected URI resolveLocation(ImportBean ignored, String user, String location) throws BulkLoaderSystemException {
                return new File(output, location).toURI();
            }
            @Override
            protected <T> long write(
                    Class<T> targetTableModel,
                    URI dfsFilePath,
                    InputStream inputStream) throws BulkLoaderSystemException {
                try {
                    inputStream.close();
                    files.add(new File(dfsFilePath).getPath());
                } catch (Exception e) {
                    throw new AssertionError(e);
                }
                return 1;
            }
            @Override
            protected Callable<?> createCacheBuilder(
                    String subcommand,
                    ImportBean ignored,
                    URI location,
                    final CacheInfo target) throws IOException {
                assertThat(subcommand, is(CacheBuildClient.SUBCOMMAND_CREATE));
                assertThat(target, is(info));
                return new Callable<Void>() {
                    @Override
                    public Void call() throws Exception {
                        builders.add(target.getId());
                        return null;
                    }
                };
            }
        };
        boolean result = fileImport.importFile(bean, "hadoop");

        assertTrue(result);

        assertThat(files.size(), is(2));
        assertThat(files.get(0), endsWith("c1/PATCH/part-0"));
        assertThat(files.get(1), endsWith(tableBean2.getDfsFilePath()));

        Collections.sort(builders);
        assertThat(builders.size(), is(1));
        assertThat(builders.get(0), is("c1"));
    }

Examples of com.asakusafw.bulkloader.bean.ImportTargetTableBean

            conn = DBConnection.getConnection();

            // create an import file for each import target table
            List<String> list = bean.getImportTargetTableList();
            for (String tableName : list) {
                ImportTargetTableBean targetTable = bean.getTargetTable(tableName);
                ImportTableLockType lockType = targetTable.getLockType();

                // build the import file name
                File importFile = FileNameUtil.createImportFilePath(
                        bean.getTargetName(), bean.getJobflowId(), bean.getExecutionId(), tableName);

                LOG.info("TG-IMPORTER-03003",
                        tableName,
                        lockType,
                        importFile.getAbsolutePath());

                // if the file already exists, delete it first
                if (importFile.exists()) {
                    if (!importFile.delete()) {
                        // abort abnormally if the file could not be deleted
                        throw new BulkLoaderSystemException(getClass(), "TG-IMPORTER-03001",
                                importFile.getName());
                    }
                }

                // extract records and generate the file according to the lock type
                if (ImportTableLockType.TABLE.equals(lockType)) {
                    // TABLE lock: extract records by the search condition
                    createFileWithCondition(
                            conn,
                            tableName,
                            targetTable,
                            importFile);
                } else if (ImportTableLockType.RECORD.equals(lockType)) {
                    // RECORD (row) lock: extract records keyed by the jobflow SID
                    createFileWithJobFlowSid(
                            conn,
                            tableName,
                            targetTable,
                            jobflowSid,
                            importFile);
                } else if (ImportTableLockType.NONE.equals(lockType)) {
                    // NONE (no lock): extract records by the search condition
                    createFileWithCondition(
                            conn,
                            tableName,
                            targetTable,
                            importFile);
                }
                // if no file was generated, create a 0-byte file instead
                if (!importFile.exists()) {
                    try {
                        if (!importFile.createNewFile()) {
                            throw new BulkLoaderSystemException(getClass(), "TG-IMPORTER-03002");
                        }
                        LOG.info("TG-IMPORTER-03005",
                                tableName,
                                lockType,
                                importFile.getAbsolutePath());
                    } catch (IOException e) {
                        throw new BulkLoaderSystemException(getClass(), "TG-IMPORTER-03002");
                    }
                } else {
                    LOG.info("TG-IMPORTER-03004",
                            tableName,
                            lockType,
                            importFile.getAbsolutePath());
                }

                // record the generated file on the table bean
                targetTable.setImportFile(importFile);
            }

            // normal termination
            return true;
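
Once the loop above completes, every table bean carries the local file that was extracted for it. A sketch of reading the results back, assuming a getImportFile() accessor that mirrors the setImportFile(File) call at the end of the loop (the getter name is an assumption):

    for (String tableName : bean.getImportTargetTableList()) {
        // getImportFile() is assumed to return the File set via setImportFile(File)
        File importFile = bean.getTargetTable(tableName).getImportFile();
        System.out.printf("extracted %s -> %s (%d bytes)%n",
                tableName, importFile.getAbsolutePath(), importFile.length());
    }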

Examples of com.asakusafw.bulkloader.bean.ImportTargetTableBean

    @Test
    public void update_cache() throws Exception {
        // build the ImportBean
        Map<String, ImportTargetTableBean> targetTable = new LinkedHashMap<String, ImportTargetTableBean>();

        ImportTargetTableBean tableBean1 = new ImportTargetTableBean();
        tableBean1.setDfsFilePath("/${user}/${execution_id}/import/c1");
        tableBean1.setImportTargetType(ImportTarget1.class);
        tableBean1.setCacheId("c1");
        targetTable.put("IMPORT_TARGET1", tableBean1);

        ImportTargetTableBean tableBean2 = new ImportTargetTableBean();
        tableBean2.setDfsFilePath("/${user}/${execution_id}/import/c2");
        tableBean2.setImportTargetType(ImportTarget1.class);
        tableBean2.setCacheId("c2");
        targetTable.put("IMPORT_TARGET2", tableBean2);

        ImportBean bean = new ImportBean();
        bean.setTargetTable(targetTable);
        bean.setExecutionId(executionId);

        final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        FileList.Writer writer = FileList.createWriter(buffer, true);
        final CacheInfo info = new CacheInfo("a", "c1", Calendar.getInstance(), "IMPORT_TARGET1", Arrays.asList("a", "b"), "X", 0);
        writer.openNext(new FileProtocol(
                FileProtocol.Kind.UPDATE_CACHE,
                tableBean1.getDfsFilePath(),
                info)
        ).close();
        writer.openNext(new FileProtocol(
                FileProtocol.Kind.CONTENT,
                FileNameUtil.createSendImportFileName("IMPORT_TARGET2"),
                null)
        ).close();
        writer.close();
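
        // Same wire format as create_cache above, except the first entry is an
        // UPDATE_CACHE record, so the importer is expected to invoke the cache
        // builder with SUBCOMMAND_UPDATE rather than SUBCOMMAND_CREATE.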

        final File output = folder.newFolder("output");
        final List<String> files = new ArrayList<String>();
        final List<String> builders = new ArrayList<String>();

        // run the class under test
        DummyHdfsFileImport fileImport = new DummyHdfsFileImport(0) {
            @Override
            protected InputStream getInputStream() {
                return new ByteArrayInputStream(buffer.toByteArray());
            }
            @Override
            protected URI resolveLocation(ImportBean ignored, String user, String location) throws BulkLoaderSystemException {
                return new File(output, location).toURI();
            }
            @Override
            protected <T> long write(
                    Class<T> targetTableModel,
                    URI dfsFilePath,
                    InputStream inputStream) throws BulkLoaderSystemException {
                try {
                    inputStream.close();
                    files.add(new File(dfsFilePath).getPath());
                } catch (Exception e) {
                    throw new AssertionError(e);
                }
                return 1;
            }
            @Override
            protected Callable<?> createCacheBuilder(
                    String subcommand,
                    ImportBean ignored,
                    URI location,
                    final CacheInfo target) throws IOException {
                assertThat(subcommand, is(CacheBuildClient.SUBCOMMAND_UPDATE));
                assertThat(target, is(info));
                return new Callable<Void>() {
                    @Override
                    public Void call() throws Exception {
                        builders.add(target.getId());
                        return null;
                    }
                };
            }
        };
        boolean result = fileImport.importFile(bean, "hadoop");

        assertTrue(result);

        assertThat(files.size(), is(2));
        assertThat(files.get(0), endsWith("c1/PATCH/part-0"));
        assertThat(files.get(1), endsWith(tableBean2.getDfsFilePath()));

        Collections.sort(builders);
        assertThat(builders.size(), is(1));
        assertThat(builders.get(0), is("c1"));
    }
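
update_cache mirrors create_cache above; only the FileProtocol kind and the expected cache-builder subcommand differ. The three protocol kinds these snippets exercise, side by side (paths are illustrative, the CacheInfo arguments are copied from the tests, and the create-versus-incremental semantics are inferred from the subcommand names):

    CacheInfo info = new CacheInfo("a", "c1", Calendar.getInstance(),
            "IMPORT_TARGET1", Arrays.asList("a", "b"), "X", 0);
    // first build of a cache: ship the descriptor, builder runs SUBCOMMAND_CREATE
    FileProtocol create = new FileProtocol(FileProtocol.Kind.CREATE_CACHE, "user/import/c1", info);
    // later builds: same descriptor, builder runs SUBCOMMAND_UPDATE
    FileProtocol update = new FileProtocol(FileProtocol.Kind.UPDATE_CACHE, "user/import/c1", info);
    // ordinary (non-cached) import content carries no CacheInfo
    FileProtocol content = new FileProtocol(FileProtocol.Kind.CONTENT, "user/import/target2", null);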

Examples of com.asakusafw.bulkloader.bean.ImportTargetTableBean

    @Test
    public void extract_broken() throws Exception {
        // build the ImportBean
        Map<String, ImportTargetTableBean> targetTable = new LinkedHashMap<String, ImportTargetTableBean>();
        ImportTargetTableBean tableBean1 = new ImportTargetTableBean();
        tableBean1.setDfsFilePath("/${user}/${execution_id}/import/XXX");
        tableBean1.setImportTargetType(ImportTarget1.class);
        targetTable.put("IMPORT_TARGET1", tableBean1);
        ImportBean bean = new ImportBean();
        bean.setTargetTable(targetTable);
        bean.setExecutionId(executionId);


Examples of com.asakusafw.bulkloader.bean.ImportTargetTableBean

    @Test
    public void importFileTest03() throws Exception {
        // build the ImportBean
        Map<String, ImportTargetTableBean> targetTable = new LinkedHashMap<String, ImportTargetTableBean>();
        ImportTargetTableBean tableBean1 = new ImportTargetTableBean();
        tableBean1.setDfsFilePath("Dummy");
        tableBean1.setImportTargetType(this.getClass());
        targetTable.put("IMPORT_TARGET1", tableBean1);
        ImportTargetTableBean tableBean2 = new ImportTargetTableBean();
        tableBean2.setDfsFilePath("Dummy");
        tableBean2.setImportTargetType(this.getClass());
        targetTable.put("IMPORT_TARGET2", tableBean2);
        ImportBean bean = new ImportBean();
        bean.setTargetTable(targetTable);

        // run the class under test

Examples of com.asakusafw.bulkloader.bean.ImportTargetTableBean

    @Test
    public void importFileTest04() throws Exception {
        // build the ImportBean
        Map<String, ImportTargetTableBean> targetTable = new LinkedHashMap<String, ImportTargetTableBean>();
        ImportTargetTableBean tableBean1 = new ImportTargetTableBean();
        tableBean1.setDfsFilePath("Dummy");
        tableBean1.setImportTargetType(this.getClass());
        targetTable.put("IMPORT_TARGET1", tableBean1);
        ImportTargetTableBean tableBean2 = new ImportTargetTableBean();
        tableBean2.setDfsFilePath("Dummy");
        tableBean2.setImportTargetType(this.getClass());
        targetTable.put("IMPORT_TARGET2", tableBean2);
        ImportBean bean = new ImportBean();
        bean.setTargetTable(targetTable);
        bean.setExecutionId(executionId);
