
Example 1 with ITISFileSystem

Use of com.qlangtech.tis.fs.ITISFileSystem in the plugins project by qlangtech.

Class TestLocalTableDumpAndIndex, method startIndexBuild.

public void startIndexBuild(String solrCoreName, IJoinTaskContext execContext, ITISCoordinator zkCoordinator, String timePoint) throws Exception {
    LocalIndexBuilderTriggerFactory builderTriggerFactory = new LocalIndexBuilderTriggerFactory();
    File localOfflineDir = LocalTableDumpFactory.getLocalOfflineRootDir();
    String indexName = ITestDumpCommon.INDEX_COLLECTION;
    String groupNum = "0";
    Integer taskId = 123;
    ITISFileSystem fileSystem = builderTriggerFactory.getFileSystem();
    ImportDataProcessInfo buildParam = new ImportDataProcessInfo(taskId, fileSystem, zkCoordinator);
    buildParam.setIndexName(indexName);
    MockDataSourceFactory employeesDataSource = MockDataSourceFactory.getMockEmployeesDataSource();
    List<ColumnMetaData> employeeTableMeta = employeesDataSource.getTableMetadata(TABLE_EMPLOYEES);
    String colsLiteral = employeeTableMeta.stream().map((c) -> c.getKey()).collect(Collectors.joining(","));
    buildParam.setBuildTableTitleItems(colsLiteral);
    SnapshotDomain snapshot = com.qlangtech.tis.manage.common.SnapshotDomainUtils.mockEmployeeSnapshotDomain();
    snapshot.writeResource2fs(fileSystem, buildParam.getCoreName(Integer.parseInt(groupNum)), ConfigFileReader.FILE_SCHEMA);
    snapshot.writeResource2fs(fileSystem, buildParam.getCoreName(Integer.parseInt(groupNum)), ConfigFileReader.FILE_SOLR);
    IRemoteJobTrigger buildJob = builderTriggerFactory.createBuildJob(execContext, timePoint, indexName, groupNum, buildParam);
    buildJob.submitJob();
    /**
     * -----------------------------------------------------------
     * Start executing the index build
     * -----------------------------------------------------------
     */
    TestLocalTableDumpAndIndex.waitJobTerminatorAndAssert(buildJob);
    // long hdfsTimeStamp, String hdfsUser, SolrCore core, File indexDir, SolrQueryResponse rsp, String taskId
    indexFlowback2SolrEngineNode(solrCoreName, timePoint, localOfflineDir, taskId);
}
Also used : IRemoteJobTrigger(com.qlangtech.tis.fullbuild.indexbuild.IRemoteJobTrigger) TaskContext(com.qlangtech.tis.fullbuild.indexbuild.TaskContext) ConfigFileReader(com.qlangtech.tis.manage.common.ConfigFileReader) Date(java.util.Date) ColumnMetaData(com.qlangtech.tis.plugin.ds.ColumnMetaData) TISTestCase(com.qlangtech.tis.test.TISTestCase) ITableDumpConstant(com.qlangtech.tis.order.dump.task.ITableDumpConstant) ITISCoordinator(com.qlangtech.tis.cloud.ITISCoordinator) ITISFileSystem(com.qlangtech.tis.fs.ITISFileSystem) MockTaskContextUtils(com.qlangtech.tis.fullbuild.indexbuild.MockTaskContextUtils) DataSourceFactory(com.qlangtech.tis.plugin.ds.DataSourceFactory) MockTisCoreAdminHandler(org.apache.solr.handler.admin.MockTisCoreAdminHandler) ITestDumpCommon(com.qlangtech.tis.order.dump.task.ITestDumpCommon) IJoinTaskContext(com.qlangtech.tis.order.center.IJoinTaskContext) MockDataSourceFactory(com.qlangtech.tis.order.dump.task.MockDataSourceFactory) Set(java.util.Set) FileUtils(org.apache.commons.io.FileUtils) IOException(java.io.IOException) EasyMock(org.easymock.EasyMock) Collectors(java.util.stream.Collectors) Sets(com.google.common.collect.Sets) File(java.io.File) MockZKUtils(com.qlangtech.tis.cloud.MockZKUtils) TimeUnit(java.util.concurrent.TimeUnit) LocalIndexBuilderTriggerFactory(com.qlangtech.tis.indexbuild.LocalIndexBuilderTriggerFactory) CountDownLatch(java.util.concurrent.CountDownLatch) Queues(com.google.common.collect.Queues) LocalTableDumpFactory(com.qlangtech.tis.dump.LocalTableDumpFactory) IParamContext(com.qlangtech.tis.order.center.IParamContext) SolrQueryResponse(org.apache.solr.response.SolrQueryResponse) List(java.util.List) SnapshotDomain(com.qlangtech.tis.manage.common.SnapshotDomain) RunningStatus(com.qlangtech.tis.fullbuild.indexbuild.RunningStatus) ArrayDeque(java.util.ArrayDeque) ImportDataProcessInfo(com.qlangtech.tis.trigger.jst.ImportDataProcessInfo)
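
The helper waitJobTerminatorAndAssert called above is not shown on this page. The following is a minimal sketch of how such a wait could look, assuming IRemoteJobTrigger exposes a getRunningStatus() accessor and RunningStatus exposes isComplete()/isSuccess(); these names are inferred from the imports listed above and are not verified against the project.

import java.util.concurrent.TimeUnit;

import org.junit.Assert;

import com.qlangtech.tis.fullbuild.indexbuild.IRemoteJobTrigger;
import com.qlangtech.tis.fullbuild.indexbuild.RunningStatus;

public class JobWaitSketch {

    // Poll the submitted build job until it reports completion, then assert that it succeeded.
    // The accessor names below are assumptions (see the note above).
    public static void waitJobTerminatorAndAssert(IRemoteJobTrigger buildJob) throws InterruptedException {
        RunningStatus status;
        do {
            // back off between polls so the test does not busy-wait
            TimeUnit.SECONDS.sleep(1);
            status = buildJob.getRunningStatus(); // assumed accessor
        } while (!status.isComplete()); // assumed accessor
        Assert.assertTrue("index build must finish successfully", status.isSuccess()); // assumed accessor
    }
}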

Example 2 with ITISFileSystem

Use of com.qlangtech.tis.fs.ITISFileSystem in the plugins project by qlangtech.

Class TestDataXHudiWriter, method testFlinkSqlTableDDLCreate.

@Test
public void testFlinkSqlTableDDLCreate() throws Exception {
    FileSystemFactory fsFactory = EasyMock.createMock("fsFactory", FileSystemFactory.class);
    ITISFileSystem fs = EasyMock.createMock("fileSystem", ITISFileSystem.class);
    // fs.getRootDir()
    String child = "default/customer_order_relation";
    String dataDir = "hudi";
    IPath rootPath = new HdfsPath(HdfsFileSystemFactoryTestUtils.DEFAULT_HDFS_ADDRESS + "/user/admin");
    IPath tabPath = new HdfsPath(rootPath, child);
    IPath hudiDataPath = new HdfsPath(tabPath, dataDir);
    EasyMock.expect(fs.getPath(rootPath, child)).andReturn(tabPath);
    EasyMock.expect(fs.getPath(tabPath, dataDir)).andReturn(hudiDataPath);
    EasyMock.expect(fs.getRootDir()).andReturn(rootPath);
    EasyMock.expect(fsFactory.getFileSystem()).andReturn(fs);
    HudiTest forTest = createDataXWriter(Optional.of(fsFactory));
    DataxProcessor dataXProcessor = EasyMock.mock("dataXProcessor", DataxProcessor.class);
    File dataXCfg = folder.newFile();
    FileUtils.writeStringToFile(dataXCfg, "{job:{content:[{\"writer\":" + IOUtils.loadResourceFromClasspath(this.getClass(), hudi_datax_writer_assert_without_optional) + "}]}}", TisUTF8.get());
    List<File> dataXFiles = Lists.newArrayList(dataXCfg);
    EasyMock.expect(dataXProcessor.getDataxCfgFileNames(null)).andReturn(dataXFiles);
    DataxProcessor.processorGetter = (dataXName) -> {
        Assert.assertEquals(HdfsFileSystemFactoryTestUtils.testDataXName.getName(), dataXName);
        return dataXProcessor;
    };
    EasyMock.replay(dataXProcessor, fsFactory, fs);
    // IStreamTableCreator.IStreamTableMeta
    // streamTableMeta = forTest.writer.getStreamTableMeta(HudiWriter.targetTableName);
    // Assert.assertNotNull("streamTableMeta can not be null", streamTableMeta);
    // streamTableMeta.getColsMeta();
    // System.out.println(streamTableMeta.createFlinkTableDDL());
    // DataXHudiWriter.HudiStreamTemplateData tplData
    // = (DataXHudiWriter.HudiStreamTemplateData) forTest.writer.decorateMergeData(
    // new TestStreamTemplateData(HdfsFileSystemFactoryTestUtils.testDataXName, HudiWriter.targetTableName));
    // 
    // 
    // StringBuffer createTabDdl = tplData.getSinkFlinkTableDDL(HudiWriter.targetTableName);
    // Assert.assertNotNull(createTabDdl);
    // 
    // System.out.println(createTabDdl);
    EasyMock.verify(dataXProcessor, fsFactory, fs);
}
Also used : DataxProcessor(com.qlangtech.tis.datax.impl.DataxProcessor) IDataxProcessor(com.qlangtech.tis.datax.IDataxProcessor) HdfsPath(com.qlangtech.tis.hdfs.impl.HdfsPath) IPath(com.qlangtech.tis.fs.IPath) ITISFileSystem(com.qlangtech.tis.fs.ITISFileSystem) File(java.io.File) FileSystemFactory(com.qlangtech.tis.offline.FileSystemFactory) Test(org.junit.Test)
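
For reference, the getPath expectations recorded in the mock above mirror plain HdfsPath composition. A self-contained sketch of the same path chaining follows; the HDFS address is illustrative, not the test's DEFAULT_HDFS_ADDRESS.

import com.qlangtech.tis.fs.IPath;
import com.qlangtech.tis.hdfs.impl.HdfsPath;

public class HudiPathSketch {

    public static void main(String[] args) {
        // hdfs://namenode:9000 is an illustrative address
        IPath root = new HdfsPath("hdfs://namenode:9000/user/admin");
        IPath tabPath = new HdfsPath(root, "default/customer_order_relation");
        // "hudi" mirrors the dataDir child used in the test above
        IPath hudiDataPath = new HdfsPath(tabPath, "hudi");
        // print whatever HdfsPath's toString yields for the composed path
        System.out.println(hudiDataPath);
    }
}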

Example 3 with ITISFileSystem

Use of com.qlangtech.tis.fs.ITISFileSystem in the plugins project by qlangtech.

Class JoinHiveTask, method processJoinTask.

/**
 * Process the join table: decide whether the table needs to be created automatically,
 * or dropped and recreated.
 *
 * @param sql
 */
private void processJoinTask(String sql) {
    try {
        final HiveInsertFromSelectParser insertParser = getSQLParserResult(sql);
        final Connection conn = this.getTaskContext().getObj();
        // final DumpTable dumpTable =
        // DumpTable.createTable(insertParser.getTargetTableName());
        final EntityName dumpTable = EntityName.parse(this.getName());
        if (fileSystem == null) {
            throw new IllegalStateException("fileSystem can not be null");
        }
        final String path = FSHistoryFileUtils.getJoinTableStorePath(fileSystem.getRootDir(), dumpTable).replaceAll("\\.", Path.SEPARATOR);
        ITISFileSystem fs = fileSystem;
        IPath parent = fs.getPath(path);
        initializeHiveTable(this.fileSystem, parent, mrEngine, HdfsFormat.DEFAULT_FORMAT, insertParser.getCols(), insertParser.getColsExcludePartitionCols(), conn, dumpTable, ITableDumpConstant.MAX_PARTITION_SAVE);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
Also used : EntityName(com.qlangtech.tis.sql.parser.tuple.creator.EntityName) IPath(com.qlangtech.tis.fs.IPath) ITISFileSystem(com.qlangtech.tis.fs.ITISFileSystem) Connection(java.sql.Connection) ParseException(org.apache.hadoop.hive.ql.parse.ParseException) HiveInsertFromSelectParser(com.qlangtech.tis.hive.HiveInsertFromSelectParser)
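
The replaceAll call in processJoinTask turns the dotted store path returned by FSHistoryFileUtils.getJoinTableStorePath into a slash-separated HDFS path (org.apache.hadoop.fs.Path.SEPARATOR is "/"). A standalone illustration with a hypothetical input value:

public class DotPathSketch {

    public static void main(String[] args) {
        // Hypothetical example value; the real string comes from FSHistoryFileUtils.getJoinTableStorePath(...)
        String dotted = "user/admin/tis.customer_order_relation";
        // dots become path separators
        String hdfsStylePath = dotted.replaceAll("\\.", "/");
        System.out.println(hdfsStylePath); // user/admin/tis/customer_order_relation
    }
}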

Example 4 with ITISFileSystem

Use of com.qlangtech.tis.fs.ITISFileSystem in the plugins project by qlangtech.

Class BasicEngineJob, method initializeHiveTable.

protected void initializeHiveTable(List<HiveColumn> cols) {
    try {
        TT writerPlugin = getWriterPlugin();
        try (Connection conn = writerPlugin.getConnection()) {
            Objects.requireNonNull(this.tabDumpParentPath, "tabDumpParentPath can not be null");
            ITISFileSystem fs = this.getFileSystem();
            JoinHiveTask.initializeHiveTable(fs, fs.getPath(new HdfsPath(this.tabDumpParentPath), ".."), writerPlugin.getEngineType(), parseFSFormat(), cols, colsExcludePartitionCols, conn, dumpTable, this.ptRetainNum);
        }
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
Also used : HdfsPath(com.qlangtech.tis.hdfs.impl.HdfsPath) ITISFileSystem(com.qlangtech.tis.fs.ITISFileSystem) Connection(java.sql.Connection) SQLException(java.sql.SQLException) IOException(java.io.IOException)

Example 5 with ITISFileSystem

Use of com.qlangtech.tis.fs.ITISFileSystem in the plugins project by qlangtech.

Class TestDataXSparkWriter, method testDataDump.

public void testDataDump() throws Exception {
    // final DataxWriter dataxWriter = DataxWriter.load(null, mysql2hiveDataXName);
    HdfsFileSystemFactory hdfsFileSystemFactory = HdfsFileSystemFactoryTestUtils.getFileSystemFactory();
    ITISFileSystem fileSystem = hdfsFileSystemFactory.getFileSystem();
    final DefaultHiveConnGetter hiveConnGetter = new DefaultHiveConnGetter();
    hiveConnGetter.dbName = "tis";
    hiveConnGetter.hiveAddress = "192.168.28.200:10000";
    // HdfsPath historicalPath = new HdfsPath(hdfsFileSystemFactory.rootDir + "/" + hiveConnGetter.dbName + "/customer_order_relation");
    // fileSystem.delete(historicalPath, true);
    final DataXSparkWriter dataxWriter = new DataXSparkWriter() {

        @Override
        public IHiveConnGetter getHiveConnGetter() {
            return hiveConnGetter;
        }

        @Override
        public FileSystemFactory getFs() {
            return hdfsFileSystemFactory;
        }

        @Override
        public Class<?> getOwnerClass() {
            return DataXSparkWriter.class;
        }
    };
    DataxWriter.dataxWriterGetter = (name) -> {
        assertEquals(mysql2hiveDataXName, name);
        return dataxWriter;
    };
    WriterTemplate.realExecuteDump("spark-datax-writer-assert-without-option-val.json", dataxWriter);
}
Also used : DefaultHiveConnGetter(com.qlangtech.tis.hive.DefaultHiveConnGetter) ITISFileSystem(com.qlangtech.tis.fs.ITISFileSystem) HdfsFileSystemFactory(com.qlangtech.tis.hdfs.impl.HdfsFileSystemFactory)

Aggregations

ITISFileSystem (com.qlangtech.tis.fs.ITISFileSystem) 14
IPath (com.qlangtech.tis.fs.IPath) 6
IPathInfo (com.qlangtech.tis.fs.IPathInfo) 4
HdfsPath (com.qlangtech.tis.hdfs.impl.HdfsPath) 3
FileSystemFactory (com.qlangtech.tis.offline.FileSystemFactory) 3
IOException (java.io.IOException) 3
Connection (java.sql.Connection) 3
HdfsFileSystemFactory (com.qlangtech.tis.hdfs.impl.HdfsFileSystemFactory) 2
IJoinTaskContext (com.qlangtech.tis.order.center.IJoinTaskContext) 2
EntityName (com.qlangtech.tis.sql.parser.tuple.creator.EntityName) 2
File (java.io.File) 2
Configuration (com.alibaba.datax.common.util.Configuration) 1
HdfsColMeta (com.alibaba.datax.plugin.writer.hdfswriter.HdfsColMeta) 1
HdfsWriterErrorCode (com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriterErrorCode) 1
Key (com.alibaba.datax.plugin.writer.hdfswriter.Key) 1
Queues (com.google.common.collect.Queues) 1
Sets (com.google.common.collect.Sets) 1
ITISCoordinator (com.qlangtech.tis.cloud.ITISCoordinator) 1
MockZKUtils (com.qlangtech.tis.cloud.MockZKUtils) 1
DumpJobId (com.qlangtech.tis.cloud.dump.DumpJobId) 1
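
Taken together, the examples share one pattern: obtain an ITISFileSystem from a FileSystemFactory, then compose IPath instances relative to its root directory. A minimal sketch of that recurring pattern, using only calls that appear in the examples above:

import com.qlangtech.tis.fs.IPath;
import com.qlangtech.tis.fs.ITISFileSystem;
import com.qlangtech.tis.offline.FileSystemFactory;

public class FileSystemPathSketch {

    // Resolve a table directory the way the tests above do:
    // factory -> ITISFileSystem -> root IPath -> child IPath.
    public static IPath resolveTablePath(FileSystemFactory fsFactory, String child) {
        ITISFileSystem fs = fsFactory.getFileSystem();
        IPath root = fs.getRootDir();
        return fs.getPath(root, child);
    }
}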