Use of com.qlangtech.tis.fs.ITISFileSystem in the qlangtech plugins project.
From the class TestLocalTableDumpAndIndex, method startIndexBuild.
public void startIndexBuild(String solrCoreName, IJoinTaskContext execContext, ITISCoordinator zkCoordinator, String timePoint) throws Exception {
    // Kick off a local (in-process) index build for the mock employees table
    // and block until the build job terminates successfully.
    LocalIndexBuilderTriggerFactory triggerFactory = new LocalIndexBuilderTriggerFactory();
    File offlineRootDir = LocalTableDumpFactory.getLocalOfflineRootDir();
    final String indexName = ITestDumpCommon.INDEX_COLLECTION;
    final String groupNum = "0";
    final Integer taskId = 123;
    ITISFileSystem fs = triggerFactory.getFileSystem();
    ImportDataProcessInfo processInfo = new ImportDataProcessInfo(taskId, fs, zkCoordinator);
    processInfo.setIndexName(indexName);
    // Column titles for the build come from the mocked employees table metadata,
    // joined into a single comma-separated string.
    MockDataSourceFactory employeesDataSource = MockDataSourceFactory.getMockEmployeesDataSource();
    List<ColumnMetaData> employeeCols = employeesDataSource.getTableMetadata(TABLE_EMPLOYEES);
    String colTitles = employeeCols.stream().map(ColumnMetaData::getKey).collect(Collectors.joining(","));
    processInfo.setBuildTableTitleItems(colTitles);
    // Materialize the mocked snapshot's schema and solr config into the core dir.
    SnapshotDomain snapshot = com.qlangtech.tis.manage.common.SnapshotDomainUtils.mockEmployeeSnapshotDomain();
    snapshot.writeResource2fs(fs, processInfo.getCoreName(Integer.parseInt(groupNum)), ConfigFileReader.FILE_SCHEMA);
    snapshot.writeResource2fs(fs, processInfo.getCoreName(Integer.parseInt(groupNum)), ConfigFileReader.FILE_SOLR);
    IRemoteJobTrigger buildJob = triggerFactory.createBuildJob(execContext, timePoint, indexName, groupNum, processInfo);
    buildJob.submitJob();
    /**
     * -----------------------------------------------------------
     * Start executing the index build
     * -----------------------------------------------------------
     */
    TestLocalTableDumpAndIndex.waitJobTerminatorAndAssert(buildJob);
    // long hdfsTimeStamp, String hdfsUser, SolrCore core, File indexDir, SolrQueryResponse rsp, String taskId
    indexFlowback2SolrEngineNode(solrCoreName, timePoint, offlineRootDir, taskId);
}
Use of com.qlangtech.tis.fs.ITISFileSystem in the qlangtech plugins project.
From the class TestDataXHudiWriter, method testFlinkSqlTableDDLCreate.
@Test
public void testFlinkSqlTableDDLCreate() throws Exception {
    // Mock the file-system layer so path resolution (root -> table dir -> hudi
    // data dir) can be scripted without touching a real HDFS cluster.
    FileSystemFactory fsFactory = EasyMock.createMock("fsFactory", FileSystemFactory.class);
    ITISFileSystem fs = EasyMock.createMock("fileSystem", ITISFileSystem.class);
    // fs.getRootDir()
    String child = "default/customer_order_relation";
    String dataDir = "hudi";
    IPath rootPath = new HdfsPath(HdfsFileSystemFactoryTestUtils.DEFAULT_HDFS_ADDRESS + "/user/admin");
    IPath tabPath = new HdfsPath(rootPath, child);
    IPath hudiDataPath = new HdfsPath(tabPath, dataDir);
    // Record expectations: each getPath/getRootDir call must occur during replay.
    EasyMock.expect(fs.getPath(rootPath, child)).andReturn(tabPath);
    EasyMock.expect(fs.getPath(tabPath, dataDir)).andReturn(hudiDataPath);
    EasyMock.expect(fs.getRootDir()).andReturn(rootPath);
    EasyMock.expect(fsFactory.getFileSystem()).andReturn(fs);
    // NOTE(review): createDataXWriter runs while the mocks are still in record
    // mode — presumably it only stores the factory without invoking it; confirm.
    HudiTest forTest = createDataXWriter(Optional.of(fsFactory));
    // Write a minimal dataX job descriptor (writer section loaded from the
    // classpath resource) that the processor mock hands back to the writer.
    DataxProcessor dataXProcessor = EasyMock.mock("dataXProcessor", DataxProcessor.class);
    File dataXCfg = folder.newFile();
    FileUtils.writeStringToFile(dataXCfg, "{job:{content:[{\"writer\":" + IOUtils.loadResourceFromClasspath(this.getClass(), hudi_datax_writer_assert_without_optional) + "}]}}", TisUTF8.get());
    List<File> dataXFiles = Lists.newArrayList(dataXCfg);
    EasyMock.expect(dataXProcessor.getDataxCfgFileNames(null)).andReturn(dataXFiles);
    // Route global processor lookups for this dataX name to the mock above.
    DataxProcessor.processorGetter = (dataXName) -> {
        Assert.assertEquals(HdfsFileSystemFactoryTestUtils.testDataXName.getName(), dataXName);
        return dataXProcessor;
    };
    EasyMock.replay(dataXProcessor, fsFactory, fs);
    // NOTE(review): the code that would exercise the mocks (DDL generation via
    // getStreamTableMeta / decorateMergeData) is commented out below, so with
    // strict expectations verify() would fail — this test looks like WIP; confirm.
    // IStreamTableCreator.IStreamTableMeta
    // streamTableMeta = forTest.writer.getStreamTableMeta(HudiWriter.targetTableName);
    // Assert.assertNotNull("streamTableMeta can not be null", streamTableMeta);
    // streamTableMeta.getColsMeta();
    // System.out.println(streamTableMeta.createFlinkTableDDL());
    // DataXHudiWriter.HudiStreamTemplateData tplData
    // = (DataXHudiWriter.HudiStreamTemplateData) forTest.writer.decorateMergeData(
    // new TestStreamTemplateData(HdfsFileSystemFactoryTestUtils.testDataXName, HudiWriter.targetTableName));
    //
    //
    // StringBuffer createTabDdl = tplData.getSinkFlinkTableDDL(HudiWriter.targetTableName);
    // Assert.assertNotNull(createTabDdl);
    //
    // System.out.println(createTabDdl);
    EasyMock.verify(dataXProcessor, fsFactory, fs);
}
Use of com.qlangtech.tis.fs.ITISFileSystem in the qlangtech plugins project.
From the class JoinHiveTask, method processJoinTask.
/**
 * Process a join table: decide whether the target hive table needs to be
 * created (or dropped and re-created) before executing the join SQL.
 *
 * @param sql the INSERT ... SELECT join statement to parse and execute
 */
private void processJoinTask(String sql) {
    try {
        final HiveInsertFromSelectParser insertParser = getSQLParserResult(sql);
        final Connection conn = this.getTaskContext().getObj();
        final EntityName dumpTable = EntityName.parse(this.getName());
        // Guard BEFORE the first dereference: the original code called
        // fileSystem.getRootDir() first, so a null fileSystem surfaced as a
        // bare NullPointerException and this check was dead code.
        if (fileSystem == null) {
            throw new IllegalStateException("fileSys can not be null");
        }
        // Dots in the qualified table name become path separators on the FS.
        final String path = FSHistoryFileUtils.getJoinTableStorePath(fileSystem.getRootDir(), dumpTable).replaceAll("\\.", Path.SEPARATOR);
        ITISFileSystem fs = fileSystem;
        IPath parent = fs.getPath(path);
        initializeHiveTable(this.fileSystem, parent, mrEngine, HdfsFormat.DEFAULT_FORMAT, insertParser.getCols(), insertParser.getColsExcludePartitionCols(), conn, dumpTable, ITableDumpConstant.MAX_PARTITION_SAVE);
    } catch (Exception e) {
        // Wrap and rethrow; callers treat any failure here as fatal for the task.
        throw new RuntimeException(e);
    }
}
Use of com.qlangtech.tis.fs.ITISFileSystem in the qlangtech plugins project.
From the class BasicEngineJob, method initializeHiveTable.
protected void initializeHiveTable(List<HiveColumn> cols) {
    // Delegate hive table initialization to JoinHiveTask, targeting the
    // parent directory of the table-dump path on the configured file system.
    try {
        TT plugin = getWriterPlugin();
        try (Connection connection = plugin.getConnection()) {
            Objects.requireNonNull(this.tabDumpParentPath, "tabDumpParentPath can not be null");
            final ITISFileSystem fileSys = this.getFileSystem();
            final IPath tableRoot = fileSys.getPath(new HdfsPath(this.tabDumpParentPath), "..");
            JoinHiveTask.initializeHiveTable(fileSys, tableRoot, plugin.getEngineType(), parseFSFormat(), cols, colsExcludePartitionCols, connection, dumpTable, this.ptRetainNum);
        }
    } catch (Exception ex) {
        // Any failure (connection, FS, DDL) aborts the job.
        throw new RuntimeException(ex);
    }
}
Use of com.qlangtech.tis.fs.ITISFileSystem in the qlangtech plugins project.
From the class TestDataXSparkWriter, method testDataDump.
public void testDataDump() throws Exception {
    // Build a Spark writer wired to the test HDFS file system and a fixed
    // hive endpoint, then run a dump through the writer template.
    HdfsFileSystemFactory fsFactory = HdfsFileSystemFactoryTestUtils.getFileSystemFactory();
    ITISFileSystem fileSystem = fsFactory.getFileSystem();
    final DefaultHiveConnGetter hiveConnGetter = new DefaultHiveConnGetter();
    hiveConnGetter.dbName = "tis";
    hiveConnGetter.hiveAddress = "192.168.28.200:10000";
    // Anonymous subclass pins the hive connection and file-system factory
    // to the test fixtures instead of loading them from plugin config.
    final DataXSparkWriter sparkWriter = new DataXSparkWriter() {
        @Override
        public IHiveConnGetter getHiveConnGetter() {
            return hiveConnGetter;
        }

        @Override
        public FileSystemFactory getFs() {
            return fsFactory;
        }

        @Override
        public Class<?> getOwnerClass() {
            return DataXSparkWriter.class;
        }
    };
    // Route global writer lookups for this dataX name to the instance above.
    DataxWriter.dataxWriterGetter = (name) -> {
        assertEquals(mysql2hiveDataXName, name);
        return sparkWriter;
    };
    WriterTemplate.realExecuteDump("spark-datax-writer-assert-without-option-val.json", sparkWriter);
}
Aggregations