Use of com.qlangtech.tis.order.center.IJoinTaskContext in project plugins by qlangtech.
The class UnionHiveTask, method parseSubTab.
private void parseSubTab() {
IJoinTaskContext chainContext = getContext().getExecContext();
for (String subTaskSql : getSubTaskSqls()) {
HiveInsertFromSelectParser parser = new HiveInsertFromSelectParser();
// Velocity evaluation of subTaskSql has been disabled; the SQL is parsed as written.
parser.start(subTaskSql);
parserList.add(parser);
// collect the parsed column names, excluding partition columns
parser.getCols().stream().filter(column -> !partitionColumns.contains(column.getName())).forEach(column -> columnSet.add(column.getName()));
}
columnSet.addAll(partitionColumns);
// FIXME: setContent is commented out so that the compile phase passes
// setContent(getUnionSql());
chainContext.setAttribute(IParamContext.KEY_BUILD_TARGET_TABLE_NAME, this.getTableName());
chainContext.setAttribute("colsExcludePartitionColsList", columnSet.stream().filter(column -> !partitionColumns.contains(column)).collect(Collectors.toList()));
}
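For context, a downstream stage could read back the two attributes that parseSubTab stores on the chain context. A minimal sketch, assuming IJoinTaskContext.getAttribute returns the value previously stored via setAttribute; the consumer method itself is hypothetical, only the attribute keys come from the code above:

// Hypothetical consumer of the attributes set by parseSubTab.
static void logBuildTarget(IJoinTaskContext chainContext) {
    Object targetTable = chainContext.getAttribute(IParamContext.KEY_BUILD_TARGET_TABLE_NAME);
    @SuppressWarnings("unchecked")
    List<String> cols = (List<String>) chainContext.getAttribute("colsExcludePartitionColsList");
    System.out.println("build target table: " + targetTable + ", non-partition cols: " + cols);
}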
Use of com.qlangtech.tis.order.center.IJoinTaskContext in project plugins by qlangtech.
The class LocalIndexBuilderTriggerFactory, method createBuildJob.
/**
 * Execute the index build trigger task.
 *
 * @param execContext the join task context of the current execution chain
 * @param timePoint   timestamp identifying this build round
 * @param indexName   name of the index collection to build
 * @param groupNum    index group number, parsed as an int
 * @param buildParam  parameters describing the index build
 * @return the trigger that runs the build job
 * @throws Exception
 */
@Override
public IRemoteJobTrigger createBuildJob(IJoinTaskContext execContext, String timePoint, String indexName, String groupNum, IIndexBuildParam buildParam) throws Exception {
String coreName = buildParam.getCoreName(Integer.parseInt(groupNum));
Map<String, String> params = Maps.newHashMap();
params.put(IndexBuildParam.INDEXING_BUILD_TABLE_TITLE_ITEMS, buildParam.getBuildTableTitleItems());
params.put(IndexBuildParam.JOB_TYPE, IndexBuildParam.JOB_TYPE_INDEX_BUILD);
params.put(IndexBuildParam.INDEXING_MAX_DOC_FAILD_LIMIT, "100");
ITISFileSystem fs = this.getFileSystem();
params.put(IndexBuildParam.INDEXING_SOLRCONFIG_PATH, ConfigFileReader.FILE_SOLR.getFsPath(fs, coreName));
params.put(IndexBuildParam.INDEXING_SCHEMA_PATH, ConfigFileReader.FILE_SCHEMA.getFsPath(fs, coreName));
params.put(IndexBuildParam.INDEXING_MAX_NUM_SEGMENTS, "1");
params.put(IndexBuildParam.INDEXING_GROUP_NUM, "0");
params.put(IndexBuildParam.INDEXING_SERVICE_NAME, indexName);
params.put(IndexBuildParam.INDEXING_INCR_TIME, timePoint);
// build the creator that resolves the HDFS source path of the dump data for this build round
IndexBuildSourcePathCreator indexBuildSourcePathCreator = this.createIndexBuildSourcePathCreator(execContext, () -> timePoint);
String sourcePath = URLEncoder.encode(indexBuildSourcePathCreator.build(groupNum), TisUTF8.getName());
params.put(IndexBuildParam.INDEXING_SOURCE_PATH, sourcePath);
// "/user/admin/search4totalpay/all/0/output/20200525134425"
params.put(IndexBuildParam.INDEXING_OUTPUT_PATH, ImportDataProcessInfo.createIndexDir(this.getFileSystem(), timePoint, groupNum, indexName, false));
params.put(IndexBuildParam.INDEXING_CORE_NAME, coreName);
params.put(IParamContext.KEY_TASK_ID, String.valueOf(buildParam.getTaskId()));
params.put(IndexBuildParam.INDEXING_ROW_COUNT, "999");
TaskContext context = TaskContext.create(params);
context.setCoordinator(buildParam.getCoordinator());
return LocalTableDumpFactory.triggerTask(context, (rpc) -> {
IndexBuilderTriggerFactory buildTrigger = LocalIndexBuilderTriggerFactory.this;
IndexBuildNodeMaster.executeIndexBuild(context, buildTrigger, rpc, false);
});
}
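Given the signature above, a caller would invoke the factory along these lines. A minimal sketch: execContext and buildParam are assumed to be prepared by the surrounding execution chain, and the literal arguments merely follow the formats visible in the method body (the yyyyMMddHHmmss time point and the collection name from the sample output path):

// Hypothetical invocation; only the method signature is taken from the code above.
LocalIndexBuilderTriggerFactory factory = new LocalIndexBuilderTriggerFactory();
IRemoteJobTrigger trigger = factory.createBuildJob(execContext, "20200525134425", "search4totalpay", "0", buildParam);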
Use of com.qlangtech.tis.order.center.IJoinTaskContext in project plugins by qlangtech.
The class TestLocalTableDumpAndIndex, method testSingleTableDump.
public void testSingleTableDump() throws Exception {
LocalTableDumpFactory tableDumpFactory = new LocalTableDumpFactory();
File dumpRoot = LocalTableDumpFactory.getLocalOfflineRootDir();
tableDumpFactory.name = "test";
DataSourceFactory mockEmployeesDataSource = MockDataSourceFactory.getMockEmployeesDataSource();
tableDumpFactory.setDataSourceFactoryGetter((tab) -> {
return mockEmployeesDataSource;
});
ITISCoordinator zkCoordinator = MockZKUtils.createZkMock();
// search4(.+?)_shard(\d+?)_replica_n(\d+?)
String mockSolrCore = INDEX_COLLECTION + "_shard1_replica_n1";
IJoinTaskContext execContext = this.mock("execContext", IJoinTaskContext.class);
// the mocked context resolves KEY_BUILD_TARGET_TABLE_NAME to the employees table
EasyMock.expect(execContext.getAttribute(IParamContext.KEY_BUILD_TARGET_TABLE_NAME)).andReturn(getEmployeeTab()).anyTimes();
replay();
int round = 0;
ArrayDeque<Date> createDates = Queues.newArrayDeque();
// run 5 test rounds in total
Date timestamp = null;
while (round++ < 5) {
timestamp = new Date();
createDates.addLast(timestamp);
TaskContext taskContext = MockTaskContextUtils.create(timestamp);
taskContext.setCoordinator(zkCoordinator);
/**
 * -----------------------------------------------------------
 * start the data dump (import) process
 * -----------------------------------------------------------
 */
startDump(tableDumpFactory, taskContext);
/**
 * -----------------------------------------------------------
 * start the index build process
 * -----------------------------------------------------------
 */
startIndexBuild(mockSolrCore, execContext, zkCoordinator, MockTaskContextUtils.timeFormatYyyyMMddHHmmss.get().format(timestamp));
Thread.sleep(1000);
}
int index = 0;
File tableRoot = new File(dumpRoot, DB_EMPLOYEES + "/" + TABLE_EMPLOYEES + "/all");
assertTrue(tableRoot.exists());
String[] subTimeStampFiles = tableRoot.list();
Set<String> timestamps = Sets.newHashSet();
int maxHistorySave = ITableDumpConstant.MAX_PARTITION_SAVE + 1;
while (index++ < maxHistorySave) {
timestamps.add(MockTaskContextUtils.timeFormatYyyyMMddHHmmss.get().format(createDates.pollLast()));
}
assertEquals("maxHistorySave", maxHistorySave, subTimeStampFiles.length);
for (String subFile : subTimeStampFiles) {
assertTrue("shall contain file:" + new File(tableRoot, subFile), timestamps.contains(subFile));
// TODO: further verify that the contents of the directory are correct
}
File indexBuildRoot = new File(dumpRoot, INDEX_COLLECTION + "/all/0/output");
for (String indexBuildRootSub : indexBuildRoot.list()) {
assertTrue("shall contain file:" + new File(indexBuildRoot, indexBuildRootSub), timestamps.contains(indexBuildRootSub));
}
verifyAll();
}
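The retention assertion works because each round writes one timestamped partition directory while the dump keeps only the newest ITableDumpConstant.MAX_PARTITION_SAVE + 1 of them. A minimal sketch restating that expectation; the helper is hypothetical and mirrors the loop over createDates above:

// Hypothetical helper: the newest maxSave directory names are the ones expected to survive.
static Set<String> expectedRetainedPartitions(Deque<Date> createDates, int maxSave, DateFormat fmt) {
    Set<String> retained = new HashSet<>();
    for (int i = 0; i < maxSave && !createDates.isEmpty(); i++) {
        // pollLast() yields the most recent round first
        retained.add(fmt.format(createDates.pollLast()));
    }
    return retained;
}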
Use of com.qlangtech.tis.order.center.IJoinTaskContext in project plugins by qlangtech.
The class TaskExec, method getRemoteJobTrigger.
static IRemoteTaskTrigger getRemoteJobTrigger(DataXJobSubmit.IDataXJobContext jobContext, LocalDataXJobSubmit localDataXJobSubmit, String dataXfileName) {
IJoinTaskContext taskContext = jobContext.getTaskContext();
AtomicBoolean complete = new AtomicBoolean(false);
AtomicBoolean success = new AtomicBoolean(false);
return new IRemoteTaskTrigger() {
DataXJobSingleProcessorExecutor jobConsumer;
boolean hasCanceled;
@Override
public void run() {
try {
MDC.put(IParamContext.KEY_TASK_ID, String.valueOf(taskContext.getTaskId()));
MDC.put(TISCollectionUtils.KEY_COLLECTION, taskContext.getIndexName());
jobConsumer = new DataXJobSingleProcessorExecutor() {
@Override
protected DataXJobSubmit.InstanceType getExecMode() {
return DataXJobSubmit.InstanceType.LOCAL;
}
@Override
protected String getClasspath() {
return localDataXJobSubmit.getClasspath();
}
@Override
protected boolean useRuntimePropEnvProps() {
return false;
}
@Override
protected String[] getExtraJavaSystemPrams() {
return new String[] { "-D" + CenterResource.KEY_notFetchFromCenterRepository + "=true" };
}
@Override
protected String getIncrStateCollectAddress() {
return ZkUtils.getFirstChildValue(((IExecChainContext) taskContext).getZkClient(), ZkUtils.ZK_ASSEMBLE_LOG_COLLECT_PATH);
}
@Override
protected String getMainClassName() {
return localDataXJobSubmit.getMainClassName();
}
@Override
protected File getWorkingDirectory() {
return localDataXJobSubmit.getWorkingDirectory();
}
};
CuratorDataXTaskMessage dataXJob = localDataXJobSubmit.getDataXJobDTO(taskContext, dataXfileName);
// getDataXJobDTO assembles the CuratorDataXTaskMessage (job id, job name, dataX name)
jobConsumer.consumeMessage(dataXJob);
success.set(true);
} catch (Throwable e) {
success.set(false);
if (this.hasCanceled) {
logger.warn("datax:" + taskContext.getIndexName() + " has been canceled");
} else {
logger.error("datax:" + taskContext.getIndexName() + ",jobName:" + dataXfileName, e);
if (!(e instanceof DataXJobSingleProcessorException)) {
throw new RuntimeException(e);
}
}
} finally {
complete.set(true);
}
}
@Override
public String getTaskName() {
return dataXfileName;
}
@Override
public void cancel() {
if (jobConsumer == null) {
return;
}
jobConsumer.runningTask.forEach((taskId, watchdog) -> {
watchdog.destroyProcess();
logger.info("taskId:{} relevant task has been canceled", taskId);
});
this.hasCanceled = true;
}
@Override
public RunningStatus getRunningStatus() {
return new RunningStatus(0, complete.get(), success.get());
}
};
}
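A caller that receives this trigger runs it and then inspects the two flags captured by the closure. A minimal sketch, assuming RunningStatus exposes accessors for the completion and success values; the accessor usage and the job file name are hypothetical:

// Hypothetical driver; run() blocks until the DataX job finishes in this LOCAL implementation.
IRemoteTaskTrigger trigger = TaskExec.getRemoteJobTrigger(jobContext, localDataXJobSubmit, "order_instance_0.json");
trigger.run();
RunningStatus status = trigger.getRunningStatus();
// status now reports completion, and success reflects whether consumeMessage threw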
Use of com.qlangtech.tis.order.center.IJoinTaskContext in project tis by qlangtech.
The class SqlTaskNodeMeta, method getRewriteSql.
@Override
public RewriteSql getRewriteSql(String taskName, TabPartitions dumpPartition, IPrimaryTabFinder erRules, ITemplateContext templateContext, boolean isFinalNode) {
if (dumpPartition.size() < 1) {
throw new IllegalStateException("dumpPartition set size can not be smaller than 1");
}
Optional<List<Expression>> parameters = Optional.empty();
IJoinTaskContext joinContext = templateContext.getExecContext();
SqlStringBuilder builder = new SqlStringBuilder();
SqlRewriter rewriter = new SqlRewriter(builder, dumpPartition, erRules, parameters, isFinalNode, joinContext);
// perform the rewrite
try {
Statement state = getSqlStatement();
rewriter.process(state, 0);
} catch (TisSqlFormatException e) {
throw e;
} catch (Exception e) {
String dp = dumpPartition.toString();
throw new IllegalStateException("task:" + taskName + ",isfinalNode:" + isFinalNode + ",dump tabs pt:" + dp + "\n" + e.getMessage(), e);
}
SqlRewriter.AliasTable primaryTable = rewriter.getPrimayTable();
if (primaryTable == null) {
throw new IllegalStateException("task:" + taskName + " has not find primary table");
}
return new RewriteSql(builder.toString(), primaryTable);
}
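Given the signature above, a node of the join DAG is rewritten roughly as follows. A minimal sketch; the helper, the task name, and its inputs are hypothetical, assuming TabPartitions pairs each dump table with the partition timestamp it was dumped at (as the dp diagnostic above suggests):

// Hypothetical call site for getRewriteSql; all inputs are assumed to be prepared by the execution chain.
static RewriteSql rewriteNode(SqlTaskNodeMeta nodeMeta, TabPartitions dumpPartition,
        IPrimaryTabFinder erRules, ITemplateContext templateContext) {
    // the final node of the DAG is flagged so the rewriter can treat its output specially
    return nodeMeta.getRewriteSql("totalpay_summary", dumpPartition, erRules, templateContext, true);
}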