Use of com.qlangtech.tis.fullbuild.indexbuild.TaskContext in the project plugins by qlangtech,
from the class LocalTableDumpFactory, method createSingleTableDumpJob.
/**
 * Creates a job trigger that dumps a single table's data via the local dump path.
 *
 * @param table   the table to dump; cast to {@link EntityName} for the dump task
 *                (assumes callers always pass an EntityName — TODO confirm)
 * @param context task context carrying the coordinator used by the dump task
 * @return a remote-job trigger that, when fired, runs the table dump
 */
@Override
public IRemoteJobTrigger createSingleTableDumpJob(final IDumpTable table, TaskContext context) {
    return triggerTask(context, (rpc) -> {
        SingleTableDumpTask tableDumpTask = new SingleTableDumpTask((EntityName) table, LocalTableDumpFactory.this, getDataSourceFactory(table), context.getCoordinator().unwrap(), rpc) {

            // no-op: the local dump flavor skips ZK dump-node registration
            @Override
            protected void registerZKDumpNodeIn(TaskContext context) {
            }
        };
        // Start executing the data dump.
        tableDumpTask.map(context);
    });
}
Use of com.qlangtech.tis.fullbuild.indexbuild.TaskContext in the project plugins by qlangtech,
from the class TestLocalTableDumpAndIndex, method testSingleTableDump.
/**
 * End-to-end test of the local single-table dump followed by index build.
 * Runs five dump/build rounds with distinct timestamps, then verifies that
 * only the most recent MAX_PARTITION_SAVE + 1 timestamped partitions are
 * retained on disk, for both the table-dump output and the index-build output.
 */
public void testSingleTableDump() throws Exception {
    // Factory under test, wired to a mocked "employees" data source.
    LocalTableDumpFactory tableDumpFactory = new LocalTableDumpFactory();
    File dumpRoot = LocalTableDumpFactory.getLocalOfflineRootDir();
    tableDumpFactory.name = "test";
    DataSourceFactory mockEmployeesDataSource = MockDataSourceFactory.getMockEmployeesDataSource();
    tableDumpFactory.setDataSourceFactoryGetter((tab) -> {
        return mockEmployeesDataSource;
    });
    ITISCoordinator zkCoordinator = MockZKUtils.createZkMock();
    // Core name must match the pattern: search4(.+?)_shard(\d+?)_replica_n(\d+?)
    String mockSolrCore = INDEX_COLLECTION + "_shard1_replica_n1";
    IJoinTaskContext execContext = this.mock("execContext", IJoinTaskContext.class);
    // EntityName targetTableName = EntityName.parse(DB_EMPLOYEES+"."); ctx.getAttribute(IParamContext.KEY_BUILD_TARGET_TABLE_NAME);
    EasyMock.expect(execContext.getAttribute(IParamContext.KEY_BUILD_TARGET_TABLE_NAME)).andReturn(getEmployeeTab()).anyTimes();
    replay();
    int round = 0;
    // Remembers each round's creation timestamp, newest at the tail.
    ArrayDeque<Date> createDates = Queues.newArrayDeque();
    // Run five rounds in total.
    Date timestamp = null;
    while (round++ < 5) {
        timestamp = new Date();
        createDates.addLast(timestamp);
        TaskContext taskContext = MockTaskContextUtils.create(timestamp);
        taskContext.setCoordinator(zkCoordinator);
        /**
         * -----------------------------------------------------------
         * Start the data-import (dump) flow
         * -----------------------------------------------------------
         */
        startDump(tableDumpFactory, taskContext);
        /**
         * -----------------------------------------------------------
         * Start the index-build flow
         * -----------------------------------------------------------
         */
        startIndexBuild(mockSolrCore, execContext, zkCoordinator, MockTaskContextUtils.timeFormatYyyyMMddHHmmss.get().format(timestamp));
        // Ensure each round gets a distinct timestamped partition directory.
        Thread.sleep(1000);
    }
    int index = 0;
    File tableRoot = new File(dumpRoot, DB_EMPLOYEES + "/" + TABLE_EMPLOYEES + "/all");
    assertTrue(tableRoot.exists());
    String[] subTimeStampFiles = tableRoot.list();
    // Expected surviving partitions: the newest MAX_PARTITION_SAVE + 1 timestamps.
    Set<String> timestamps = Sets.newHashSet();
    int maxHistorySave = ITableDumpConstant.MAX_PARTITION_SAVE + 1;
    while (index++ < maxHistorySave) {
        timestamps.add(MockTaskContextUtils.timeFormatYyyyMMddHHmmss.get().format(createDates.pollLast()));
    }
    assertEquals("maxHistorySave", maxHistorySave, subTimeStampFiles.length);
    for (String subFile : subTimeStampFiles) {
        assertTrue("shall contain file:" + new File(tableRoot, subFile), timestamps.contains(subFile));
        // TODO continue verifying that the folder contents are correct
    }
    // Index-build output must also only contain the retained timestamps.
    File indexBuildRoot = new File(dumpRoot, INDEX_COLLECTION + "/all/0/output");
    for (String indexBuildRootSub : indexBuildRoot.list()) {
        assertTrue("shall contain file:" + new File(indexBuildRoot, indexBuildRootSub), timestamps.contains(indexBuildRootSub));
    }
    verifyAll();
}
Aggregations