Example usage of com.qlangtech.tis.solrdao.ISchemaField in the qlangtech "plugins" project, taken from the class DataXElasticsearchWriter, method mergeFromStupidModel.
/**
 * Converts the simplified ("novice") schema model into the expert-mode JSON model;
 * this is the exact inverse of {@code projectionFromExpertModel}.
 * <p>
 * Existing expert columns are indexed by name so that any extra attributes they
 * already carry survive the merge; the column array of {@code expertSchema} is
 * then rebuilt from the fields of {@code schema}.
 *
 * @param schema       simplified schema whose fields drive the merge
 * @param expertSchema expert-mode JSON document; its column array is replaced in place
 * @return the updated {@code expertSchema}
 * @throws IllegalStateException if a field has a blank name, a tokenized field has no
 *                               analyzer set, or a field type has no registered VisualType
 */
@Override
public JSONObject mergeFromStupidModel(ISchema schema, JSONObject expertSchema) {
    JSONArray mergeTarget = expertSchema.getJSONArray(ESTableAlias.KEY_COLUMN);
    Objects.requireNonNull(mergeTarget, "mergeTarget can not be null");
    // Index the existing expert columns by field name so attributes not managed by
    // the simplified model are preserved for fields that already exist.
    Map<String, JSONObject> mergeFields = Maps.newHashMap();
    for (int i = 0; i < mergeTarget.size(); i++) {
        JSONObject col = mergeTarget.getJSONObject(i);
        mergeFields.put(col.getString("name"), col);
    }
    JSONArray jFields = new com.alibaba.fastjson.JSONArray();
    for (ISchemaField field : schema.getSchemaFields()) {
        if (StringUtils.isBlank(field.getName())) {
            throw new IllegalStateException("field name can not be null");
        }
        JSONObject f = mergeFields.get(field.getName());
        if (f == null) {
            f = new JSONObject();
            f.put(ISchemaField.KEY_NAME, field.getName());
        }
        VisualType type = EsTokenizerType.visualTypeMap.get(field.getTisFieldTypeName());
        // FIX: previously an unmapped field type caused an NPE on type.isSplit();
        // fail fast with a descriptive message instead.
        if (type == null) {
            throw new IllegalStateException("field:" + field.getName()
                    + " relevant type:" + field.getTisFieldTypeName()
                    + " can not find matched VisualType");
        }
        if (type.isSplit()) {
            // A tokenized (analyzed) type must have an analyzer selected.
            if (StringUtils.isEmpty(field.getTokenizerType())) {
                throw new IllegalStateException("field:" + field.getName() + " relevant type is tokenizer but has not set analyzer");
            }
            if (StringUtils.endsWithIgnoreCase(field.getTokenizerType(), EsTokenizerType.NULL.getKey())) {
                // The pseudo "null" tokenizer means: plain type, no analyzer attribute.
                f.put(ISchemaField.KEY_TYPE, EsTokenizerType.NULL.getKey());
                f.remove(ISchemaField.KEY_ANALYZER);
            } else {
                f.put(ISchemaField.KEY_TYPE, type.getType());
                f.put(ISchemaField.KEY_ANALYZER, field.getTokenizerType());
            }
        } else {
            // Non-analyzed type: make sure no stale analyzer attribute remains.
            f.put(ISchemaField.KEY_TYPE, type.getType());
            f.remove(ISchemaField.KEY_ANALYZER);
        }
        // TODO: not yet certain that 'array' matches the multiValue semantics — confirm.
        f.put(ISchemaField.KEY_ARRAY, field.isMultiValue());
        f.put(ISchemaField.KEY_DOC_VALUES, field.isDocValue());
        f.put(ISchemaField.KEY_INDEX, field.isIndexed());
        f.put(ISchemaField.KEY_STORE, field.isStored());
        if (field.isUniqueKey()) {
            f.put(ISchemaField.KEY_PK, true);
        }
        if (field.isSharedKey()) {
            f.put(ISchemaField.KEY_SHARE_KEY, true);
        }
        jFields.add(f);
    }
    expertSchema.put(ESTableAlias.KEY_COLUMN, jFields);
    return expertSchema;
}
Example usage of com.qlangtech.tis.solrdao.ISchemaField in the qlangtech "tis" project, taken from the class CollectionAction, method createCollection.
/**
 * Creates an index (collection) instance bound to the given workflow.
 * <p>
 * Builds the server topology, fills in the application form, merges the workflow
 * columns with the template collection schema (marking the primary key and
 * configuring tokenizers inside the merge callback), then delegates to the
 * overloaded createCollection to actually create the application.
 *
 * @param context        request context
 * @param df             workflow the index is created from; must not be null and must carry an id
 * @param indexName      name of the new index/application
 * @param targetColMetas target table column metadata, including the primary-key column
 * @return the created Application
 * @throws Exception if any step of the creation fails
 */
private Optional<Application> createCollection(Context context, WorkFlow df, String indexName, TargetColumnMeta targetColMetas) throws Exception {
    Objects.requireNonNull(df, "param df can not be null");
    CreateIndexConfirmModel confirmModel = new CreateIndexConfirmModel();
    SelectableServer.ServerNodeTopology coreNode = new SelectableServer.ServerNodeTopology();
    SelectableServer.CoreNode[] coreNodeInfo = SelectableServer.getCoreNodeInfo(this.getRequest(), this, false, true);
    // FIXME: this step should eventually be removed — the host finally submitted should be
    // in IP form (i.e. the value of getNodeName()); the UI content needs adjusting accordingly.
    for (SelectableServer.CoreNode n : coreNodeInfo) {
        n.setHostName(n.getNodeName());
    }
    coreNode.setReplicaCount(1);
    coreNode.setShardCount(SHARED_COUNT);
    coreNode.setHosts(coreNodeInfo);
    confirmModel.setCoreNode(coreNode);
    confirmModel.setTplAppId(getTemplateApp(this).getAppId());
    ExtendApp extendApp = new ExtendApp();
    extendApp.setDptId(SysInitializeAction.DEPARTMENT_DEFAULT_ID);
    extendApp.setName(indexName);
    extendApp.setRecept(this.getUser().getName());
    Objects.requireNonNull(df.getId(), "id of dataflow can not be null");
    // the workflow reference is encoded as "<id>:<name>"
    extendApp.setWorkflow(df.getId() + ":" + df.getName());
    confirmModel.setAppform(extendApp);
    // Merge workflow columns with the template collection schema. The callback mutates
    // each parsed schema field: sets its type, index flag and tokenizer, and finally
    // records the primary/shared key on the parse result.
    SchemaResult schemaResult = SchemaAction.mergeWfColsWithTplCollection(this, context, null, ISchemaPluginContext.NULL, (cols, schemaParseResult) -> {
        ColumnMetaData pkMeta = targetColMetas.getPKMeta();
        PSchemaField field = null;
        ColMetaTuple rft = null;
        TargetCol tcol = null;
        final Map<String, ColMetaTuple> targetCols = targetColMetas.getTargetCols();
        for (ISchemaField f : schemaParseResult.getSchemaFields()) {
            field = (PSchemaField) f;
            rft = targetCols.get(f.getName());
            if (rft == null) {
                throw new IllegalStateException("field:" + f.getName() + " relevant reflect 'SchemaFieldType' can not be null");
            }
            boolean isPk = false;
            if (StringUtils.equals(pkMeta.getKey(), field.getName())) {
                // mark the primary key: always indexed and forced to STRING type
                isPk = true;
                field.setIndexed(true);
                field.setType(schemaParseResult.getTisType(ReflectSchemaFieldType.STRING.literia));
            } else {
                field.setType(schemaParseResult.getTisType(rft.getSchemaFieldType()));
            }
            tcol = targetColMetas.targetColMap.get(field.getName());
            if (tcol != null) {
                if (tcol.isIndexable()) {
                    field.setIndexed(true);
                }
                if (rft.colMeta.getSchemaFieldType().tokenizer) {
                    if (StringUtils.isNotEmpty(tcol.getToken())) {
                        // an explicit tokenizer was requested for this column
                        field.setTokenizerType(tcol.getToken());
                    } else {
                        // the primary key must not be tokenized
                        if (!isPk && rft.isTypeOf(ReflectSchemaFieldType.STRING)) {
                            // String columns default to the 'like' tokenizer
                            field.setTokenizerType(ReflectSchemaFieldType.LIKE.literia);
                        }
                    }
                }
            }
        }
        schemaParseResult.setUniqueKey(pkMeta.getKey());
        schemaParseResult.setSharedKey(pkMeta.getKey());
    });
    // create the index instance
    return this.createCollection(context, confirmModel, schemaResult, (ctx, app, publishSnapshotId, schemaContent) -> {
        return this.createNewApp(ctx, app, publishSnapshotId, schemaContent);
    });
}
Example usage of com.qlangtech.tis.solrdao.ISchemaField in the qlangtech "tis" project, taken from the class TestCollectionAction, method testDoCreate.
/**
 * End-to-end test of the "create collection" action: mocks the ZooKeeper coordinator
 * and state reader, posts a create request for the employees table, verifies the parsed
 * schema fields (PK, types, tokenizers), checks the trigger-build result and cached
 * snapshot, and finally inspects the generated Solr schema XML for the expected
 * 'like' tokenizer gram-size attributes.
 */
public void testDoCreate() throws Exception {
    this.clearUpDB();
    // Mock the ZK coordinator so no real ZooKeeper is needed.
    ITISCoordinator zkCoordinator = MockZKUtils.createZkMock();
    MockZooKeeperGetter.mockCoordinator = zkCoordinator;
    // IExpectationSetters<byte[]> iExpectationSetters = createCoordinatorMock((coord) -> {
    // // EasyMock.expect(coord.getChildren(ZkUtils.ZK_ASSEMBLE_LOG_COLLECT_PATH, null, true))
    // // .andReturn();
    // createAssembleLogCollectPathMock(coord);
    // });
    // iExpectationSetters.times(2);
    TISZkStateReader tisZkStateReader = buildTisZkStateReaderMock();
    SelectableServer.CoreNode coreNode = new SelectableServer.CoreNode();
    coreNode.setHostName("hostname");
    coreNode.setNodeName("nodename");
    EasyMock.expect(tisZkStateReader.getSelectTableNodes()).andReturn(Collections.singletonList(coreNode));
    // Build the create-collection HTTP request payload.
    request.setParameter("emethod", "create");
    request.setParameter("action", "collection_action");
    JSONObject content = getPostJSONContent(TEST_TABLE_EMPLOYEES_NAME);
    request.setContent(content.toJSONString().getBytes(TisUTF8.get()));
    ActionProxy proxy = getActionProxy();
    // Capture the AppKey used during creation so the cached snapshot can be checked later.
    AtomicReference<AppKey> appKeyRef = new AtomicReference<>();
    AddAppAction.appKeyProcess = (key) -> {
        appKeyRef.set(key);
    };
    // Hook into schema parsing to assert the parsed fields match the employees table.
    AtomicBoolean schemaParseResultProcessed = new AtomicBoolean(false);
    SchemaAction.parseResultCallback4test = (cols, schemaParseResult) -> {
        List<PSchemaField> schemaFields = ((ParseResult) schemaParseResult).getSchemaFields();
        assertNotNull(schemaFields);
        assertEquals(8, schemaFields.size());
        Map<String, ISchemaField> fields = schemaFields.stream().collect(Collectors.toMap((c) -> c.getName(), (c) -> c));
        // PK column: STRING type, not tokenized, registered as unique and shared key.
        String emp_no = "emp_no";
        ISchemaField pk = fields.get(emp_no);
        assertNotNull(pk);
        assertTrue(StringUtils.isEmpty(pk.getTokenizerType()));
        assertEquals(ReflectSchemaFieldType.STRING.literia, pk.getTisFieldTypeName());
        assertEquals(emp_no, schemaParseResult.getUniqueKey());
        assertEquals(emp_no, schemaParseResult.getSharedKey());
        // Date columns: DATE type, never tokenized.
        String birth_date = "birth_date";
        ISchemaField field = fields.get(birth_date);
        assertNotNull(field);
        assertEquals(ReflectSchemaFieldType.DATE.literia, field.getTisFieldTypeName());
        assertTrue(StringUtils.isEmpty(field.getTokenizerType()));
        // Plain String columns default to the 'like' tokenizer.
        // String first_name = "first_name";
        field = fields.get(FIELD_EMPLOYEES_FIRST_NAME);
        assertNotNull(field);
        assertEquals(ReflectSchemaFieldType.STRING.literia, field.getTisFieldTypeName());
        assertEquals(ReflectSchemaFieldType.LIKE.literia, field.getTokenizerType());
        // String last_name = "last_name";
        field = fields.get(FIELD_EMPLOYEES_LAST_NAME);
        assertNotNull(field);
        assertEquals(ReflectSchemaFieldType.STRING.literia, field.getTisFieldTypeName());
        assertEquals(ReflectSchemaFieldType.LIKE.literia, field.getTokenizerType());
        // NOTE(review): 'gender' is STRING but has no tokenizer — presumably not marked
        // indexable/tokenizable in the fixture; confirm against the test data.
        String gender = "gender";
        field = fields.get(gender);
        assertNotNull(field);
        assertEquals(ReflectSchemaFieldType.STRING.literia, field.getTisFieldTypeName());
        assertTrue(StringUtils.isEmpty(field.getTokenizerType()));
        String hire_date = "hire_date";
        field = fields.get(hire_date);
        assertNotNull(field);
        assertEquals(ReflectSchemaFieldType.DATE.literia, field.getTisFieldTypeName());
        assertTrue(StringUtils.isEmpty(field.getTokenizerType()));
        schemaParseResultProcessed.set(true);
    };
    this.replay();
    // execute the action
    String result = proxy.execute();
    // assertEquals(Action.NONE, result);
    AjaxValve.ActionExecResult actionExecResult = showBizResult();
    CoreAction.TriggerBuildResult triggerResult = (CoreAction.TriggerBuildResult) actionExecResult.getBizResult();
    assertNotNull("triggerResult can not be null", triggerResult);
    assertTrue(triggerResult.success);
    assertEquals("taskId must large than 0", 1234, triggerResult.getTaskid());
    // SnapshotDomain snapshotDomain = HttpConfigFileReader.getResource(COLLECTION_NAME, targetSnapshotid, RunEnvironment.getSysRuntime(), ConfigFileReader.getAry);
    // the cache should already contain the snapshotDomain by now
    assertNotNull("appKeyRef can not be null", appKeyRef.get());
    SnapshotDomain snapshotDomain = LoadSolrCoreConfigByAppNameServlet.getSnapshotDomain(ConfigFileReader.getConfigList(), appKeyRef.get().setFromCache(true), null);
    assertNotNull("snapshotDomain can not null", snapshotDomain);
    assertTrue(actionExecResult.isSuccess());
    assertTrue("schemaParseResultProcessed must be processd", schemaParseResultProcessed.get());
    this.verifyAll();
    // Walk the generated schema XML: find the 'like' fieldType, drill into its
    // analyzer/tokenizer node, and verify the configured gram sizes.
    AtomicBoolean executed = new AtomicBoolean(false);
    SolrFieldsParser.fieldTypeVisitor = (nodes) -> {
        NamedNodeMap tokenizerAttrs = null;
        outter: for (int i = 0; i < nodes.getLength(); i++) {
            Node node = nodes.item(i);
            NamedNodeMap attrs = node.getAttributes();
            String typeName = DOMUtil.getAttr(attrs, "name");
            if ("like".equals(typeName)) {
                NodeList childNodes = node.getChildNodes();
                for (int ii = 0; ii < childNodes.getLength(); ii++) {
                    Node item = childNodes.item(ii);
                    if ("analyzer".equals(item.getNodeName())) {
                        Node tokenizerNode = null;
                        NodeList analyzerChildNodes = item.getChildNodes();
                        for (int jj = 0; jj < analyzerChildNodes.getLength(); jj++) {
                            tokenizerNode = analyzerChildNodes.item(jj);
                            if ("tokenizer".equals(tokenizerNode.getNodeName())) {
                                tokenizerAttrs = tokenizerNode.getAttributes();
                                assertEquals(ISnapshotViewDAO.KEY_MIN_GRAM_SIZE, minGramSize, Integer.parseInt(DOMUtil.getAttr(tokenizerAttrs, ISnapshotViewDAO.KEY_MIN_GRAM_SIZE)));
                                assertEquals(ISnapshotViewDAO.KEY_MAX_GRAM_SIZE, maxGramSize, Integer.parseInt(DOMUtil.getAttr(tokenizerAttrs, ISnapshotViewDAO.KEY_MAX_GRAM_SIZE)));
                                break outter;
                            }
                        }
                        assertNotNull("tokenizerNode can not be null", tokenizerNode);
                        // =childNodes.item(0).getChildNodes().item(0);
                        break;
                    }
                }
            }
        }
        assertNotNull("tokenizerAttrs can not be null", tokenizerAttrs);
        executed.set(true);
    };
    SolrFieldsParser.parse(() -> {
        return snapshotDomain.getSolrSchema().getContent();
    });
    assertTrue("must have execute", executed.get());
}
Aggregations