Use of com.thinkbiganalytics.search.rest.model.HiveColumn in project kylo by Teradata.
From the class SolrSearchResultTransform, the method getSchemaSearchResultData:
private SchemaSearchResultData getSchemaSearchResultData(Map<String, Object> solrDocumentFieldValueMap, QueryResponse solrResponse) {
    SchemaSearchResultData schemaSearchResultData = new SchemaSearchResultData();
    schemaSearchResultData.setDatabaseName(toString(solrDocumentFieldValueMap.get("databaseName")));
    schemaSearchResultData.setDatabaseOwner(toString(solrDocumentFieldValueMap.get("databaseOwner")));
    schemaSearchResultData.setTableCreateTime(toString(solrDocumentFieldValueMap.get("tableCreateTime")));
    schemaSearchResultData.setTableName(toString(solrDocumentFieldValueMap.get("tableName")));
    schemaSearchResultData.setTableType(toString(solrDocumentFieldValueMap.get("tableType")));
    List<HiveColumn> hiveColumns = new ArrayList<>();
    List<Pair> highlightsList = new ArrayList<>();
    if (solrDocumentFieldValueMap.containsKey("columnName")) {
        hiveColumns.add(new HiveColumn(toString(solrDocumentFieldValueMap.get("columnName")), toString(solrDocumentFieldValueMap.get("columnType")), toString(solrDocumentFieldValueMap.get("columnComment"))));
    } else if (solrDocumentFieldValueMap.containsKey("hiveColumns")) {
        @SuppressWarnings("unchecked") final List<Map<String, Object>> columns = ObjectMapperSerializer.deserialize(toString(solrDocumentFieldValueMap.get("hiveColumns")), List.class);
        columns.forEach(column -> {
            hiveColumns.add(new HiveColumn(toString(column.get("columnName")), toString(column.get("columnType")), toString(column.get("columnComment"))));
        });
    }
    for (Map.Entry<String, List<String>> entry : solrResponse.getHighlighting().get(solrDocumentFieldValueMap.get("id")).entrySet()) {
        String key = entry.getKey();
        if (key.equals("columnName")) {
            key = "Column name";
        }
        if (entry.getValue().size() >= 1) {
            highlightsList.add(new Pair(key, entry.getValue().get(0)));
        }
    }
    schemaSearchResultData.setHighlights(highlightsList);
    schemaSearchResultData.setHiveColumns(hiveColumns);
    schemaSearchResultData.setRawData(solrDocumentFieldValueMap);
    schemaTypeResultCount += 1L;
    return schemaSearchResultData;
}
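When the hiveColumns field is present, the Solr transform deserializes its JSON string value into a list of maps via kylo's ObjectMapperSerializer and rebuilds HiveColumn objects from it. A minimal sketch of that same step, using Jackson directly instead of ObjectMapperSerializer; the field layout and the HiveColumn constructor are taken from the snippet above, while the helper class and method names are hypothetical:

// Sketch only: turning a serialized hiveColumns JSON string back into HiveColumn objects.
// Assumes the field holds JSON of the form [{"columnName":"...","columnType":"...","columnComment":"..."}, ...].
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.thinkbiganalytics.search.rest.model.HiveColumn;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

public class HiveColumnsParseSketch {

    public static List<HiveColumn> parse(String hiveColumnsJson) throws IOException {
        ObjectMapper mapper = new ObjectMapper();
        // Read the JSON array into a list of maps, mirroring the List.class deserialization above
        List<Map<String, Object>> columns =
                mapper.readValue(hiveColumnsJson, new TypeReference<List<Map<String, Object>>>() { });
        List<HiveColumn> hiveColumns = new ArrayList<>();
        for (Map<String, Object> column : columns) {
            // Same three fields the transform reads from each column map
            hiveColumns.add(new HiveColumn(
                    String.valueOf(column.get("columnName")),
                    String.valueOf(column.get("columnType")),
                    String.valueOf(column.get("columnComment"))));
        }
        return hiveColumns;
    }
}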
Use of com.thinkbiganalytics.search.rest.model.HiveColumn in project kylo by Teradata.
From the class ElasticSearchRestService, the method transformElasticSearchRestResponse:
private ElasticSearchRestSearchResponse transformElasticSearchRestResponse(Response response) {
    if (response == null) {
        log.warn("Null response from Elasticsearch (rest client)");
        return null;
    }
    ElasticSearchRestSearchResponse elasticSearchRestSearchResponse = new ElasticSearchRestSearchResponse();
    elasticSearchRestSearchResponse.setRequestLine(response.getRequestLine().toString());
    elasticSearchRestSearchResponse.setHost(response.getHost().toString());
    elasticSearchRestSearchResponse.setStatusLine(response.getStatusLine().toString());
    elasticSearchRestSearchResponse.setRawEntity(response.getEntity());
    for (int i = 0; i < response.getHeaders().length; i++) {
        List<String> currentHeaders = elasticSearchRestSearchResponse.getHeaders();
        currentHeaders.add(response.getHeaders()[i].toString());
        elasticSearchRestSearchResponse.setHeaders(currentHeaders);
    }
    HttpEntity entity = response.getEntity();
    if (entity == null) {
        return null;
    }
    try {
        String entityString = EntityUtils.toString(response.getEntity());
        JSONObject entityStringJsonObject = new JSONObject(entityString);
        String tookInMs = entityStringJsonObject.getString("took");
        elasticSearchRestSearchResponse.setTookInMillis(Long.parseLong(tookInMs));
        JSONObject hitsJsonObject = entityStringJsonObject.getJSONObject("hits");
        elasticSearchRestSearchResponse.setTotalResults(hitsJsonObject.getLong("total"));
        JSONArray hitsJsonArray = hitsJsonObject.getJSONArray("hits");
        List<ElasticSearchRestSearchHit> elasticSearchRestSearchHits = new ArrayList<>();
        for (int i = 0; i < hitsJsonArray.length(); i++) {
            ElasticSearchRestSearchHit elasticSearchRestSearchHit = new ElasticSearchRestSearchHit();
            JSONObject currentHitJsonObject = new JSONObject(hitsJsonArray.get(i).toString());
            elasticSearchRestSearchHit.setIndexName(currentHitJsonObject.get("_index").toString());
            elasticSearchRestSearchHit.setIndexType(currentHitJsonObject.get("_type").toString());
            JSONObject currentHitSourceJsonObject = new JSONObject(currentHitJsonObject.get("_source").toString());
            elasticSearchRestSearchHit.setRawHit(currentHitJsonObject.get("_source").toString());
            List<Pair> sourceList = new ArrayList<>();
            Iterator sourceIterator = currentHitSourceJsonObject.keys();
            while (sourceIterator.hasNext()) {
                String sourceKey = (String) sourceIterator.next();
                sourceList.add(new Pair(sourceKey, currentHitSourceJsonObject.get(sourceKey)));
                if (sourceKey.equals("hiveColumns")) {
                    List<HiveColumn> hiveColumns = new ArrayList<>();
                    String newHiveColumns = "{\"a\":" + currentHitSourceJsonObject.get(sourceKey).toString() + "}";
                    JSONObject hiveColumnsJsonObject = new JSONObject(newHiveColumns);
                    JSONArray hiveColumnsJsonArray = hiveColumnsJsonObject.getJSONArray("a");
                    for (int x = 0; x < hiveColumnsJsonArray.length(); x++) {
                        JSONObject hiveColumnJsonObject = new JSONObject(hiveColumnsJsonArray.get(x).toString());
                        Iterator hiveColumnIterator = hiveColumnJsonObject.keys();
                        String columnName = "";
                        String columnType = "";
                        String columnComment = "";
                        while (hiveColumnIterator.hasNext()) {
                            String columnKey = (String) hiveColumnIterator.next();
                            switch (columnKey) {
                                case "columnName":
                                    columnName = hiveColumnJsonObject.get(columnKey).toString();
                                    break;
                                case "columnType":
                                    columnType = hiveColumnJsonObject.get(columnKey).toString();
                                    break;
                                case "columnComment":
                                    columnComment = hiveColumnJsonObject.get(columnKey).toString();
                                    break;
                                default:
                                    break;
                            }
                        }
                        hiveColumns.add(new HiveColumn(columnName, columnType, columnComment));
                    }
                    elasticSearchRestSearchHit.setHiveColumns(hiveColumns);
                }
                sourceIterator.remove();
            }
            elasticSearchRestSearchHit.setSource(sourceList);
            JSONObject currentHitHighlightJsonObject = new JSONObject(currentHitJsonObject.get("highlight").toString());
            List<Pair> highlightsList = new ArrayList<>();
            Iterator highlightIterator = currentHitHighlightJsonObject.keys();
            while (highlightIterator.hasNext()) {
                String highlightKey = (String) highlightIterator.next();
                JSONArray highlightArray = currentHitHighlightJsonObject.getJSONArray(highlightKey);
                if (highlightArray.length() > 0) {
                    highlightsList.add(new Pair(highlightKey, highlightArray.get(0)));
                }
                highlightIterator.remove();
            }
            elasticSearchRestSearchHit.setHighlights(highlightsList);
            elasticSearchRestSearchHits.add(elasticSearchRestSearchHit);
        }
        elasticSearchRestSearchResponse.setElasticSearchRestSearchHits(elasticSearchRestSearchHits);
        return elasticSearchRestSearchResponse;
    } catch (IOException | JSONException exception) {
        log.warn("An error occurred while decoding the search result", exception);
    }
    return null;
}
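Because the hiveColumns value in each hit's _source is a bare JSON array, the method above wraps it in a dummy object ({"a": ...}) so it can be fetched through JSONObject.getJSONArray. A minimal sketch of the same parsing step that reads the array string directly, assuming the JSONObject/JSONArray API shown above behaves like org.json; the helper class and method names are hypothetical:

// Sketch only: reading the hiveColumns array without the wrapper-object workaround.
// Assumes org.json-style JSONArray/JSONObject and the columnName/columnType/columnComment
// field names seen in the indexed documents above.
import com.thinkbiganalytics.search.rest.model.HiveColumn;
import java.util.ArrayList;
import java.util.List;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

public class HiveColumnsArraySketch {

    public static List<HiveColumn> fromJsonArray(String hiveColumnsArrayJson) throws JSONException {
        List<HiveColumn> hiveColumns = new ArrayList<>();
        // A JSONArray can be built straight from the array string, so no {"a": ...} wrapper is needed
        JSONArray columns = new JSONArray(hiveColumnsArrayJson);
        for (int i = 0; i < columns.length(); i++) {
            JSONObject column = columns.getJSONObject(i);
            // optString returns "" when a key is absent, matching the empty-string defaults above
            hiveColumns.add(new HiveColumn(
                    column.optString("columnName"),
                    column.optString("columnType"),
                    column.optString("columnComment")));
        }
        return hiveColumns;
    }
}

Whether the JSON library used by this service exposes exactly these calls is an assumption; the sketch only illustrates the array-parsing idea.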
Use of com.thinkbiganalytics.search.rest.model.HiveColumn in project kylo by Teradata.
From the class ElasticSearchRestSearchResultTransform, the method getSchemaSearchResultData:
private SchemaSearchResultData getSchemaSearchResultData(ElasticSearchRestSearchHit elasticSearchRestSearchHit) {
    final String DATABASE_NAME = "databaseName";
    final String DATABASE_OWNER = "databaseOwner";
    final String TABLE_CREATE_TIME = "tableCreateTime";
    final String TABLE_NAME = "tableName";
    final String TABLE_TYPE = "tableType";
    final String HIVE_COLUMNS = "hiveColumns";
    final String COLUMN_TYPE = "columnType";
    final String COLUMN_NAME = "columnName";
    final String COLUMN_COMMENT = "columnComment";
    final String HIVE_COLUMNS_COLUMN_TYPE = HIVE_COLUMNS + "." + COLUMN_TYPE;
    final String HIVE_COLUMNS_COLUMN_TYPE_NEW_DESCRIPTION = "Column type";
    final String HIVE_COLUMNS_COLUMN_NAME = HIVE_COLUMNS + "." + COLUMN_NAME;
    final String HIVE_COLUMNS_COLUMN_NAME_NEW_DESCRIPTION = "Column name";
    final String HIVE_COLUMNS_COLUMN_COMMENT = HIVE_COLUMNS + "." + COLUMN_COMMENT;
    final String HIVE_COLUMNS_COLUMN_COMMENT_NEW_DESCRIPTION = "Column comment";
    SchemaSearchResultData schemaSearchResultData = new SchemaSearchResultData();
    schemaSearchResultData.setDatabaseName(elasticSearchRestSearchHit.findValueForKeyInSourceWithDefault(DATABASE_NAME, EMPTY_STRING).toString());
    schemaSearchResultData.setDatabaseOwner(elasticSearchRestSearchHit.findValueForKeyInSourceWithDefault(DATABASE_OWNER, EMPTY_STRING).toString());
    schemaSearchResultData.setTableCreateTime(elasticSearchRestSearchHit.findValueForKeyInSourceWithDefault(TABLE_CREATE_TIME, EMPTY_STRING).toString());
    schemaSearchResultData.setTableName(elasticSearchRestSearchHit.findValueForKeyInSourceWithDefault(TABLE_NAME, EMPTY_STRING).toString());
    schemaSearchResultData.setTableType(elasticSearchRestSearchHit.findValueForKeyInSourceWithDefault(TABLE_TYPE, EMPTY_STRING).toString());
    List<HiveColumn> hiveColumns = new ArrayList<>();
    List<Pair> highlightsList = new ArrayList<>();
    if (!CollectionUtils.isEmpty(elasticSearchRestSearchHit.getHiveColumns())) {
        hiveColumns.addAll(elasticSearchRestSearchHit.getHiveColumns());
    }
    for (Pair highlightPair : elasticSearchRestSearchHit.getHighlights()) {
        String key = highlightPair.getKey();
        switch (key) {
            case HIVE_COLUMNS_COLUMN_TYPE:
                key = HIVE_COLUMNS_COLUMN_TYPE_NEW_DESCRIPTION;
                break;
            case HIVE_COLUMNS_COLUMN_NAME:
                key = HIVE_COLUMNS_COLUMN_NAME_NEW_DESCRIPTION;
                break;
            case HIVE_COLUMNS_COLUMN_COMMENT:
                key = HIVE_COLUMNS_COLUMN_COMMENT_NEW_DESCRIPTION;
                break;
            default:
                break;
        }
        highlightsList.add(new Pair(key, highlightPair.getValue()));
    }
    schemaSearchResultData.setHighlights(highlightsList);
    schemaSearchResultData.setHiveColumns(hiveColumns);
    Map<String, Object> rawData = new HashMap<>();
    rawData.put(RAW_DATA_KEY, elasticSearchRestSearchHit.getRawHit());
    schemaSearchResultData.setRawData(rawData);
    schemaTypeResultCount += 1L;
    return schemaSearchResultData;
}
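The switch above only renames highlight keys such as hiveColumns.columnName to human-readable labels before they are stored as Pair entries. The same mapping can be expressed as a lookup table; a minimal sketch with the keys and labels taken from the snippet, where the helper class itself is hypothetical:

// Sketch only: the highlight-key renaming from the switch above, expressed as a lookup map.
import java.util.HashMap;
import java.util.Map;

public class HighlightLabelSketch {

    private static final Map<String, String> LABELS = new HashMap<>();

    static {
        LABELS.put("hiveColumns.columnName", "Column name");
        LABELS.put("hiveColumns.columnType", "Column type");
        LABELS.put("hiveColumns.columnComment", "Column comment");
    }

    // Returns the display label for a highlight key, or the key itself when no label is defined
    public static String toLabel(String highlightKey) {
        return LABELS.getOrDefault(highlightKey, highlightKey);
    }
}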
Use of com.thinkbiganalytics.search.rest.model.HiveColumn in project kylo by Teradata.
From the class ElasticSearchSearchResultTransform, the method getSchemaSearchResultData:
private SchemaSearchResultData getSchemaSearchResultData(SearchHit searchHit) {
    final String DATABASE_NAME = "databaseName";
    final String DATABASE_OWNER = "databaseOwner";
    final String TABLE_CREATE_TIME = "tableCreateTime";
    final String TABLE_NAME = "tableName";
    final String TABLE_TYPE = "tableType";
    final String HIVE_COLUMNS = "hiveColumns";
    final String COLUMN_TYPE = "columnType";
    final String COLUMN_NAME = "columnName";
    final String COLUMN_COMMENT = "columnComment";
    final String HIVE_COLUMNS_COLUMN_TYPE = HIVE_COLUMNS + "." + COLUMN_TYPE;
    final String HIVE_COLUMNS_COLUMN_TYPE_NEW_DESCRIPTION = "Column type";
    final String HIVE_COLUMNS_COLUMN_NAME = HIVE_COLUMNS + "." + COLUMN_NAME;
    final String HIVE_COLUMNS_COLUMN_NAME_NEW_DESCRIPTION = "Column name";
    final String HIVE_COLUMNS_COLUMN_COMMENT = HIVE_COLUMNS + "." + COLUMN_COMMENT;
    final String HIVE_COLUMNS_COLUMN_COMMENT_NEW_DESCRIPTION = "Column comment";
    Map<String, Object> searchHitSourceMap = searchHit.sourceAsMap();
    SchemaSearchResultData schemaSearchResultData = new SchemaSearchResultData();
    schemaSearchResultData.setDatabaseName(searchHitSourceMap.getOrDefault(DATABASE_NAME, "").toString());
    schemaSearchResultData.setDatabaseOwner(searchHitSourceMap.getOrDefault(DATABASE_OWNER, "").toString());
    schemaSearchResultData.setTableCreateTime(searchHitSourceMap.getOrDefault(TABLE_CREATE_TIME, "").toString());
    schemaSearchResultData.setTableName(searchHitSourceMap.getOrDefault(TABLE_NAME, "").toString());
    schemaSearchResultData.setTableType(searchHitSourceMap.getOrDefault(TABLE_TYPE, "").toString());
    List<HiveColumn> hiveColumns = new ArrayList<>();
    List<Pair> highlightsList = new ArrayList<>();
    @SuppressWarnings("unchecked") List<Map<String, String>> hiveColumnsListOfMaps = (List<Map<String, String>>) searchHitSourceMap.getOrDefault(HIVE_COLUMNS, Collections.emptyList());
    for (Map<String, String> hiveColumnsMap : hiveColumnsListOfMaps) {
        String columnName = "";
        String columnType = "";
        String columnComment = "";
        for (Map.Entry<String, String> hiveColumnEntry : hiveColumnsMap.entrySet()) {
            if (hiveColumnEntry.getKey().equals(COLUMN_TYPE)) {
                columnType = hiveColumnEntry.getValue();
            } else if (hiveColumnEntry.getKey().equals(COLUMN_NAME)) {
                columnName = hiveColumnEntry.getValue();
            } else if (hiveColumnEntry.getKey().equals(COLUMN_COMMENT)) {
                columnComment = hiveColumnEntry.getValue();
            }
        }
        hiveColumns.add(new HiveColumn(columnName, columnType, columnComment));
    }
    Map<String, HighlightField> highlights = searchHit.getHighlightFields();
    for (Map.Entry<String, HighlightField> entry : highlights.entrySet()) {
        String key = entry.getKey();
        switch (key) {
            case HIVE_COLUMNS_COLUMN_TYPE:
                key = HIVE_COLUMNS_COLUMN_TYPE_NEW_DESCRIPTION;
                break;
            case HIVE_COLUMNS_COLUMN_NAME:
                key = HIVE_COLUMNS_COLUMN_NAME_NEW_DESCRIPTION;
                break;
            case HIVE_COLUMNS_COLUMN_COMMENT:
                key = HIVE_COLUMNS_COLUMN_COMMENT_NEW_DESCRIPTION;
                break;
        }
        if (entry.getValue().getFragments().length >= 1) {
            highlightsList.add(new Pair(key, entry.getValue().getFragments()[0].toString()));
        }
    }
    schemaSearchResultData.setHighlights(highlightsList);
    schemaSearchResultData.setHiveColumns(hiveColumns);
    schemaSearchResultData.setRawData(searchHit.getSource());
    schemaTypeResultCount += 1L;
    return schemaSearchResultData;
}
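The inner entry loop over each hiveColumnsMap is equivalent to three direct lookups with an empty-string fallback. A minimal sketch of that equivalence, with the field names taken from the snippet above; the helper class and method are hypothetical:

// Sketch only: building a HiveColumn from one column map via direct lookups.
import com.thinkbiganalytics.search.rest.model.HiveColumn;
import java.util.Map;

public class HiveColumnMapSketch {

    public static HiveColumn fromMap(Map<String, String> hiveColumnMap) {
        // getOrDefault keeps the empty-string fallback used by the original loop
        return new HiveColumn(
                hiveColumnMap.getOrDefault("columnName", ""),
                hiveColumnMap.getOrDefault("columnType", ""),
                hiveColumnMap.getOrDefault("columnComment", ""));
    }
}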