Usage of org.hypertrace.core.query.service.api.ResultSetChunk in the Hypertrace gateway-service project: class EntityInteractionsFetcher, method parseResultSet.
/**
 * Parses query-service result chunks describing entity interactions and attaches an
 * {@code EntityInteraction} (incoming or outgoing, per the {@code incoming} flag) to the
 * matching entity builder for every result row.
 *
 * <p>Row layout: the first {@code idColumns.size()} columns hold this entity's id parts,
 * the next {@code otherIdColumns.size()} columns hold the other entity's id parts, and the
 * remaining columns carry the selected attributes/metrics.
 *
 * @param entityType          type of the entity on this side of the interaction
 * @param otherEntityType     type of the entity on the other side of the interaction
 * @param selections          selection expressions used to build the interaction edges
 * @param metricToAggFunction map from result column alias to its aggregation function;
 *                            columns present here are treated as metrics, all others as attributes
 * @param resultset           iterator over result chunks returned by the query service
 * @param incoming            true if the rows describe incoming interactions
 * @param entityIdToBuilders  builders for the entities the interactions attach to; assumed to
 *                            already contain an entry for every entityId seen in the results
 * @param requestContext      request context used for attribute metadata lookups
 */
private void parseResultSet(String entityType, String otherEntityType, Collection<Expression> selections, Map<String, FunctionExpression> metricToAggFunction, Iterator<ResultSetChunk> resultset, boolean incoming, Map<EntityKey, Builder> entityIdToBuilders, RequestContext requestContext) {
  Map<String, AttributeMetadata> attributeMetadataMap = metadataProvider.getAttributesMetadata(requestContext, SCOPE);
  Map<String, AttributeKind> aliasToAttributeKind = MetricAggregationFunctionUtil.getValueTypeForFunctionType(metricToAggFunction, attributeMetadataMap);
  // The id column layout depends only on the entity types and direction, so compute it once
  // instead of once per row. Note: the selections were added to the query in this order.
  List<String> idColumns = getEntityIdColumnsFromInteraction(DomainEntityType.valueOf(entityType.toUpperCase()), !incoming);
  List<String> otherIdColumns = getEntityIdColumnsFromInteraction(DomainEntityType.valueOf(otherEntityType.toUpperCase()), incoming);
  while (resultset.hasNext()) {
    ResultSetChunk chunk = resultset.next();
    // Parameterized logging defers message construction until debug is actually enabled.
    LOG.debug("Received chunk: {}", chunk);
    if (chunk.getRowCount() < 1) {
      break;
    }
    for (Row row : chunk.getRowList()) {
      // Construct the from/to EntityKeys from the leading id columns.
      EntityKey entityId = EntityKey.of(IntStream.range(0, idColumns.size()).mapToObj(value -> row.getColumn(value).getString()).toArray(String[]::new));
      EntityKey otherEntityId = EntityKey.of(IntStream.range(idColumns.size(), idColumns.size() + otherIdColumns.size()).mapToObj(value -> row.getColumn(value).getString()).toArray(String[]::new));
      EntityInteraction.Builder interaction = EntityInteraction.newBuilder();
      // from/to roles flip depending on the interaction direction.
      addInteractionEdges(interaction, selections, incoming ? otherEntityType : entityType, incoming ? otherEntityId : entityId, incoming ? entityType : otherEntityType, incoming ? entityId : otherEntityId);
      for (int i = idColumns.size() + otherIdColumns.size(); i < chunk.getResultSetMetadata().getColumnMetadataCount(); i++) {
        ColumnMetadata metadata = chunk.getResultSetMetadata().getColumnMetadata(i);
        // Ignore the count column since we introduced that ourselves into the query.
        if (StringUtils.equalsIgnoreCase(COUNT_COLUMN_NAME, metadata.getColumnName())) {
          continue;
        }
        // Check if this is an attribute vs metric and set it accordingly on the interaction.
        if (metricToAggFunction.containsKey(metadata.getColumnName())) {
          Value value = QueryAndGatewayDtoConverter.convertToGatewayValueForMetricValue(aliasToAttributeKind, attributeMetadataMap, metadata, row.getColumn(i));
          interaction.putMetrics(metadata.getColumnName(), AggregatedMetricValue.newBuilder().setValue(value).setFunction(metricToAggFunction.get(metadata.getColumnName()).getFunction()).build());
        } else {
          interaction.putAttribute(metadata.getColumnName(), QueryAndGatewayDtoConverter.convertQueryValueToGatewayValue(row.getColumn(i), attributeMetadataMap.get(metadata.getColumnName())));
        }
      }
      if (incoming) {
        entityIdToBuilders.get(entityId).addIncomingInteraction(interaction);
      } else {
        entityIdToBuilders.get(entityId).addOutgoingInteraction(interaction);
      }
      // Guard retained here: interaction.build().toString() is expensive to evaluate.
      if (LOG.isDebugEnabled()) {
        LOG.debug(interaction.build().toString());
      }
    }
  }
}
Usage of org.hypertrace.core.query.service.api.ResultSetChunk in the Hypertrace gateway-service project: class QueryServiceEntityFetcher, method getEntities.
/**
 * Fetches the entities matching the given request from the query service and returns them
 * as an ordered map of entity key to entity builder (order as returned by the source).
 *
 * @param requestContext tenant/context information for the request
 * @param entitiesRequest the entities request (selections, filter, order by, limit/offset)
 * @return response wrapping the ordered map of entity builders
 */
@Override
public EntityFetcherResponse getEntities(EntitiesRequestContext requestContext, EntitiesRequest entitiesRequest) {
  Map<String, AttributeMetadata> attributeMetadataByAttributeId = attributeMetadataProvider.getAttributesMetadata(requestContext, entitiesRequest.getEntityType());
  Map<String, AttributeMetadata> attributeMetadataByResultKey = this.remapAttributeMetadataByResultName(entitiesRequest, attributeMetadataByAttributeId);
  // Fail fast on an invalid request before issuing any query.
  entitiesRequestValidator.validate(entitiesRequest, attributeMetadataByAttributeId);
  List<String> idAttributeIds = AttributeMetadataUtil.getIdAttributeIds(attributeMetadataProvider, entityIdColumnsConfigs, requestContext, entitiesRequest.getEntityType());
  List<org.hypertrace.gateway.service.v1.common.Expression> aggregateExpressions = ExpressionReader.getFunctionExpressions(entitiesRequest.getSelectionList());
  Map<String, List<String>> aliasesByIdAttributeId = getExpectedResultNamesForEachAttributeId(entitiesRequest.getSelectionList(), idAttributeIds);
  QueryRequest.Builder queryBuilder = constructSelectionQuery(requestContext, entitiesRequest, idAttributeIds, aggregateExpressions);
  adjustLimitAndOffset(queryBuilder, entitiesRequest.getLimit(), entitiesRequest.getOffset());
  if (!entitiesRequest.getOrderByList().isEmpty()) {
    // Carry over the ordering requested by the caller.
    queryBuilder.addAllOrderBy(QueryAndGatewayDtoConverter.convertToQueryOrderByExpressions(entitiesRequest.getOrderByList()));
  }
  QueryRequest queryRequest = queryBuilder.build();
  LOG.debug("Sending Query to Query Service ======== \n {}", queryRequest);
  Iterator<ResultSetChunk> chunkIterator = queryServiceClient.executeQuery(queryRequest, requestContext.getHeaders(), requestTimeout);
  // LinkedHashMap so the response retains the row order returned by the source.
  Map<EntityKey, Entity.Builder> builderByEntityKey = new LinkedHashMap<>();
  while (chunkIterator.hasNext()) {
    ResultSetChunk chunk = chunkIterator.next();
    LOG.debug("Received chunk: {}", chunk);
    if (chunk.getRowCount() < 1) {
      break;
    }
    for (Row row : chunk.getRowList()) {
      // The leading columns hold the entity id attribute values; combine them into the key.
      String[] idValues = new String[idAttributeIds.size()];
      for (int col = 0; col < idValues.length; col++) {
        idValues[col] = row.getColumn(col).getString();
      }
      EntityKey entityKey = EntityKey.of(idValues);
      Builder entityBuilder = builderByEntityKey.computeIfAbsent(entityKey, k -> Entity.newBuilder());
      entityBuilder.setEntityType(entitiesRequest.getEntityType());
      entityBuilder.setId(entityKey.toString());
      // Surface each id attribute value on the entity as a STRING attribute.
      for (int col = 0; col < idAttributeIds.size(); col++) {
        entityBuilder.putAttribute(idAttributeIds.get(col), Value.newBuilder().setString(entityKey.getAttributes().get(col)).setValueType(ValueType.STRING).build());
      }
      // Duplicate id attributes under every alias the caller requested for them.
      for (Map.Entry<String, List<String>> aliasEntry : aliasesByIdAttributeId.entrySet()) {
        for (String requestedAlias : aliasEntry.getValue()) {
          entityBuilder.putAttribute(requestedAlias, entityBuilder.getAttributeOrThrow(aliasEntry.getKey()));
        }
      }
      // Remaining columns are the selected attributes/aggregations.
      for (int col = idAttributeIds.size(); col < chunk.getResultSetMetadata().getColumnMetadataCount(); col++) {
        ColumnMetadata metadata = chunk.getResultSetMetadata().getColumnMetadata(col);
        org.hypertrace.core.query.service.api.Value columnValue = row.getColumn(col);
        buildEntity(entityBuilder, requestContext, metadata, columnValue, attributeMetadataByResultKey, aggregateExpressions.isEmpty());
      }
    }
  }
  return new EntityFetcherResponse(builderByEntityKey);
}
Usage of org.hypertrace.core.query.service.api.ResultSetChunk in the Hypertrace gateway-service project: class QueryServiceEntityFetcher, method getTimeAggregatedMetrics.
/**
 * Fetches time-series metric values (one MetricSeries per requested TimeAggregation) for the
 * entities matching the request, grouped by entity key.
 *
 * <p>Aggregations are batched by Period, and one query-service request is issued per distinct
 * period. Row layout per response: the leading columns are the entity id attribute values, the
 * next column is the interval start time, and the remaining columns are aggregated metric values.
 *
 * @param requestContext context (tenant, headers, alias-to-aggregation mappings) for the request
 * @param entitiesRequest request holding the TimeAggregations to evaluate
 * @return per-entity builders populated with sorted metric series; empty response if the
 *     request contains no TimeAggregations
 */
@Override
public EntityFetcherResponse getTimeAggregatedMetrics(EntitiesRequestContext requestContext, EntitiesRequest entitiesRequest) {
// Nothing to execute if there are no TimeAggregations in the request.
if (entitiesRequest.getTimeAggregationCount() == 0) {
return new EntityFetcherResponse();
}
// Only supported filter is entityIds IN ["id1", "id2", "id3"]
List<String> idColumns = AttributeMetadataUtil.getIdAttributeIds(attributeMetadataProvider, entityIdColumnsConfigs, requestContext, entitiesRequest.getEntityType());
String timeColumn = AttributeMetadataUtil.getTimestampAttributeId(attributeMetadataProvider, requestContext, entitiesRequest.getEntityType());
Map<String, AttributeMetadata> attributeMetadataMap = attributeMetadataProvider.getAttributesMetadata(requestContext, entitiesRequest.getEntityType());
Map<String, AttributeMetadata> resultKeyToAttributeMetadataMap = this.remapAttributeMetadataByResultName(entitiesRequest, attributeMetadataMap);
entitiesRequestValidator.validate(entitiesRequest, attributeMetadataMap);
// Register each aggregation under its alias so result columns can be mapped back to it below.
entitiesRequest.getTimeAggregationList().forEach(timeAggregation -> requestContext.mapAliasToTimeAggregation(timeAggregation.getAggregation().getFunction().getAlias(), timeAggregation));
// First group the Aggregations based on the period so that we can issue separate queries
// to QueryService for each different Period.
Collection<List<TimeAggregation>> result = entitiesRequest.getTimeAggregationList().stream().collect(Collectors.groupingBy(TimeAggregation::getPeriod)).values();
// entity key -> (column alias -> series builder); LinkedHashMap keeps source order.
Map<EntityKey, Map<String, MetricSeries.Builder>> entityMetricSeriesMap = new LinkedHashMap<>();
for (List<TimeAggregation> batch : result) {
// Every aggregation in a batch shares the same period — that was the grouping key.
Period period = batch.get(0).getPeriod();
ChronoUnit unit = ChronoUnit.valueOf(period.getUnit());
long periodSecs = Duration.of(period.getValue(), unit).getSeconds();
QueryRequest request = buildTimeSeriesQueryRequest(entitiesRequest, requestContext, periodSecs, batch, idColumns, timeColumn);
if (LOG.isDebugEnabled()) {
LOG.debug("Sending time series queryRequest to query service: ======== \n {}", request.toString());
}
Iterator<ResultSetChunk> resultSetChunkIterator = queryServiceClient.executeQuery(request, requestContext.getHeaders(), requestTimeout);
while (resultSetChunkIterator.hasNext()) {
ResultSetChunk chunk = resultSetChunkIterator.next();
if (LOG.isDebugEnabled()) {
LOG.debug("Received chunk: " + chunk.toString());
}
if (chunk.getRowCount() < 1) {
break;
}
// Without column metadata we cannot map result columns to aggregations.
if (!chunk.hasResultSetMetadata()) {
LOG.warn("Chunk doesn't have result metadata so couldn't process the response.");
break;
}
for (Row row : chunk.getRowList()) {
// Construct the entity id from the entityIdAttributeIds columns
EntityKey entityKey = EntityKey.of(IntStream.range(0, idColumns.size()).mapToObj(value -> row.getColumn(value).getString()).toArray(String[]::new));
Map<String, MetricSeries.Builder> metricSeriesMap = entityMetricSeriesMap.computeIfAbsent(entityKey, k -> new LinkedHashMap<>());
Interval.Builder intervalBuilder = Interval.newBuilder();
// The column right after the id columns is the interval start time.
Value value = QueryAndGatewayDtoConverter.convertQueryValueToGatewayValue(row.getColumn(idColumns.size()));
if (value.getValueType() == ValueType.STRING) {
// Interval start arrives as a STRING and is parsed as epoch millis; the interval
// end is derived by adding the batch period.
long startTime = Long.parseLong(value.getString());
long endTime = startTime + TimeUnit.SECONDS.toMillis(periodSecs);
intervalBuilder.setStartTimeMillis(startTime);
intervalBuilder.setEndTimeMillis(endTime);
for (int i = idColumns.size() + 1; i < chunk.getResultSetMetadata().getColumnMetadataCount(); i++) {
ColumnMetadata metadata = chunk.getResultSetMetadata().getColumnMetadata(i);
TimeAggregation timeAggregation = requestContext.getTimeAggregationByAlias(metadata.getColumnName());
// Skip columns we didn't register an aggregation alias for.
if (timeAggregation == null) {
LOG.warn("Couldn't find an aggregate for column: {}", metadata.getColumnName());
continue;
}
FunctionType functionType = timeAggregation.getAggregation().getFunction().getFunction();
AttributeMetadata functionAttributeMetadata = resultKeyToAttributeMetadataMap.get(metadata.getColumnName());
Value convertedValue = QueryAndGatewayDtoConverter.convertToGatewayValueForMetricValue(MetricAggregationFunctionUtil.getValueTypeForFunctionType(functionType, functionAttributeMetadata), resultKeyToAttributeMetadataMap, metadata, row.getColumn(i));
// At most one health expression is permitted per aggregation function.
List<org.hypertrace.gateway.service.v1.common.Expression> healthExpressions = timeAggregation.getAggregation().getFunction().getArgumentsList().stream().filter(org.hypertrace.gateway.service.v1.common.Expression::hasHealth).collect(Collectors.toList());
Preconditions.checkArgument(healthExpressions.size() <= 1);
// NOTE(review): health is always NOT_COMPUTED here, even when a health expression
// is present — confirm whether computation happens elsewhere or is pending.
Health health = Health.NOT_COMPUTED;
MetricSeries.Builder seriesBuilder = metricSeriesMap.computeIfAbsent(metadata.getColumnName(), k -> getMetricSeriesBuilder(timeAggregation));
seriesBuilder.addValue(Interval.newBuilder(intervalBuilder.build()).setValue(convertedValue).setHealth(health));
}
} else {
LOG.warn("Was expecting STRING values only but received valueType: {}", value.getValueType());
}
}
}
}
// Materialize the accumulated series into Entity builders, sorting each series.
Map<EntityKey, Entity.Builder> resultMap = new LinkedHashMap<>();
for (Map.Entry<EntityKey, Map<String, MetricSeries.Builder>> entry : entityMetricSeriesMap.entrySet()) {
Entity.Builder entityBuilder = Entity.newBuilder().setEntityType(entitiesRequest.getEntityType()).setId(entry.getKey().toString()).putAllMetricSeries(entry.getValue().entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, e -> getSortedMetricSeries(e.getValue()))));
// Also surface the id attribute values on each entity as STRING attributes.
for (int i = 0; i < idColumns.size(); i++) {
entityBuilder.putAttribute(idColumns.get(i), Value.newBuilder().setString(entry.getKey().getAttributes().get(i)).setValueType(ValueType.STRING).build());
}
resultMap.put(entry.getKey(), entityBuilder);
}
return new EntityFetcherResponse(resultMap);
}
Usage of org.hypertrace.core.query.service.api.ResultSetChunk in the Hypertrace gateway-service project: class BaselineServiceImpl, method getBaselineForEntities.
/**
 * Computes baseline values for the requested entities: aggregated-function baselines and/or
 * time-series baselines, depending on which sections of the request are populated, and merges
 * the two result maps.
 *
 * <p>For both kinds the query window is extended backwards (via {@code getUpdatedStartTime})
 * and ends at the requested start time, so baselines and standard deviations are derived from
 * data strictly before the requested period.
 *
 * @param tenantId        tenant issuing the request
 * @param originalRequest the baseline request (aggregate and/or metric-series sections)
 * @param requestHeaders  headers forwarded to the query executor
 * @return merged response containing aggregate baselines and time-series baselines
 */
public BaselineEntitiesResponse getBaselineForEntities(String tenantId, BaselineEntitiesRequest originalRequest, Map<String, String> requestHeaders) {
  BaselineRequestContext requestContext = getRequestContext(tenantId, requestHeaders, originalRequest);
  Map<String, AttributeMetadata> attributeMetadataMap = attributeMetadataProvider.getAttributesMetadata(requestContext, originalRequest.getEntityType());
  baselineEntitiesRequestValidator.validate(originalRequest, attributeMetadataMap);
  String timeColumn = AttributeMetadataUtil.getTimestampAttributeId(attributeMetadataProvider, requestContext, originalRequest.getEntityType());
  // Hoisted out of the two branches below, which previously computed the same list twice.
  List<String> entityIdAttributes = AttributeMetadataUtil.getIdAttributeIds(attributeMetadataProvider, entityIdColumnsConfigs, requestContext, originalRequest.getEntityType());
  Map<String, BaselineEntity> baselineEntityAggregatedMetricsMap = new HashMap<>();
  Map<String, BaselineEntity> baselineEntityTimeSeriesMap = new HashMap<>();
  if (originalRequest.getBaselineAggregateRequestCount() > 0) {
    // Aggregated Functions data
    Period aggTimePeriod = getPeriod(originalRequest.getStartTimeMillis(), originalRequest.getEndTimeMillis());
    long periodSecs = getPeriodInSecs(aggTimePeriod);
    long aggStartTime = originalRequest.getStartTimeMillis();
    long aggEndTime = originalRequest.getEndTimeMillis();
    List<TimeAggregation> timeAggregations = getTimeAggregationsForAggregateExpr(originalRequest, aggStartTime, aggEndTime);
    updateAliasMap(requestContext, timeAggregations);
    // Take more data to calculate baseline and standard deviation: the query window ends
    // where the requested window starts.
    long seriesStartTime = getUpdatedStartTime(aggStartTime, aggEndTime);
    long seriesEndTime = aggStartTime;
    QueryRequest aggQueryRequest = baselineServiceQueryParser.getQueryRequest(seriesStartTime, seriesEndTime, originalRequest.getEntityIdsList(), timeColumn, timeAggregations, periodSecs, entityIdAttributes);
    Iterator<ResultSetChunk> aggResponseChunkIterator = baselineServiceQueryExecutor.executeQuery(requestHeaders, aggQueryRequest);
    BaselineEntitiesResponse aggEntitiesResponse = baselineServiceQueryParser.parseQueryResponse(aggResponseChunkIterator, requestContext, entityIdAttributes.size(), originalRequest.getEntityType(), aggStartTime, aggEndTime);
    baselineEntityAggregatedMetricsMap = getEntitiesMapFromAggResponse(aggEntitiesResponse);
  }
  // Time Series data
  if (originalRequest.getBaselineMetricSeriesRequestCount() > 0) {
    Period timeSeriesPeriod = getTimeSeriesPeriod(originalRequest.getBaselineMetricSeriesRequestList());
    long periodSecs = getPeriodInSecs(timeSeriesPeriod);
    // Align the requested window to period boundaries before deriving the query window.
    long alignedStartTime = QueryExpressionUtil.alignToPeriodBoundary(originalRequest.getStartTimeMillis(), periodSecs, true);
    long alignedEndTime = QueryExpressionUtil.alignToPeriodBoundary(originalRequest.getEndTimeMillis(), periodSecs, false);
    List<TimeAggregation> timeAggregations = getTimeAggregationsForTimeSeriesExpr(originalRequest);
    long seriesStartTime = getUpdatedStartTime(alignedStartTime, alignedEndTime);
    long seriesEndTime = alignedStartTime;
    QueryRequest timeSeriesQueryRequest = baselineServiceQueryParser.getQueryRequest(seriesStartTime, seriesEndTime, originalRequest.getEntityIdsList(), timeColumn, timeAggregations, periodSecs, entityIdAttributes);
    Iterator<ResultSetChunk> timeSeriesChunkIterator = baselineServiceQueryExecutor.executeQuery(requestHeaders, timeSeriesQueryRequest);
    BaselineEntitiesResponse timeSeriesEntitiesResponse = baselineServiceQueryParser.parseQueryResponse(timeSeriesChunkIterator, requestContext, entityIdAttributes.size(), originalRequest.getEntityType(), alignedStartTime, alignedEndTime);
    baselineEntityTimeSeriesMap = getEntitiesMapFromTimeSeriesResponse(timeSeriesEntitiesResponse, alignedStartTime, alignedEndTime, periodSecs);
  }
  return mergeEntities(baselineEntityAggregatedMetricsMap, baselineEntityTimeSeriesMap);
}
Usage of org.hypertrace.core.query.service.api.ResultSetChunk in the Hypertrace gateway-service project: class QueryServiceEntityFetcherTests, method testGetEntities.
/**
 * Verifies that getEntities translates an EntitiesRequest (selection + SUM aggregation +
 * AND filter + order/limit/offset) into the expected query-service request, and that the
 * returned chunk rows are mapped into one Entity per id with attributes and metrics set.
 */
@Test
public void testGetEntities() {
  List<OrderByExpression> orderByExpressions = List.of(buildOrderByExpression(API_ID_ATTR));
  long startTime = 1L;
  long endTime = 10L;
  int limit = 10;
  int offset = 5;
  String tenantId = "TENANT_ID";
  Map<String, String> requestHeaders = Map.of("x-tenant-id", tenantId);
  AttributeScope entityType = AttributeScope.API;
  // AND of a time-range filter and an EQ filter on discovery state.
  Filter requestFilter =
      Filter.newBuilder()
          .setOperator(AND)
          .addChildFilter(
              EntitiesRequestAndResponseUtils.getTimeRangeFilter(
                  "API.startTime", startTime, endTime))
          .addChildFilter(generateEQFilter(API_DISCOVERY_STATE_ATTR, "DISCOVERED"))
          .build();
  EntitiesRequest entitiesRequest =
      EntitiesRequest.newBuilder()
          .setEntityType(entityType.name())
          .setStartTimeMillis(startTime)
          .setEndTimeMillis(endTime)
          .addSelection(buildExpression(API_NAME_ATTR))
          .addSelection(
              buildAggregateExpression(
                  API_NUM_CALLS_ATTR, FunctionType.SUM, "Sum_numCalls", Collections.emptyList()))
          .setFilter(requestFilter)
          .addAllOrderBy(orderByExpressions)
          .setLimit(limit)
          .setOffset(offset)
          .build();
  EntitiesRequestContext entitiesRequestContext =
      new EntitiesRequestContext(
          tenantId, startTime, endTime, entityType.name(), "API.startTime", requestHeaders);
  // The exact query we expect the fetcher to send to the query service.
  QueryRequest expectedQueryRequest =
      QueryRequest.newBuilder()
          .addSelection(createAttributeExpression(API_ID_ATTR))
          .addSelection(createQsAggregationExpression("SUM", API_NUM_CALLS_ATTR, "Sum_numCalls"))
          .addSelection(createAttributeExpression(API_NAME_ATTR))
          .setFilter(
              createQsRequestFilter(
                  API_START_TIME_ATTR,
                  API_ID_ATTR,
                  startTime,
                  endTime,
                  createStringFilter(API_DISCOVERY_STATE_ATTR, Operator.EQ, "DISCOVERED")))
          .addGroupBy(createAttributeExpression(API_ID_ATTR))
          .addGroupBy(createAttributeExpression(API_NAME_ATTR))
          .setOffset(offset)
          .setLimit(QueryServiceClient.DEFAULT_QUERY_SERVICE_GROUP_BY_LIMIT)
          .addAllOrderBy(
              QueryAndGatewayDtoConverter.convertToQueryOrderByExpressions(orderByExpressions))
          .build();
  // Two result rows: (id, name, Sum_numCalls).
  List<ResultSetChunk> resultSetChunks =
      List.of(
          getResultSetChunk(
              List.of("API.id", "API.name", "Sum_numCalls"),
              new String[][] {{"apiId1", "api 1", "3"}, {"apiId2", "api 2", "5"}}));
  when(queryServiceClient.executeQuery(eq(expectedQueryRequest), eq(requestHeaders), eq(500)))
      .thenReturn(resultSetChunks.iterator());
  EntityFetcherResponse response =
      queryServiceEntityFetcher.getEntities(entitiesRequestContext, entitiesRequest);
  assertEquals(2, response.size());
  Map<EntityKey, Builder> expectedEntityKeyBuilderResponseMap = new LinkedHashMap<>();
  expectedEntityKeyBuilderResponseMap.put(
      EntityKey.of("apiId1"),
      Entity.newBuilder()
          .setId("apiId1")
          .setEntityType("API")
          .putAttribute("API.id", getStringValue("apiId1"))
          .putAttribute("API.name", getStringValue("api 1"))
          .putMetric("Sum_numCalls", getAggregatedMetricValue(FunctionType.SUM, 3)));
  expectedEntityKeyBuilderResponseMap.put(
      EntityKey.of("apiId2"),
      Entity.newBuilder()
          .setId("apiId2")
          .setEntityType("API")
          .putAttribute("API.id", getStringValue("apiId2"))
          .putAttribute("API.name", getStringValue("api 2"))
          .putMetric("Sum_numCalls", getAggregatedMetricValue(FunctionType.SUM, 5)));
  compareEntityFetcherResponses(
      new EntityFetcherResponse(expectedEntityKeyBuilderResponseMap), response);
}
Aggregations