Example usage of io.trino.plugin.elasticsearch.client.IndexMetadata.PrimitiveType in the Trino project (trinodb): the toTrino method of the ElasticsearchMetadata class.
/**
 * Maps an Elasticsearch index field to the corresponding Trino type together with the
 * decoder descriptor used to read its values from documents.
 *
 * @param prefix dotted path of the enclosing object ({@code ""} at the top level)
 * @param field  the Elasticsearch field to map
 * @return the Trino type and decoder, or {@code null} when the field's type is not
 *         supported (unrecognized primitive name, a date/time type with custom formats,
 *         or an object type whose sub-fields are all unsupported) — callers skip such columns
 * @throws IllegalArgumentException if the field is declared both as an array and as raw JSON
 */
private TypeAndDecoder toTrino(String prefix, IndexMetadata.Field field)
{
    String path = appendPath(prefix, field.getName());

    // Pass the template and argument directly so Guava formats the message lazily,
    // only when the precondition actually fails (avoids String.format on every field)
    checkArgument(!field.asRawJson() || !field.isArray(), "A column, (%s) cannot be declared as a Trino array and also be rendered as json.", path);

    if (field.asRawJson()) {
        // Entire subtree is surfaced as a single VARCHAR column containing raw JSON
        return new TypeAndDecoder(VARCHAR, new RawJsonDecoder.Descriptor(path));
    }

    if (field.isArray()) {
        // Recurse on the element type; the element shares this field's path because
        // Elasticsearch arrays are not reflected in the document path
        TypeAndDecoder element = toTrino(path, elementField(field));
        return new TypeAndDecoder(new ArrayType(element.getType()), new ArrayDecoder.Descriptor(element.getDecoderDescriptor()));
    }

    IndexMetadata.Type type = field.getType();
    if (type instanceof PrimitiveType) {
        switch (((PrimitiveType) type).getName()) {
            case "float":
                return new TypeAndDecoder(REAL, new RealDecoder.Descriptor(path));
            case "double":
                return new TypeAndDecoder(DOUBLE, new DoubleDecoder.Descriptor(path));
            case "byte":
                return new TypeAndDecoder(TINYINT, new TinyintDecoder.Descriptor(path));
            case "short":
                return new TypeAndDecoder(SMALLINT, new SmallintDecoder.Descriptor(path));
            case "integer":
                return new TypeAndDecoder(INTEGER, new IntegerDecoder.Descriptor(path));
            case "long":
                return new TypeAndDecoder(BIGINT, new BigintDecoder.Descriptor(path));
            case "text":
            case "keyword":
                return new TypeAndDecoder(VARCHAR, new VarcharDecoder.Descriptor(path));
            case "ip":
                return new TypeAndDecoder(ipAddressType, new IpAddressDecoder.Descriptor(path, ipAddressType));
            case "boolean":
                return new TypeAndDecoder(BOOLEAN, new BooleanDecoder.Descriptor(path));
            case "binary":
                return new TypeAndDecoder(VARBINARY, new VarbinaryDecoder.Descriptor(path));
        }
        // unrecognized primitive name — fall through to return null below
    }
    else if (type instanceof ScaledFloatType) {
        // scaled_float is stored as a long with a scaling factor; expose it as DOUBLE
        return new TypeAndDecoder(DOUBLE, new DoubleDecoder.Descriptor(path));
    }
    else if (type instanceof DateTimeType) {
        if (((DateTimeType) type).getFormats().isEmpty()) {
            return new TypeAndDecoder(TIMESTAMP_MILLIS, new TimestampDecoder.Descriptor(path));
        }
        // otherwise, skip -- we don't support custom formats, yet
    }
    else if (type instanceof ObjectType) {
        ObjectType objectType = (ObjectType) type;

        ImmutableList.Builder<RowType.Field> rowFieldsBuilder = ImmutableList.builder();
        ImmutableList.Builder<RowDecoder.NameAndDescriptor> decoderFields = ImmutableList.builder();
        for (IndexMetadata.Field rowField : objectType.getFields()) {
            String name = rowField.getName();
            TypeAndDecoder child = toTrino(appendPath(path, name), rowField);
            // Unsupported sub-fields (null) are silently dropped from the row type
            if (child != null) {
                decoderFields.add(new RowDecoder.NameAndDescriptor(name, child.getDecoderDescriptor()));
                rowFieldsBuilder.add(RowType.field(name, child.getType()));
            }
        }

        List<RowType.Field> rowFields = rowFieldsBuilder.build();
        if (!rowFields.isEmpty()) {
            return new TypeAndDecoder(RowType.from(rowFields), new RowDecoder.Descriptor(path, decoderFields.build()));
        }
        // otherwise, skip -- row types must have at least 1 field
    }

    return null;
}
Example usage of io.trino.plugin.elasticsearch.client.IndexMetadata.PrimitiveType in the Trino project (trinodb): the applyFilter method of the ElasticsearchMetadata class.
/**
 * Pushes predicates down into the Elasticsearch table handle.
 *
 * <p>Two kinds of pushdown are attempted: (1) domains from the constraint summary on
 * columns that support predicates are intersected into the handle's TupleDomain;
 * (2) LIKE expressions over {@code keyword}-typed columns are converted to regexes and
 * stored on the handle. Anything not handled is returned as the remaining filter.
 *
 * @return empty if nothing new could be pushed down, otherwise the updated handle plus
 *         the remaining (unenforced) filter and expression
 */
@Override
public Optional<ConstraintApplicationResult<ConnectorTableHandle>> applyFilter(ConnectorSession session, ConnectorTableHandle table, Constraint constraint)
{
    ElasticsearchTableHandle handle = (ElasticsearchTableHandle) table;

    if (isPassthroughQuery(handle)) {
        // filter pushdown currently not supported for passthrough query
        return Optional.empty();
    }

    // Split the summary domains by whether the column can take predicates at all
    Map<ColumnHandle, Domain> supported = new HashMap<>();
    Map<ColumnHandle, Domain> unsupported = new HashMap<>();
    if (constraint.getSummary().getDomains().isPresent()) {
        for (Map.Entry<ColumnHandle, Domain> entry : constraint.getSummary().getDomains().get().entrySet()) {
            ElasticsearchColumnHandle column = (ElasticsearchColumnHandle) entry.getKey();
            if (column.isSupportsPredicates()) {
                supported.put(column, entry.getValue());
            }
            else {
                unsupported.put(column, entry.getValue());
            }
        }
    }

    TupleDomain<ColumnHandle> oldDomain = handle.getConstraint();
    TupleDomain<ColumnHandle> newDomain = oldDomain.intersect(TupleDomain.withColumnDomains(supported));

    ConnectorExpression oldExpression = constraint.getExpression();
    Map<String, String> newRegexes = new HashMap<>(handle.getRegexes());
    List<ConnectorExpression> expressions = ConnectorExpressions.extractConjuncts(oldExpression);
    List<ConnectorExpression> notHandledExpressions = new ArrayList<>();

    // Index metadata is invariant for a fixed handle.getIndex() within this call and may
    // require a remote round-trip, so fetch it lazily and at most once rather than per
    // LIKE conjunct
    IndexMetadata metadata = null;

    for (ConnectorExpression expression : expressions) {
        if (expression instanceof Call) {
            Call call = (Call) expression;
            if (isSupportedLikeCall(call)) {
                List<ConnectorExpression> arguments = call.getArguments();
                String variableName = ((Variable) arguments.get(0)).getName();
                ElasticsearchColumnHandle column = (ElasticsearchColumnHandle) constraint.getAssignments().get(variableName);
                verifyNotNull(column, "No assignment for %s", variableName);
                String columnName = column.getName();
                Object pattern = ((Constant) arguments.get(1)).getValue();
                Optional<Slice> escape = Optional.empty();
                if (arguments.size() == 3) {
                    // NOTE(review): assumes isSupportedLikeCall guarantees a non-null
                    // escape constant; Optional.of would throw otherwise
                    escape = Optional.of((Slice) (((Constant) arguments.get(2)).getValue()));
                }

                if (!newRegexes.containsKey(columnName) && pattern instanceof Slice) {
                    if (metadata == null) {
                        metadata = client.getIndexMetadata(handle.getIndex());
                    }
                    // Only keyword fields support regexp queries; text fields are analyzed
                    // and would match per-token
                    if (metadata.getSchema()
                            .getFields().stream()
                            .anyMatch(field -> columnName.equals(field.getName()) && field.getType() instanceof PrimitiveType && "keyword".equals(((PrimitiveType) field.getType()).getName()))) {
                        newRegexes.put(columnName, likeToRegexp(((Slice) pattern), escape));
                        continue;
                    }
                }
            }
        }
        notHandledExpressions.add(expression);
    }

    ConnectorExpression newExpression = ConnectorExpressions.and(notHandledExpressions);
    if (oldDomain.equals(newDomain) && oldExpression.equals(newExpression)) {
        // Nothing new was pushed down; signal "no progress" to the engine
        return Optional.empty();
    }

    handle = new ElasticsearchTableHandle(handle.getType(), handle.getSchema(), handle.getIndex(), newDomain, newRegexes, handle.getQuery(), handle.getLimit());

    // `unsupported` domains plus unhandled conjuncts remain for the engine to enforce
    return Optional.of(new ConstraintApplicationResult<>(handle, TupleDomain.withColumnDomains(unsupported), newExpression, false));
}
Aggregations