Example use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.jdbc.JavaTypeFactoryImpl in project druid (druid-io): class SystemSchemaTest, method verifyTypes.
/**
 * Verifies that every value in each row matches the Java class implied by the signature's
 * column type, honoring per-column nullability derived from the Calcite row type.
 *
 * @param rows      result rows to check; each row must have one value per signature column
 * @param signature expected row signature
 * @throws ISE if the signature is missing a column type
 * @throws IAE if a column type has no known expected Java class
 */
private static void verifyTypes(final List<Object[]> rows, final RowSignature signature) {
  final RelDataType rowType = RowSignatures.toRelDataType(signature, new JavaTypeFactoryImpl());
  for (Object[] row : rows) {
    // JUnit convention is assertEquals(expected, actual); the original had them swapped,
    // which produces a misleading failure message.
    Assert.assertEquals(signature.size(), row.length);
    for (int i = 0; i < row.length; i++) {
      final Class<?> expectedClass;
      final ColumnType columnType =
          signature.getColumnType(i).orElseThrow(() -> new ISE("Encountered null column type"));
      final boolean nullable = rowType.getFieldList().get(i).getType().isNullable();
      switch (columnType.getType()) {
        case LONG:
          expectedClass = Long.class;
          break;
        case FLOAT:
          expectedClass = Float.class;
          break;
        case DOUBLE:
          expectedClass = Double.class;
          break;
        case STRING:
          // The segment_id column is materialized as a SegmentId object rather than a String.
          if (signature.getColumnName(i).equals("segment_id")) {
            expectedClass = SegmentId.class;
          } else {
            expectedClass = String.class;
          }
          break;
        default:
          throw new IAE("Don't know what class to expect for valueType[%s]", columnType);
      }
      // Hoisted out of both branches: the actual class name (or null) for failure messages.
      final String actualClassName = row[i] == null ? null : row[i].getClass().getName();
      if (nullable) {
        Assert.assertTrue(
            StringUtils.format(
                "Column[%s] is a [%s] or null (was %s)",
                signature.getColumnName(i), expectedClass.getName(), actualClassName),
            row[i] == null || expectedClass.isAssignableFrom(row[i].getClass())
        );
      } else {
        Assert.assertTrue(
            StringUtils.format(
                "Column[%s] is a [%s] (was %s)",
                signature.getColumnName(i), expectedClass.getName(), actualClassName),
            row[i] != null && expectedClass.isAssignableFrom(row[i].getClass())
        );
      }
    }
  }
}
Example use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.jdbc.JavaTypeFactoryImpl in project druid (druid-io): class DruidSchemaTest, method testGetTableMapFoo2.
/**
 * Checks that the "foo2" table exposes exactly three columns with the expected
 * names and SQL types, in order.
 */
@Test
public void testGetTableMapFoo2() {
  final DruidTable fooTable = (DruidTable) schema.getTableMap().get("foo2");
  final RelDataType rowType = fooTable.getRowType(new JavaTypeFactoryImpl());
  final List<RelDataTypeField> fields = rowType.getFieldList();

  // Expected (column name, SQL type) pairs, positionally aligned with the row type.
  final String[] expectedNames = {"__time", "dim2", "m1"};
  final SqlTypeName[] expectedTypes = {
      SqlTypeName.TIMESTAMP, SqlTypeName.VARCHAR, SqlTypeName.BIGINT
  };

  Assert.assertEquals(expectedNames.length, fields.size());
  for (int col = 0; col < expectedNames.length; col++) {
    Assert.assertEquals(expectedNames[col], fields.get(col).getName());
    Assert.assertEquals(expectedTypes[col], fields.get(col).getType().getSqlTypeName());
  }
}
Example use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.jdbc.JavaTypeFactoryImpl in project druid (druid-io): class DruidSchemaTest, method testGetTableMapFoo.
/**
 * Checks that the "foo" table exposes exactly six columns with the expected
 * names and SQL types, in order.
 */
@Test
public void testGetTableMapFoo() {
  final DruidTable fooTable = (DruidTable) schema.getTableMap().get("foo");
  final RelDataType rowType = fooTable.getRowType(new JavaTypeFactoryImpl());
  final List<RelDataTypeField> fields = rowType.getFieldList();

  // Expected (column name, SQL type) pairs, positionally aligned with the row type.
  final String[] expectedNames = {"__time", "cnt", "dim1", "dim2", "m1", "unique_dim1"};
  final SqlTypeName[] expectedTypes = {
      SqlTypeName.TIMESTAMP,
      SqlTypeName.BIGINT,
      SqlTypeName.VARCHAR,
      SqlTypeName.VARCHAR,
      SqlTypeName.BIGINT,
      SqlTypeName.OTHER
  };

  Assert.assertEquals(expectedNames.length, fields.size());
  for (int col = 0; col < expectedNames.length; col++) {
    Assert.assertEquals(expectedNames[col], fields.get(col).getName());
    Assert.assertEquals(expectedTypes[col], fields.get(col).getType().getSqlTypeName());
  }
}
Example use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.jdbc.JavaTypeFactoryImpl in project drill (apache): class EnumerableRecordReader, method setup.
/**
 * Compiles the generated code and binds it against the schema tree to produce the iterator
 * over query results.
 *
 * <p>Depending on the projection, each element of the bound iterable is either a single column
 * value (exactly one projected field) or an {@code Object[]} row (all fields projected).
 *
 * @param context operator context supplying the fragment's root schema, config, and options
 * @throws DrillRuntimeException wrapping the root cause if compilation or binding fails
 */
@SuppressWarnings("unchecked")
private void setup(OperatorContext context) {
  SchemaPlus rootSchema = context.getFragmentContext().getFullRootSchema();
  DataContext root = new DrillDataContext(
      schemaPath != null
          ? SchemaUtilites.searchSchemaTree(rootSchema, SchemaUtilites.getSchemaPathAsList(schemaPath))
          : rootSchema,
      new JavaTypeFactoryImpl(),
      Collections.emptyMap());
  try {
    Class<?> implementationClass = ClassBuilder.getCompiledClass(
        code, CLASS_NAME, context.getFragmentContext().getConfig(), context.getFragmentContext().getOptions());
    // Class.newInstance() is deprecated since Java 9 and rethrows constructor checked
    // exceptions unchecked; instantiate via the declared no-arg constructor instead.
    // NoSuchMethodException/InvocationTargetException are ReflectiveOperationExceptions,
    // so the existing catch clause still covers them.
    Object instance = implementationClass.getDeclaredConstructor().newInstance();
    Iterable<?> iterable = (Iterable<Map<String, Object>>) implementationClass
        .getMethod(BuiltInMethod.BINDABLE_BIND.method.getName(), DataContext.class)
        .invoke(instance, root);
    if (fieldsMap.size() == 1) {
      // for the case of projecting single column, its value is returned
      records = StreamSupport.stream(iterable.spliterator(), false).map(this::wrap).iterator();
    } else {
      // for the case when all columns were projected, array is returned
      records = StreamSupport.stream(iterable.spliterator(), false).map(row -> wrap((Object[]) row)).iterator();
    }
  } catch (CompileException | IOException | ClassTransformationException | ReflectiveOperationException e) {
    logger.error("Exception happened when executing generated code", e);
    Throwable rootCause = Throwables.getRootCause(e);
    throw new DrillRuntimeException(rootCause.getMessage(), rootCause);
  }
}
Example use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.jdbc.JavaTypeFactoryImpl in project beam (apache): class LazyAggregateCombineFnTest, method nonparameterizedGetUdafImpl_throwsIllegalStateException.
/**
 * Verifies that resolving a parameter type from a non-parameterized aggregate function's
 * UDAF implementation throws {@code IllegalStateException}.
 */
@Test
public void nonparameterizedGetUdafImpl_throwsIllegalStateException() {
  final LazyAggregateCombineFn<?, ?, ?> combineFn =
      new LazyAggregateCombineFn<>(new NonParameterizedAggregateFn());
  final AggregateFunction udaf = combineFn.getUdafImpl();
  final RelDataTypeFactory factory = new JavaTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
  // The expectation must be armed before the call that is expected to throw.
  exceptions.expect(IllegalStateException.class);
  final List<FunctionParameter> parameters = udaf.getParameters();
  parameters.get(0).getType(factory);
}
Aggregations