Use of org.apache.drill.common.types.TypeProtos.MinorType in project drill by axbaretto.
Class FunctionImplementationRegistry, method functionReplacement.
/**
 * Checks if a cast-function replacement is needed for this function call and,
 * if so, returns the replacing function name.
 *
 * @param functionCall function call
 * @return new function name if replacement took place, otherwise original function name
 */
private String functionReplacement(FunctionCall functionCall) {
  String funcName = functionCall.getName();
  // No arguments means there is no type to inspect, so no replacement is possible.
  if (functionCall.args.isEmpty()) {
    return funcName;
  }
  // Replacement is gated on the CAST_TO_NULLABLE_NUMERIC option being enabled.
  boolean castToNullableNumeric = optionManager != null
      && optionManager.getOption(ExecConstants.CAST_TO_NULLABLE_NUMERIC_OPTION);
  if (!castToNullableNumeric) {
    return funcName;
  }
  // The decision is driven by the mode and minor type of the first argument only.
  MajorType majorType = functionCall.args.get(0).getMajorType();
  DataMode dataMode = majorType.getMode();
  MinorType minorType = majorType.getMinorType();
  if (CastFunctions.isReplacementNeeded(funcName, minorType)) {
    funcName = CastFunctions.getReplacingCastFunction(funcName, dataMode, minorType);
  }
  return funcName;
}
Use of org.apache.drill.common.types.TypeProtos.MinorType in project drill by axbaretto.
Class SchemaUtil, method mergeSchemas.
/**
 * Returns the merger of schemas. The merged schema will include the union of all columns.
 * If there is a type conflict between columns with the same schema path but different
 * types, the merged schema will contain a Union type listing each observed minor type.
 *
 * @param schemas schemas to merge; at least one is required (the selection vector mode
 *                of the first schema is carried over to the result)
 * @return the merged schema
 * @throws RuntimeException if any field is a MAP or LIST, since schema change is not
 *                          currently supported for complex types
 */
public static BatchSchema mergeSchemas(BatchSchema... schemas) {
  // Collect the set of minor types observed for each column path, preserving the
  // order in which paths are first seen (LinkedHashMap).
  Map<SchemaPath, Set<MinorType>> typeSetMap = Maps.newLinkedHashMap();
  for (BatchSchema s : schemas) {
    for (MaterializedField field : s) {
      SchemaPath path = SchemaPath.getSimplePath(field.getName());
      Set<MinorType> currentTypes = typeSetMap.computeIfAbsent(path, p -> Sets.newHashSet());
      MinorType newType = field.getType().getMinorType();
      if (newType == MinorType.MAP || newType == MinorType.LIST) {
        throw new RuntimeException("Schema change not currently supported for schemas with complex types");
      }
      if (newType == MinorType.UNION) {
        // An incoming union contributes each of its member subtypes.
        currentTypes.addAll(field.getType().getSubTypeList());
      } else {
        currentTypes.add(newType);
      }
    }
  }
  List<MaterializedField> fields = Lists.newArrayList();
  // Iterate entries directly to avoid a second map lookup per path.
  for (Map.Entry<SchemaPath, Set<MinorType>> entry : typeSetMap.entrySet()) {
    String name = entry.getKey().getLastSegment().getNameSegment().getPath();
    Set<MinorType> types = entry.getValue();
    if (types.size() > 1) {
      // Conflicting types: represent the column as an optional UNION of all of them.
      MajorType.Builder builder = MajorType.newBuilder().setMinorType(MinorType.UNION).setMode(DataMode.OPTIONAL);
      for (MinorType t : types) {
        builder.addSubType(t);
      }
      fields.add(MaterializedField.create(name, builder.build()));
    } else {
      // Single observed type: emit it as OPTIONAL.
      fields.add(MaterializedField.create(name, Types.optional(types.iterator().next())));
    }
  }
  SchemaBuilder schemaBuilder = new SchemaBuilder();
  return schemaBuilder
      .addFields(fields)
      .setSelectionVectorMode(schemas[0].getSelectionVectorMode())
      .build();
}
Use of org.apache.drill.common.types.TypeProtos.MinorType in project drill by axbaretto.
Class SchemaUtil, method coerceVector.
/**
 * Coerces an existing vector to match the target field's type, or creates a fresh
 * empty vector of that type when none exists.
 *
 * <p>If the source vector's minor type already matches the target's, its buffers are
 * transferred as-is (registering the target's subtypes when the target is a UNION).
 * If the types differ, the target must be a UNION, and the source vector becomes the
 * union's first member type. When the source vector is null, a new vector is
 * allocated with its value count set to {@code recordCount}.
 *
 * @param v           source vector, may be null
 * @param c           containing vector container (unused here; kept for the caller's contract)
 * @param field       target field describing the desired type
 * @param recordCount value count to set on a newly created vector
 * @param allocator   allocator used for transfers and new vectors
 * @return the coerced (or newly created) vector
 */
@SuppressWarnings("resource")
private static ValueVector coerceVector(ValueVector v, VectorContainer c, MaterializedField field, int recordCount, BufferAllocator allocator) {
  if (v != null) {
    int valueCount = v.getAccessor().getValueCount();
    TransferPair tp = v.getTransferPair(allocator);
    tp.transfer();
    // Enum comparison with == (consistent with the checks below, and null-safe).
    if (v.getField().getType().getMinorType() == field.getType().getMinorType()) {
      if (field.getType().getMinorType() == MinorType.UNION) {
        // Register the target union's member types on the transferred union vector.
        UnionVector u = (UnionVector) tp.getTo();
        for (MinorType t : field.getType().getSubTypeList()) {
          u.addSubType(t);
        }
      }
      return tp.getTo();
    } else {
      // Type mismatch: only a widening into a UNION vector is supported.
      ValueVector newVector = TypeHelper.getNewVector(field, allocator);
      Preconditions.checkState(field.getType().getMinorType() == MinorType.UNION, "Can only convert vector to Union vector");
      UnionVector u = (UnionVector) newVector;
      u.setFirstType(tp.getTo(), valueCount);
      return u;
    }
  } else {
    // No source vector: create an empty vector of the target type.
    v = TypeHelper.getNewVector(field, allocator);
    v.allocateNew();
    v.getMutator().setValueCount(recordCount);
    return v;
  }
}
Use of org.apache.drill.common.types.TypeProtos.MinorType in project drill by axbaretto.
Class VariantSchema, method addType.
/**
 * Adds a member type to this variant, building the column metadata that matches the
 * field's minor type.
 *
 * @param field the field whose type is added to the variant
 * @return the column metadata created for the field
 * @throws IllegalArgumentException if the field is itself a UNION
 */
public ColumnMetadata addType(MaterializedField field) {
  Preconditions.checkState(!isSimple);
  final MinorType kind = field.getType().getMinorType();
  checkType(kind);
  // A union may not nest directly inside another union.
  if (kind == MinorType.UNION) {
    throw new IllegalArgumentException("Cannot add a union to a union");
  }
  final AbstractColumnMetadata col;
  if (kind == MinorType.LIST) {
    col = new VariantColumnMetadata(field);
  } else if (kind == MinorType.MAP) {
    col = new MapColumnMetadata(field);
  } else {
    col = new PrimitiveColumnMetadata(field);
  }
  types.put(kind, col);
  return col;
}
Use of org.apache.drill.common.types.TypeProtos.MinorType in project drill by axbaretto.
Class ParquetGroupScan, method populatePruningVector.
/**
 * Writes the partition value for one column of one file into the pruning vector at
 * the given index, dispatching on the column's minor type.
 *
 * <p>The value is looked up in {@code partitionValueMap} keyed by the file path with
 * scheme/authority stripped, then by the column path. A null lookup result is written
 * as a null entry in the (nullable) vector.
 *
 * @param v      target vector; must be the nullable vector class matching the column's type
 * @param index  position in the vector to write
 * @param column column whose partition value is written
 * @param file   file path used to locate the partition value
 * @throws UnsupportedOperationException if the column's minor type is not handled
 */
public void populatePruningVector(ValueVector v, int index, SchemaPath column, String file) {
  // Normalize the file path so it matches the keys used in partitionValueMap.
  String f = Path.getPathWithoutSchemeAndAuthority(new Path(file)).toString();
  MajorType majorType = getTypeForColumn(column);
  MinorType type = majorType.getMinorType();
  switch(type) {
    case BIT:
      {
        NullableBitVector bitVector = (NullableBitVector) v;
        Boolean value = (Boolean) partitionValueMap.get(f).get(column);
        if (value == null) {
          bitVector.getMutator().setNull(index);
        } else {
          // BIT vectors store booleans as 0/1 ints.
          bitVector.getMutator().setSafe(index, value ? 1 : 0);
        }
        return;
      }
    case INT:
      {
        NullableIntVector intVector = (NullableIntVector) v;
        Integer value = (Integer) partitionValueMap.get(f).get(column);
        if (value == null) {
          intVector.getMutator().setNull(index);
        } else {
          intVector.getMutator().setSafe(index, value);
        }
        return;
      }
    case SMALLINT:
      {
        // Stored as Integer in the map; narrowed to short for the vector.
        NullableSmallIntVector smallIntVector = (NullableSmallIntVector) v;
        Integer value = (Integer) partitionValueMap.get(f).get(column);
        if (value == null) {
          smallIntVector.getMutator().setNull(index);
        } else {
          smallIntVector.getMutator().setSafe(index, value.shortValue());
        }
        return;
      }
    case TINYINT:
      {
        // Stored as Integer in the map; narrowed to byte for the vector.
        NullableTinyIntVector tinyIntVector = (NullableTinyIntVector) v;
        Integer value = (Integer) partitionValueMap.get(f).get(column);
        if (value == null) {
          tinyIntVector.getMutator().setNull(index);
        } else {
          tinyIntVector.getMutator().setSafe(index, value.byteValue());
        }
        return;
      }
    case UINT1:
      {
        NullableUInt1Vector intVector = (NullableUInt1Vector) v;
        Integer value = (Integer) partitionValueMap.get(f).get(column);
        if (value == null) {
          intVector.getMutator().setNull(index);
        } else {
          intVector.getMutator().setSafe(index, value.byteValue());
        }
        return;
      }
    case UINT2:
      {
        NullableUInt2Vector intVector = (NullableUInt2Vector) v;
        Integer value = (Integer) partitionValueMap.get(f).get(column);
        if (value == null) {
          intVector.getMutator().setNull(index);
        } else {
          // UINT2 setSafe takes a char (unsigned 16-bit).
          intVector.getMutator().setSafe(index, (char) value.shortValue());
        }
        return;
      }
    case UINT4:
      {
        NullableUInt4Vector intVector = (NullableUInt4Vector) v;
        Integer value = (Integer) partitionValueMap.get(f).get(column);
        if (value == null) {
          intVector.getMutator().setNull(index);
        } else {
          intVector.getMutator().setSafe(index, value);
        }
        return;
      }
    case BIGINT:
      {
        NullableBigIntVector bigIntVector = (NullableBigIntVector) v;
        Long value = (Long) partitionValueMap.get(f).get(column);
        if (value == null) {
          bigIntVector.getMutator().setNull(index);
        } else {
          bigIntVector.getMutator().setSafe(index, value);
        }
        return;
      }
    case FLOAT4:
      {
        NullableFloat4Vector float4Vector = (NullableFloat4Vector) v;
        Float value = (Float) partitionValueMap.get(f).get(column);
        if (value == null) {
          float4Vector.getMutator().setNull(index);
        } else {
          float4Vector.getMutator().setSafe(index, value);
        }
        return;
      }
    case FLOAT8:
      {
        NullableFloat8Vector float8Vector = (NullableFloat8Vector) v;
        Double value = (Double) partitionValueMap.get(f).get(column);
        if (value == null) {
          float8Vector.getMutator().setNull(index);
        } else {
          float8Vector.getMutator().setSafe(index, value);
        }
        return;
      }
    case VARBINARY:
      {
        NullableVarBinaryVector varBinaryVector = (NullableVarBinaryVector) v;
        Object s = partitionValueMap.get(f).get(column);
        byte[] bytes;
        if (s == null) {
          varBinaryVector.getMutator().setNull(index);
          return;
        } else {
          bytes = getBytes(type, s);
        }
        varBinaryVector.getMutator().setSafe(index, bytes, 0, bytes.length);
        return;
      }
    case DECIMAL18:
      {
        // The map value may be an Integer, a Long, or a raw byte[] encoding; each is
        // converted through BigDecimal using the column's scale (and precision).
        NullableDecimal18Vector decimalVector = (NullableDecimal18Vector) v;
        Object s = partitionValueMap.get(f).get(column);
        byte[] bytes;
        if (s == null) {
          decimalVector.getMutator().setNull(index);
          return;
        } else if (s instanceof Integer) {
          long value = DecimalUtility.getBigDecimalFromPrimitiveTypes((Integer) s, majorType.getScale(), majorType.getPrecision()).longValue();
          decimalVector.getMutator().setSafe(index, value);
          return;
        } else if (s instanceof Long) {
          long value = DecimalUtility.getBigDecimalFromPrimitiveTypes((Long) s, majorType.getScale(), majorType.getPrecision()).longValue();
          decimalVector.getMutator().setSafe(index, value);
          return;
        } else {
          bytes = getBytes(type, s);
        }
        long value = DecimalUtility.getBigDecimalFromByteArray(bytes, 0, bytes.length, majorType.getScale()).longValue();
        decimalVector.getMutator().setSafe(index, value);
        return;
      }
    case DATE:
      {
        NullableDateVector dateVector = (NullableDateVector) v;
        Integer value = (Integer) partitionValueMap.get(f).get(column);
        if (value == null) {
          dateVector.getMutator().setNull(index);
        } else {
          // Days-since-epoch to millis; the (long) cast avoids int overflow.
          dateVector.getMutator().setSafe(index, value * (long) DateTimeConstants.MILLIS_PER_DAY);
        }
        return;
      }
    case TIME:
      {
        NullableTimeVector timeVector = (NullableTimeVector) v;
        Integer value = (Integer) partitionValueMap.get(f).get(column);
        if (value == null) {
          timeVector.getMutator().setNull(index);
        } else {
          timeVector.getMutator().setSafe(index, value);
        }
        return;
      }
    case TIMESTAMP:
      {
        NullableTimeStampVector timeStampVector = (NullableTimeStampVector) v;
        Long value = (Long) partitionValueMap.get(f).get(column);
        if (value == null) {
          timeStampVector.getMutator().setNull(index);
        } else {
          timeStampVector.getMutator().setSafe(index, value);
        }
        return;
      }
    case VARCHAR:
      {
        NullableVarCharVector varCharVector = (NullableVarCharVector) v;
        Object s = partitionValueMap.get(f).get(column);
        byte[] bytes;
        if (s == null) {
          varCharVector.getMutator().setNull(index);
          return;
        } else {
          bytes = getBytes(type, s);
        }
        varCharVector.getMutator().setSafe(index, bytes, 0, bytes.length);
        return;
      }
    case INTERVAL:
      {
        NullableIntervalVector intervalVector = (NullableIntervalVector) v;
        Object s = partitionValueMap.get(f).get(column);
        byte[] bytes;
        if (s == null) {
          intervalVector.getMutator().setNull(index);
          return;
        } else {
          bytes = getBytes(type, s);
        }
        // Interval is encoded as three little-endian ints at offsets 0, 4, and 8.
        intervalVector.getMutator().setSafe(index, 1, ParquetReaderUtility.getIntFromLEBytes(bytes, 0), ParquetReaderUtility.getIntFromLEBytes(bytes, 4), ParquetReaderUtility.getIntFromLEBytes(bytes, 8));
        return;
      }
    default:
      throw new UnsupportedOperationException("Unsupported type: " + type);
  }
}
Aggregations