use of org.apache.drill.common.types.TypeProtos.MinorType in project drill by apache.
the class MapUtility method writeToListFromReader.
public static void writeToListFromReader(FieldReader fieldReader, ListWriter listWriter, String caller) {
  try {
    MajorType valueMajorType = fieldReader.getType();
    MinorType valueMinorType = valueMajorType.getMinorType();
    switch (valueMinorType) {
      case TINYINT:
        fieldReader.copyAsValue(listWriter.tinyInt());
        break;
      case SMALLINT:
        fieldReader.copyAsValue(listWriter.smallInt());
        break;
      case BIGINT:
        fieldReader.copyAsValue(listWriter.bigInt());
        break;
      case INT:
        fieldReader.copyAsValue(listWriter.integer());
        break;
      case UINT1:
        fieldReader.copyAsValue(listWriter.uInt1());
        break;
      case UINT2:
        fieldReader.copyAsValue(listWriter.uInt2());
        break;
      case UINT4:
        fieldReader.copyAsValue(listWriter.uInt4());
        break;
      case UINT8:
        fieldReader.copyAsValue(listWriter.uInt8());
        break;
      case DECIMAL9:
        fieldReader.copyAsValue(listWriter.decimal9());
        break;
      case DECIMAL18:
        fieldReader.copyAsValue(listWriter.decimal18());
        break;
      case DECIMAL28SPARSE:
        fieldReader.copyAsValue(listWriter.decimal28Sparse());
        break;
      case DECIMAL38SPARSE:
        fieldReader.copyAsValue(listWriter.decimal38Sparse());
        break;
      case VARDECIMAL:
        fieldReader.copyAsValue(listWriter.varDecimal(valueMajorType.getPrecision(), valueMajorType.getScale()));
        break;
      case DATE:
        fieldReader.copyAsValue(listWriter.date());
        break;
      case TIME:
        fieldReader.copyAsValue(listWriter.time());
        break;
      case TIMESTAMP:
        fieldReader.copyAsValue(listWriter.timeStamp());
        break;
      case INTERVAL:
        fieldReader.copyAsValue(listWriter.interval());
        break;
      case INTERVALDAY:
        fieldReader.copyAsValue(listWriter.intervalDay());
        break;
      case INTERVALYEAR:
        fieldReader.copyAsValue(listWriter.intervalYear());
        break;
      case FLOAT4:
        fieldReader.copyAsValue(listWriter.float4());
        break;
      case FLOAT8:
        fieldReader.copyAsValue(listWriter.float8());
        break;
      case BIT:
        fieldReader.copyAsValue(listWriter.bit());
        break;
      case VARCHAR:
        fieldReader.copyAsValue(listWriter.varChar());
        break;
      case VARBINARY:
        fieldReader.copyAsValue(listWriter.varBinary());
        break;
      case MAP:
        fieldReader.copyAsValue(listWriter.map());
        break;
      case LIST:
        fieldReader.copyAsValue(listWriter.list());
        break;
      default:
        throw new DrillRuntimeException(String.format(caller + " function does not support input of type: %s", valueMinorType));
    }
  } catch (ClassCastException e) {
    final MaterializedField field = fieldReader.getField();
    throw new DrillRuntimeException(String.format(caller + TYPE_MISMATCH_ERROR, field.getName(), field.getType()));
  }
}
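A call site for this helper typically sits inside a UDF with a complex output type, where the ComplexWriter supplies the ListWriter. The following is a minimal sketch, not code from the Drill sources; the names reader, out, appendValue and the caller label "my_udf" are assumptions for illustration only.
// Hypothetical sketch: copy the value the FieldReader is currently positioned on
// into the output list, letting writeToListFromReader dispatch on the minor type.
static void appendValue(FieldReader reader, BaseWriter.ComplexWriter out) {
  BaseWriter.ListWriter listWriter = out.rootAsList();
  listWriter.startList();
  MapUtility.writeToListFromReader(reader, listWriter, "my_udf");
  listWriter.endList();
}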
use of org.apache.drill.common.types.TypeProtos.MinorType in project drill by apache.
the class BaseWriterBuilder method buildUnion.
private AbstractObjectWriter buildUnion(UnionVector vector, VectorDescrip descrip) {
  if (vector == null) {
    throw new UnsupportedOperationException("Dummy variant writer not yet supported");
  }
  final AbstractObjectWriter[] variants = new AbstractObjectWriter[MinorType.values().length];
  final MetadataProvider mdProvider = descrip.childProvider();
  int i = 0;
  for (final MinorType type : vector.getField().getType().getSubTypeList()) {
    // This call will create the vector if it does not yet exist and will throw
    // an exception for unsupported types, so call it only if the MajorType
    // reports that the type already exists.
    final ValueVector memberVector = vector.getMember(type);
    final VectorDescrip memberDescrip = new VectorDescrip(mdProvider, i++, memberVector.getField());
    variants[type.ordinal()] = buildVectorWriter(memberVector, memberDescrip);
  }
  return new VariantObjectWriter(new UnionWriterImpl(descrip.metadata, vector, variants));
}
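The variants array is sized to MinorType.values().length and indexed by MinorType ordinal, so the writer for a given subtype can later be looked up in constant time. A small hypothetical illustration (the variable name varcharWriter is an assumption, not from the Drill sources):
// Hypothetical: fetch the writer for the VARCHAR member of the union, if one was built.
AbstractObjectWriter varcharWriter = variants[MinorType.VARCHAR.ordinal()];
if (varcharWriter != null) {
  // ... write a VARCHAR value through this variant ...
}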
use of org.apache.drill.common.types.TypeProtos.MinorType in project drill by apache.
the class TypeCastRules method getLeastRestrictiveType.
/*
 * Returns the least restrictive type among the given minor types: the type (by the
 * precedence map) to which the other types can be implicitly cast. Returns UNION as
 * soon as it is encountered, and null if the types are not mutually castable.
 */
public static MinorType getLeastRestrictiveType(List<MinorType> types) {
  assert types.size() >= 2;
  MinorType result = types.get(0);
  if (result == MinorType.UNION) {
    return result;
  }
  int resultPrec = ResolverTypePrecedence.precedenceMap.get(result);
  for (int i = 1; i < types.size(); i++) {
    MinorType next = types.get(i);
    if (next == MinorType.UNION) {
      return next;
    }
    if (next == result) {
      // both args are of the same type; continue
      continue;
    }
    int nextPrec = ResolverTypePrecedence.precedenceMap.get(next);
    if (isCastable(next, result) && resultPrec >= nextPrec) {
      // result is the least restrictive of the two args; nothing to do, continue
      continue;
    } else if (isCastable(result, next) && nextPrec >= resultPrec) {
      result = next;
      resultPrec = nextPrec;
    } else {
      return null;
    }
  }
  return result;
}
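As a usage sketch, a caller resolving a common type for two inputs (for example, the branches of an if/else expression) could invoke it as below. This is illustrative only; the expectation that BIGINT outranks INT in the precedence map and that INT is castable to BIGINT is an assumption about Drill's rules, not taken from this snippet.
// Hypothetical: resolve a common type for an INT value and a BIGINT value.
// Under the stated assumptions this returns BIGINT; it returns null when no
// common type exists (java.util.Arrays is used to build the argument list).
MinorType common = TypeCastRules.getLeastRestrictiveType(Arrays.asList(MinorType.INT, MinorType.BIGINT));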
use of org.apache.drill.common.types.TypeProtos.MinorType in project drill by apache.
the class TypeCastRules method getCost.
/**
 * Decide whether it is legal to do an implicit cast:
 *   -1  : implicit cast is not allowed
 *   > 0 : cost associated with the implicit cast
 *   == 0: the parameters are exactly the same types as the arguments; no implicit cast is needed
 */
public static int getCost(List<MajorType> argumentTypes, DrillFuncHolder holder) {
  int cost = 0;
  if (argumentTypes.size() != holder.getParamCount() && !holder.isVarArg()) {
    return -1;
  }
  // Indicates whether we used secondary cast rules
  boolean secondaryCast = false;
  // Number of arguments that could be implicitly cast using the precedence map
  // or did not require casting at all
  int nCasts = 0;
  /*
   * If we are determining the function holder for a decimal data type, we need to
   * make sure the output type of the function can fit the precision that we
   * need based on the input types.
   */
  if (holder.checkPrecisionRange()) {
    List<LogicalExpression> logicalExpressions = Lists.newArrayList();
    for (MajorType majorType : argumentTypes) {
      logicalExpressions.add(new MajorTypeInLogicalExpression(majorType));
    }
    if (DRILL_REL_DATATYPE_SYSTEM.getMaxNumericPrecision() < holder.getReturnType(logicalExpressions).getPrecision()) {
      return -1;
    }
  }
  final int numOfArgs = argumentTypes.size();
  for (int i = 0; i < numOfArgs; i++) {
    final MajorType argType = argumentTypes.get(i);
    final MajorType paramType = holder.getParamMajorType(i);
    // @Param FieldReader will match any type
    if (holder.isFieldReader(i)) {
      // if (Types.isComplex(call.args.get(i).getMajorType()) || Types.isRepeated(call.args.get(i).getMajorType()))
      // Add the max cost when encountering a field reader, considering
      // that it is the most expensive factor contributing to the cost.
      cost += ResolverTypePrecedence.MAX_IMPLICIT_CAST_COST;
      continue;
    }
    if (!TypeCastRules.isCastableWithNullHandling(argType, paramType, holder.getNullHandling())) {
      return -1;
    }
    Integer paramVal = ResolverTypePrecedence.precedenceMap.get(paramType.getMinorType());
    Integer argVal = ResolverTypePrecedence.precedenceMap.get(argType.getMinorType());
    if (paramVal == null) {
      throw new RuntimeException(String.format("Precedence for type %s is not defined", paramType.getMinorType().name()));
    }
    if (argVal == null) {
      throw new RuntimeException(String.format("Precedence for type %s is not defined", argType.getMinorType().name()));
    }
    if (paramVal - argVal < 0) {
      /* The precedence rules do not allow an implicit cast; however, check
       * whether the secondary rules allow us to cast.
       */
      Set<MinorType> rules;
      if ((rules = (ResolverTypePrecedence.secondaryImplicitCastRules.get(paramType.getMinorType()))) != null && rules.contains(argType.getMinorType())) {
        secondaryCast = true;
      } else {
        return -1;
      }
    }
    // Otherwise, the function implementation is not a match.
    if (argType.getMode() != paramType.getMode()) {
      // This allows a non-nullable implementation to be preferred.
      if (holder.getNullHandling() == NullHandling.INTERNAL) {
        // The function expects a required (non-nullable) argument, but a nullable one was provided.
        if (paramType.getMode() == DataMode.REQUIRED && argType.getMode() == DataMode.OPTIONAL) {
          return -1;
        } else if (paramType.getMode() == DataMode.OPTIONAL && argType.getMode() == DataMode.REQUIRED) {
          cost += DATAMODE_CAST_COST;
        }
      }
    }
    int castCost;
    if ((castCost = (paramVal - argVal)) >= 0) {
      nCasts++;
      cost += castCost;
    }
  }
  if (holder.isVarArg()) {
    int varArgIndex = holder.getParamCount() - 1;
    for (int i = varArgIndex; i < numOfArgs; i++) {
      if (holder.isFieldReader(varArgIndex)) {
        break;
      } else if (holder.getParamMajorType(varArgIndex).getMode() == DataMode.REQUIRED && holder.getParamMajorType(varArgIndex).getMode() != argumentTypes.get(i).getMode()) {
        // The function accepts only required arguments, but an optional one was provided.
        return -1;
      }
    }
    // Increase the cost for var-arg functions to prioritize regular ones.
    Integer additionalCost = ResolverTypePrecedence.precedenceMap.get(holder.getParamMajorType(varArgIndex).getMinorType());
    cost += additionalCost != null ? additionalCost : VARARG_COST;
    cost += holder.getParamMajorType(varArgIndex).getMode() == DataMode.REQUIRED ? 0 : 1;
  }
  if (secondaryCast) {
    // We have a secondary cast for one or more of the arguments; determine the associated cost.
    int secondaryCastCost = Integer.MAX_VALUE - 1;
    // Subtract the maximum possible implicit cast costs from the secondary cast cost.
    secondaryCastCost -= (nCasts * (ResolverTypePrecedence.MAX_IMPLICIT_CAST_COST + DATAMODE_CAST_COST));
    // Add the cost of implicitly casting the remaining arguments that did not use secondary casting.
    secondaryCastCost += cost;
    return secondaryCastCost;
  }
  return cost;
}
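The returned cost is what function resolution compares across candidate implementations: the holder with the lowest non-negative cost wins. The sketch below shows that selection idea only; it is not the actual resolver code, and the names candidates, bestMatch and bestCost are assumptions.
// Hypothetical sketch: pick the cheapest matching implementation for the given argument types.
DrillFuncHolder bestMatch = null;
int bestCost = Integer.MAX_VALUE;
for (DrillFuncHolder candidate : candidates) {
  int candidateCost = TypeCastRules.getCost(argumentTypes, candidate);
  if (candidateCost >= 0 && candidateCost < bestCost) {
    bestCost = candidateCost;
    bestMatch = candidate;
  }
}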
use of org.apache.drill.common.types.TypeProtos.MinorType in project drill by apache.
the class SchemaUtil method mergeSchemas.
/**
 * Returns the merged schema. The merged schema includes the union of all columns. If there is a
 * type conflict between columns with the same SchemaPath but different types, the merged schema
 * will contain a UNION type for that column.
 * @param schemas the schemas to merge
 * @return the merged schema
 */
public static BatchSchema mergeSchemas(BatchSchema... schemas) {
  Map<SchemaPath, Set<MinorType>> typeSetMap = Maps.newLinkedHashMap();
  for (BatchSchema s : schemas) {
    for (MaterializedField field : s) {
      SchemaPath path = SchemaPath.getSimplePath(field.getName());
      Set<MinorType> currentTypes = typeSetMap.get(path);
      if (currentTypes == null) {
        currentTypes = Sets.newHashSet();
        typeSetMap.put(path, currentTypes);
      }
      MinorType newType = field.getType().getMinorType();
      if (newType == MinorType.MAP || newType == MinorType.LIST) {
        throw new RuntimeException("Schema change not currently supported for schemas with complex types");
      }
      if (newType == MinorType.UNION) {
        currentTypes.addAll(field.getType().getSubTypeList());
      } else {
        currentTypes.add(newType);
      }
    }
  }
  List<MaterializedField> fields = Lists.newArrayList();
  for (SchemaPath path : typeSetMap.keySet()) {
    Set<MinorType> types = typeSetMap.get(path);
    if (types.size() > 1) {
      MajorType.Builder builder = MajorType.newBuilder().setMinorType(MinorType.UNION).setMode(DataMode.OPTIONAL);
      for (MinorType t : types) {
        builder.addSubType(t);
      }
      MaterializedField field = MaterializedField.create(path.getLastSegment().getNameSegment().getPath(), builder.build());
      fields.add(field);
    } else {
      MaterializedField field = MaterializedField.create(path.getLastSegment().getNameSegment().getPath(), Types.optional(types.iterator().next()));
      fields.add(field);
    }
  }
  SchemaBuilder schemaBuilder = new SchemaBuilder();
  BatchSchema s = schemaBuilder.addFields(fields).setSelectionVectorMode(schemas[0].getSelectionVectorMode()).build();
  return s;
}
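A typical use is merging the schemas of two incoming record batches ahead of a schema change. The line below is a hypothetical sketch: leftBatch and rightBatch are assumed RecordBatch instances and the column name "amount" is illustrative.
// Hypothetical: if "amount" is INT in one batch and FLOAT8 in the other, the merged
// field becomes an OPTIONAL UNION column with INT and FLOAT8 as its subtypes.
BatchSchema merged = SchemaUtil.mergeSchemas(leftBatch.getSchema(), rightBatch.getSchema());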