Use of org.apache.drill.common.exceptions.DrillRuntimeException in project drill by apache.
In class MappifyUtility, the method mappify:
public static DrillBuf mappify(FieldReader reader, BaseWriter.ComplexWriter writer, DrillBuf buffer) {
  // Currently we expect a single map as input
  if (DataMode.REPEATED == reader.getType().getMode() || !(reader.getType().getMinorType() == TypeProtos.MinorType.MAP)) {
    throw new DrillRuntimeException("kvgen function only supports Simple maps as input");
  }
  BaseWriter.ListWriter listWriter = writer.rootAsList();
  listWriter.startList();
  BaseWriter.MapWriter mapWriter = listWriter.map();

  // Iterate over the fields in the map
  Iterator<String> fieldIterator = reader.iterator();
  while (fieldIterator.hasNext()) {
    String str = fieldIterator.next();
    FieldReader fieldReader = reader.reader(str);

    // Skip the field if it is null
    if (fieldReader.isSet() == false) {
      mapWriter.end();
      continue;
    }

    // Writing a new field: start a new map
    mapWriter.start();

    // Write "key":"<column name>" into the map (fieldKey is this class's constant "key")
    VarCharHolder vh = new VarCharHolder();
    byte[] b = str.getBytes(Charsets.UTF_8);
    buffer = buffer.reallocIfNeeded(b.length);
    buffer.setBytes(0, b);
    vh.start = 0;
    vh.end = b.length;
    vh.buffer = buffer;
    mapWriter.varChar(fieldKey).write(vh);

    // Write the value to the map
    MapUtility.writeToMapFromReader(fieldReader, mapWriter);
    mapWriter.end();
  }
  listWriter.endList();
  return buffer;
}
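To make the transformation concrete: kvgen turns a single map into a list of {key, value} maps. Below is a minimal, self-contained sketch of the same logic on plain Java collections; the class and method names are illustrative, and the real method drives Drill's vector readers and writers instead.

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class KvgenSketch {
  // Turn {"a": 1, "b": 2} into [{key=a, value=1}, {key=b, value=2}],
  // mirroring the shape of what mappify produces.
  static List<Map<String, Object>> mappify(Map<String, Object> input) {
    List<Map<String, Object>> out = new ArrayList<>();
    for (Map.Entry<String, Object> field : input.entrySet()) {
      if (field.getValue() == null) {
        continue; // skip null fields, as the real implementation does
      }
      Map<String, Object> entry = new LinkedHashMap<>();
      entry.put("key", field.getKey());
      entry.put("value", field.getValue());
      out.add(entry);
    }
    return out;
  }

  public static void main(String[] args) {
    Map<String, Object> input = new LinkedHashMap<>();
    input.put("a", 1);
    input.put("b", 2);
    System.out.println(mappify(input)); // [{key=a, value=1}, {key=b, value=2}]
  }
}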
Use of org.apache.drill.common.exceptions.DrillRuntimeException in project drill by apache.
In class InterpreterEvaluator, the method evaluateFunction:
public static ValueHolder evaluateFunction(DrillSimpleFunc interpreter, ValueHolder[] args, String funcName) throws Exception {
  Preconditions.checkArgument(interpreter != null, "interpreter could not be null when use interpreted model to evaluate function " + funcName);
  // The current input index to assign into the next available parameter, found using the @Param annotation.
  // The order in which parameters are declared in the DrillFunc's Java class is meaningful.
  int currParameterIndex = 0;
  Field outField = null;
  try {
    Field[] fields = interpreter.getClass().getDeclaredFields();
    for (Field f : fields) {
      // If this field is annotated as a parameter to the function
      if (f.getAnnotation(Param.class) != null) {
        f.setAccessible(true);
        if (currParameterIndex < args.length) {
          f.set(interpreter, args[currParameterIndex]);
        }
        currParameterIndex++;
      } else if (f.getAnnotation(Output.class) != null) {
        f.setAccessible(true);
        outField = f;
        // Create an instance of the holder for the output to be stored in
        f.set(interpreter, f.getType().newInstance());
      }
    }
  } catch (IllegalAccessException e) {
    throw new RuntimeException(e);
  }
  if (args.length != currParameterIndex) {
    throw new DrillRuntimeException(String.format("Wrong number of parameters provided to interpreted expression evaluation " + "for function %s, expected %d parameters, but received %d.", funcName, currParameterIndex, args.length));
  }
  if (outField == null) {
    throw new DrillRuntimeException("Malformed DrillFunction without a return type: " + funcName);
  }
  interpreter.setup();
  interpreter.eval();
  ValueHolder out = (ValueHolder) outField.get(interpreter);
  return out;
}
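The reflection walk above is easier to see on a toy example. Here is a self-contained sketch of the same pattern, using stand-in @Param/@Output annotations and an int-based function rather than Drill's annotation classes and ValueHolders; like the real code, it assumes getDeclaredFields() returns fields in declaration order.

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.reflect.Field;

public class ReflectiveEvalSketch {
  // Stand-ins for Drill's @Param and @Output annotations.
  @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.FIELD) @interface Param {}
  @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.FIELD) @interface Output {}

  // A toy "function" in the DrillSimpleFunc style: inputs and output are annotated fields.
  static class AddFunc {
    @Param int left;
    @Param int right;
    @Output int result;
    void eval() { result = left + right; }
  }

  static int evaluate(AddFunc func, int[] args) throws Exception {
    int currParameterIndex = 0;
    Field outField = null;
    for (Field f : func.getClass().getDeclaredFields()) {
      if (f.getAnnotation(Param.class) != null) {
        f.setAccessible(true);
        f.setInt(func, args[currParameterIndex++]); // bind args in declaration order
      } else if (f.getAnnotation(Output.class) != null) {
        f.setAccessible(true);
        outField = f;
      }
    }
    func.eval();
    return outField.getInt(func); // read the result back out of the @Output field
  }

  public static void main(String[] args) throws Exception {
    System.out.println(evaluate(new AddFunc(), new int[] {2, 3})); // 5
  }
}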
Use of org.apache.drill.common.exceptions.DrillRuntimeException in project drill by apache.
In class FunctionImplementationRegistry, the method getLocalUdfDir:
/**
 * Creates the local udf directory, if it doesn't exist.
 * Checks that the local udf directory is a directory and that the current application has write rights on it.
 * Attempts to clean up the local udf directory in case jars were left over from a previous drillbit run.
 *
 * @param config drill config
 * @return path to local udf directory
 */
private Path getLocalUdfDir(DrillConfig config) {
  tmpDir = getTmpDir(config);
  File udfDir = new File(tmpDir, config.getString(ExecConstants.UDF_DIRECTORY_LOCAL));
  String udfPath = udfDir.getPath();
  if (udfDir.mkdirs()) {
    logger.debug("Local udf directory [{}] was created", udfPath);
  }
  Preconditions.checkState(udfDir.exists(), "Local udf directory [%s] must exist", udfPath);
  Preconditions.checkState(udfDir.isDirectory(), "Local udf directory [%s] must be a directory", udfPath);
  Preconditions.checkState(udfDir.canWrite(), "Local udf directory [%s] must be writable for application user", udfPath);
  try {
    FileUtils.cleanDirectory(udfDir);
  } catch (IOException e) {
    throw new DrillRuntimeException("Error during local udf directory clean up", e);
  }
  logger.info("Created and validated local udf directory [{}]", udfPath);
  return new Path(udfDir.toURI());
}
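For illustration, the same prepare-validate-clean sequence can be written with only the JDK. A minimal sketch follows; the real code uses Guava's Preconditions and Commons IO's FileUtils.cleanDirectory, and the names here are illustrative.

import java.io.File;

public class UdfDirSketch {
  // Create the directory if needed, validate it, then clear leftovers from a previous run.
  static File prepareDir(File parent, String name) {
    File dir = new File(parent, name);
    if (dir.mkdirs()) {
      System.out.println("Created " + dir);
    }
    if (!dir.exists() || !dir.isDirectory() || !dir.canWrite()) {
      throw new IllegalStateException(dir + " must be a writable directory");
    }
    // Clean out leftovers (top-level entries only, for brevity).
    File[] leftovers = dir.listFiles();
    if (leftovers != null) {
      for (File f : leftovers) {
        if (!f.delete()) {
          throw new RuntimeException("Could not clean up " + f);
        }
      }
    }
    return dir;
  }

  public static void main(String[] args) {
    File dir = prepareDir(new File(System.getProperty("java.io.tmpdir")), "udf-sketch");
    System.out.println("Ready: " + dir);
  }
}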
Use of org.apache.drill.common.exceptions.DrillRuntimeException in project drill by apache.
In class DrillPushProjIntoScan, the method onMatch:
@Override
public void onMatch(RelOptRuleCall call) {
  final Project proj = (Project) call.rel(0);
  final TableScan scan = (TableScan) call.rel(1);
  try {
    ProjectPushInfo columnInfo = PrelUtil.getColumns(scan.getRowType(), proj.getProjects());
    // Get the DrillTable, either wrapped in a RelOptTable or a DrillTranslatableTable.
    DrillTable table = scan.getTable().unwrap(DrillTable.class);
    if (table == null) {
      table = scan.getTable().unwrap(DrillTranslatableTable.class).getDrillTable();
    }
    if (columnInfo == null || columnInfo.isStarQuery()
        || !table.getGroupScan().canPushdownProjects(columnInfo.columns)) {
      return;
    }
    final DrillScanRel newScan = new DrillScanRel(scan.getCluster(), scan.getTraitSet().plus(DrillRel.DRILL_LOGICAL), scan.getTable(),
        columnInfo.createNewRowType(proj.getInput().getCluster().getTypeFactory()), columnInfo.columns);
    List<RexNode> newProjects = Lists.newArrayList();
    for (RexNode n : proj.getChildExps()) {
      newProjects.add(n.accept(columnInfo.getInputRewriter()));
    }
    final DrillProjectRel newProj = new DrillProjectRel(proj.getCluster(), proj.getTraitSet().plus(DrillRel.DRILL_LOGICAL), newScan,
        newProjects, proj.getRowType());
    if (ProjectRemoveRule.isTrivial(newProj)) {
      // The projection is a no-op after rewriting, so the scan alone is enough.
      call.transformTo(newScan);
    } else {
      call.transformTo(newProj);
    }
  } catch (IOException e) {
    throw new DrillRuntimeException(e);
  }
}
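The effect of the rule is easier to see without the Calcite machinery: instead of scanning every column and projecting afterwards, the scan itself is narrowed to the projected columns. A toy sketch of that idea, with illustrative names and no Calcite types:

import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class ProjectPushdownSketch {
  // A toy "scan" that can be asked to read only some columns,
  // standing in for GroupScan.canPushdownProjects / DrillScanRel.
  static class Scan {
    final Map<String, List<Object>> columns;
    Scan(Map<String, List<Object>> columns) { this.columns = columns; }
    Scan withColumns(List<String> keep) {
      Map<String, List<Object>> narrowed = new LinkedHashMap<>();
      for (String c : keep) {
        narrowed.put(c, columns.get(c));
      }
      return new Scan(narrowed);
    }
  }

  public static void main(String[] args) {
    Map<String, List<Object>> table = new LinkedHashMap<>();
    table.put("a", Arrays.asList(1, 2));
    table.put("b", Arrays.asList("x", "y"));
    table.put("c", Arrays.asList(true, false));
    // Instead of scanning all of {a, b, c} and projecting afterwards,
    // the "rule" narrows the scan to just the projected columns.
    Scan narrowed = new Scan(table).withColumns(Arrays.asList("a", "c"));
    System.out.println(narrowed.columns.keySet()); // [a, c]
  }
}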
Use of org.apache.drill.common.exceptions.DrillRuntimeException in project drill by apache.
In class BsonRecordReader, the method writeToListOrMap:
private void writeToListOrMap(BsonReader reader, final MapOrListWriterImpl writer, boolean isList, String fieldName) {
  writer.start();
  // Write fields until the end of the current document or array.
  while (reader.readBsonType() != BsonType.END_OF_DOCUMENT) {
    if (!isList) {
      fieldName = reader.readName();
    }
    BsonType currentBsonType = reader.getCurrentBsonType();
    switch (currentBsonType) {
      case INT32:
        int readInt32 = reader.readInt32();
        if (readNumbersAsDouble) {
          writeDouble(readInt32, writer, fieldName, isList);
        } else {
          writeInt32(readInt32, writer, fieldName, isList);
        }
        atLeastOneWrite = true;
        break;
      case INT64:
        long readInt64 = reader.readInt64();
        if (readNumbersAsDouble) {
          writeDouble(readInt64, writer, fieldName, isList);
        } else {
          writeInt64(readInt64, writer, fieldName, isList);
        }
        atLeastOneWrite = true;
        break;
      case ARRAY:
        reader.readStartArray();
        writeToListOrMap(reader, (MapOrListWriterImpl) writer.list(fieldName), true, fieldName);
        atLeastOneWrite = true;
        break;
      case BINARY:
        // Handle binary subtypes.
        writeBinary(reader, writer, fieldName, isList);
        atLeastOneWrite = true;
        break;
      case BOOLEAN:
        boolean readBoolean = reader.readBoolean();
        writeBoolean(readBoolean, writer, fieldName, isList);
        atLeastOneWrite = true;
        break;
      case DATE_TIME:
        long readDateTime = reader.readDateTime();
        writeDateTime(readDateTime, writer, fieldName, isList);
        atLeastOneWrite = true;
        break;
      case DOCUMENT:
        reader.readStartDocument();
        // To handle nested documents.
        MapOrListWriterImpl _writer = writer;
        if (!isList) {
          _writer = (MapOrListWriterImpl) writer.map(fieldName);
        } else {
          _writer = (MapOrListWriterImpl) writer.listoftmap(fieldName);
        }
        writeToListOrMap(reader, _writer, false, fieldName);
        atLeastOneWrite = true;
        break;
      case DOUBLE:
        double readDouble = reader.readDouble();
        writeDouble(readDouble, writer, fieldName, isList);
        atLeastOneWrite = true;
        break;
      case JAVASCRIPT:
        final String readJavaScript = reader.readJavaScript();
        writeString(readJavaScript, writer, fieldName, isList);
        atLeastOneWrite = true;
        break;
      case JAVASCRIPT_WITH_SCOPE:
        final String readJavaScriptWithScopeString = reader.readJavaScriptWithScope();
        writeString(readJavaScriptWithScopeString, writer, fieldName, isList);
        atLeastOneWrite = true;
        break;
      case NULL:
        // Just read and ignore.
        reader.readNull();
        break;
      case OBJECT_ID:
        writeObjectId(reader, writer, fieldName, isList);
        atLeastOneWrite = true;
        break;
      case STRING:
        final String readString = reader.readString();
        writeString(readString, writer, fieldName, isList);
        atLeastOneWrite = true;
        break;
      case SYMBOL:
        final String readSymbol = reader.readSymbol();
        writeString(readSymbol, writer, fieldName, isList);
        atLeastOneWrite = true;
        break;
      case TIMESTAMP:
        int time = reader.readTimestamp().getTime();
        writeTimeStamp(time, writer, fieldName, isList);
        atLeastOneWrite = true;
        break;
      default:
        // REGULAR_EXPRESSION and DB_POINTER types are not handled.
        throw new DrillRuntimeException("UnSupported Bson type: " + currentBsonType);
    }
  }
  if (!isList) {
    reader.readEndDocument();
  } else {
    reader.readEndArray();
  }
}
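For context, the BsonReader protocol this method drives is: read the value type, then (inside a document) the field name, then the typed value. A minimal standalone sketch using the org.bson classes, covering just INT32 and STRING and skipping everything else:

import org.bson.BsonDocument;
import org.bson.BsonDocumentReader;
import org.bson.BsonReader;
import org.bson.BsonType;

public class BsonWalkSketch {
  public static void main(String[] args) {
    BsonDocument doc = BsonDocument.parse("{\"a\": 1, \"b\": \"x\"}");
    BsonReader reader = new BsonDocumentReader(doc);
    reader.readStartDocument();
    // The same read loop the record reader drives: type, then name, then value.
    while (reader.readBsonType() != BsonType.END_OF_DOCUMENT) {
      String name = reader.readName();
      switch (reader.getCurrentBsonType()) {
        case INT32:
          System.out.println(name + " -> " + reader.readInt32());
          break;
        case STRING:
          System.out.println(name + " -> " + reader.readString());
          break;
        default:
          reader.skipValue(); // ignore types this sketch doesn't cover
      }
    }
    reader.readEndDocument();
  }
}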