Usage example of org.apache.hyracks.algebricks.common.exceptions.NotImplementedException in the Apache AsterixDB project.
From the class AdmDataGen, method dataGen:
/**
 * Generates ADM data files for every type in {@code typeMap} that carries a
 * data-generation annotation.
 * <p>
 * For each registered type: only OBJECT (record) types are supported; the type
 * must carry a {@link RecordDataGenAnnotation}; the requested number of records
 * is written to the configured output file, one record per line.
 *
 * @throws NotImplementedException if a data-gen type is not an OBJECT type
 * @throws Exception if a type has no data generator annotation
 */
public void dataGen() throws Exception {
    for (Map.Entry<TypeSignature, IAType> me : typeMap.entrySet()) {
        TypeSignature tn = me.getKey();
        // NOTE(review): assumes typeAnnotMap has an entry for every key in
        // typeMap — tdg would otherwise be null here. TODO confirm invariant.
        TypeDataGen tdg = typeAnnotMap.get(tn);
        if (tdg.isDataGen()) {
            IAType t = me.getValue();
            if (t.getTypeTag() != ATypeTag.OBJECT) {
                // Data generation is only implemented for record types.
                throw new NotImplementedException();
            }
            ARecordType rt = (ARecordType) t;
            RecordDataGenAnnotation dga = firstDataGenAnnotation(rt);
            if (dga == null) {
                throw new Exception("No data generator annotations for type " + tn);
            }
            File outFile = new File(outputDir + File.separator + tdg.getOutputFileName());
            // try-with-resources: the original leaked the stream (and its
            // buffered contents) if rg.init() or rg.generate() threw.
            try (PrintStream outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outFile)))) {
                RecordGenerator rg = new RecordGenerator(rt, dga, "\n");
                rg.init(outStream, dgCtx);
                for (long i = 0; i < tdg.getNumValues(); i++) {
                    rg.generate();
                }
            }
        }
    }
}
Usage example of org.apache.hyracks.algebricks.common.exceptions.NotImplementedException in the Apache AsterixDB project.
From the class ExternalDataUtils, method getValueParserFactories:
/**
 * Builds one value-parser factory per field of the given record type.
 * <p>
 * An optional (unknownable) UNION field is unwrapped to its actual type; a
 * non-optional UNION is rejected, as is a field whose type tag cannot be
 * resolved.
 *
 * @param recordType the record type whose fields are to be parsed
 * @return an array of parser factories, index-aligned with the record's fields
 * @throws NotImplementedException for non-optional UNION fields or unresolvable
 *         field type tags
 */
public static IValueParserFactory[] getValueParserFactories(ARecordType recordType) {
    int fieldCount = recordType.getFieldTypes().length;
    IValueParserFactory[] factories = new IValueParserFactory[fieldCount];
    for (int idx = 0; idx < fieldCount; idx++) {
        ATypeTag tag = recordType.getFieldTypes()[idx].getTypeTag();
        if (tag == ATypeTag.UNION) {
            AUnionType unionType = (AUnionType) recordType.getFieldTypes()[idx];
            if (!unionType.isUnknownableType()) {
                throw new NotImplementedException("Non-optional UNION type is not supported.");
            }
            // Optional union: parse according to the wrapped (actual) type.
            tag = unionType.getActualType().getTypeTag();
        }
        if (tag == null) {
            throw new NotImplementedException("Failed to get the type information for field " + idx + ".");
        }
        factories[idx] = getParserFactory(tag);
    }
    return factories;
}
Usage example of org.apache.hyracks.algebricks.common.exceptions.NotImplementedException in the Apache AsterixDB project.
From the class NestedLoopJoinPOperator, method getRequiredPropertiesForChildren:
/**
 * Declares the partitioning requirements for the two inputs of a broadcast
 * nested loop join: the first (outer) input may be partitioned arbitrarily,
 * while the second (inner) input must be broadcast to all partitions.
 *
 * @throws NotImplementedException for any partitioning type other than BROADCAST
 */
@Override
public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op, IPhysicalPropertiesVector reqdByParent, IOptimizationContext context) {
    if (partitioningType != JoinPartitioningType.BROADCAST) {
        throw new NotImplementedException(partitioningType + " nested loop joins are not implemented.");
    }
    // TODO: leverage statistics to make better decisions.
    StructuralPropertiesVector outerSide =
            new StructuralPropertiesVector(new RandomPartitioningProperty(context.getComputationNodeDomain()), null);
    StructuralPropertiesVector innerSide =
            new StructuralPropertiesVector(new BroadcastPartitioningProperty(context.getComputationNodeDomain()), null);
    StructuralPropertiesVector[] childRequirements = {
            OperatorPropertiesUtil.checkUnpartitionedAndGetPropertiesVector(op, outerSide),
            OperatorPropertiesUtil.checkUnpartitionedAndGetPropertiesVector(op, innerSide)
    };
    return new PhysicalRequirements(childRequirements, IPartitioningRequirementsCoordinator.NO_COORDINATION);
}
Usage example of org.apache.hyracks.algebricks.common.exceptions.NotImplementedException in the Apache AsterixDB project.
From the class NestedLoopJoinPOperator, method contributeRuntimeOperator:
/**
 * Builds the Hyracks runtime operator for a nested loop join: compiles the join
 * condition into a tuple-pair comparator, creates the (inner or left-outer)
 * join descriptor, and wires both logical inputs into the job graph.
 *
 * @throws AlgebricksException if the condition cannot be compiled
 * @throws NotImplementedException for join kinds other than INNER/LEFT_OUTER
 */
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op, IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema) throws AlgebricksException {
    AbstractBinaryJoinOperator join = (AbstractBinaryJoinOperator) op;
    RecordDescriptor recDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context);
    // The join condition is evaluated against the combined (propagated) schema.
    IOperatorSchema[] conditionInputSchemas = { propagatedSchema };
    IScalarEvaluatorFactory cond = context.getExpressionRuntimeProvider()
            .createEvaluatorFactory(join.getCondition().getValue(), context.getTypeEnvironment(op), conditionInputSchemas, context);
    ITuplePairComparatorFactory comparatorFactory =
            new TuplePairEvaluatorFactory(cond, context.getBinaryBooleanInspectorFactory());
    IOperatorDescriptorRegistry spec = builder.getJobSpec();
    IOperatorDescriptor opDesc;
    switch (kind) {
        case INNER:
            opDesc = new NestedLoopJoinOperatorDescriptor(spec, comparatorFactory, recDescriptor, memSize, false, null);
            break;
        case LEFT_OUTER:
            // One MISSING writer per column of the inner input, used to pad
            // non-matching outer tuples.
            IMissingWriterFactory[] nonMatchWriterFactories = new IMissingWriterFactory[inputSchemas[1].getSize()];
            for (int col = 0; col < nonMatchWriterFactories.length; col++) {
                nonMatchWriterFactories[col] = context.getMissingWriterFactory();
            }
            opDesc = new NestedLoopJoinOperatorDescriptor(spec, comparatorFactory, recDescriptor, memSize, true, nonMatchWriterFactories);
            break;
        default:
            throw new NotImplementedException();
    }
    contributeOpDesc(builder, (AbstractLogicalOperator) op, opDesc);
    // Outer input feeds port 0, inner input feeds port 1.
    builder.contributeGraphEdge(op.getInputs().get(0).getValue(), 0, op, 0);
    builder.contributeGraphEdge(op.getInputs().get(1).getValue(), 0, op, 1);
}
Usage example of org.apache.hyracks.algebricks.common.exceptions.NotImplementedException in the Apache AsterixDB project.
From the class containing JoinMultiComparator, method generateHashJoinRuntime:
/**
 * Creates the hybrid hash join operator descriptor for the current join kind.
 * INNER joins get no missing-value writers; LEFT_OUTER joins get one MISSING
 * writer per column of the build-side (second) input.
 *
 * @return the configured hybrid hash join descriptor
 * @throws AlgebricksException wrapping any HyracksDataException from descriptor
 *         construction
 * @throws NotImplementedException for join kinds other than INNER/LEFT_OUTER
 */
private IOperatorDescriptor generateHashJoinRuntime(JobGenContext context, IOperatorSchema[] inputSchemas, int[] keysLeft, int[] keysRight, IBinaryHashFunctionFactory[] hashFunFactories, IBinaryComparatorFactory[] comparatorFactories, IPredicateEvaluatorFactory predEvaluatorFactory, RecordDescriptor recDescriptor, IOperatorDescriptorRegistry spec) throws AlgebricksException {
    try {
        switch (kind) {
            case INNER:
                return new HybridHashJoinOperatorDescriptor(spec, getMemSizeInFrames(), maxInputBuildSizeInFrames, aveRecordsPerFrame, getFudgeFactor(), keysLeft, keysRight, hashFunFactories, comparatorFactories, recDescriptor, predEvaluatorFactory, false, null);
            case LEFT_OUTER:
                // Pad non-matching probe tuples with MISSING for every
                // build-side column.
                IMissingWriterFactory[] missingWriters = new IMissingWriterFactory[inputSchemas[1].getSize()];
                for (int col = 0; col < missingWriters.length; col++) {
                    missingWriters[col] = context.getMissingWriterFactory();
                }
                return new HybridHashJoinOperatorDescriptor(spec, getMemSizeInFrames(), maxInputBuildSizeInFrames, aveRecordsPerFrame, getFudgeFactor(), keysLeft, keysRight, hashFunFactories, comparatorFactories, recDescriptor, predEvaluatorFactory, true, missingWriters);
            default:
                throw new NotImplementedException();
        }
    } catch (HyracksDataException e) {
        // Surface Hyracks-level failures as compiler-level exceptions.
        throw new AlgebricksException(e);
    }
}
Aggregations