Use of org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference in project asterixdb by apache.
The class GetRecordFieldValueEvalFactory, method createScalarEvaluator.
@Override
public IScalarEvaluator createScalarEvaluator(final IHyracksTaskContext ctx) throws HyracksDataException {
    return new IScalarEvaluator() {

        private final ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();
        private final DataOutput out = resultStorage.getDataOutput();
        private final IPointable inputArg0 = new VoidPointable();
        private final IPointable inputArg1 = new VoidPointable();
        private final IScalarEvaluator recordEval = recordEvalFactory.createScalarEvaluator(ctx);
        private final IScalarEvaluator fieldNameEval = fldNameEvalFactory.createScalarEvaluator(ctx);
        private final RuntimeRecordTypeInfo recTypeInfo = new RuntimeRecordTypeInfo();

        {
            recTypeInfo.reset(recordType);
        }

        @Override
        public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
            try {
                resultStorage.reset();
                fieldNameEval.evaluate(tuple, inputArg1);
                byte[] serFldName = inputArg1.getByteArray();
                int serFldNameOffset = inputArg1.getStartOffset();
                int serFldNameLen = inputArg1.getLength();
                recordEval.evaluate(tuple, inputArg0);
                byte[] serRecord = inputArg0.getByteArray();
                int serRecordOffset = inputArg0.getStartOffset();
                int serRecordLen = inputArg0.getLength();
                if (serRecord[serRecordOffset] != ATypeTag.SERIALIZED_RECORD_TYPE_TAG) {
                    throw new TypeMismatchException(BuiltinFunctions.GET_RECORD_FIELD_VALUE, 0,
                            serRecord[serRecordOffset], ATypeTag.SERIALIZED_RECORD_TYPE_TAG);
                }
                int subFieldOffset = -1;
                int subFieldLength = -1;
                // Look at closed fields first.
                int subFieldIndex = recTypeInfo.getFieldIndex(serFldName, serFldNameOffset + 1, serFldNameLen - 1);
                if (subFieldIndex >= 0) {
                    int nullBitmapSize = RecordUtil.computeNullBitmapSize(recordType);
                    subFieldOffset = ARecordSerializerDeserializer.getFieldOffsetById(serRecord, serRecordOffset,
                            subFieldIndex, nullBitmapSize, recordType.isOpen());
                    if (subFieldOffset == 0) {
                        // The field is null; the null bitmap already told us so.
                        out.writeByte(ATypeTag.SERIALIZED_NULL_TYPE_TAG);
                        result.set(resultStorage);
                        return;
                    }
                    ATypeTag fieldTypeTag = recordType.getFieldTypes()[subFieldIndex].getTypeTag();
                    subFieldLength =
                            NonTaggedFormatUtil.getFieldValueLength(serRecord, subFieldOffset, fieldTypeTag, false);
                    // Write the result.
                    out.writeByte(fieldTypeTag.serialize());
                    out.write(serRecord, subFieldOffset, subFieldLength);
                    result.set(resultStorage);
                    return;
                }
                // Look at open fields.
                subFieldOffset = ARecordSerializerDeserializer.getFieldOffsetByName(serRecord, serRecordOffset,
                        serRecordLen, serFldName, serFldNameOffset);
                if (subFieldOffset < 0) {
                    out.writeByte(ATypeTag.SERIALIZED_MISSING_TYPE_TAG);
                    result.set(resultStorage);
                    return;
                }
                // Get the field length.
                ATypeTag fieldValueTypeTag =
                        EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(serRecord[subFieldOffset]);
                subFieldLength =
                        NonTaggedFormatUtil.getFieldValueLength(serRecord, subFieldOffset, fieldValueTypeTag, true) + 1;
                // Write the result.
                result.set(serRecord, subFieldOffset, subFieldLength);
            } catch (IOException e) {
                throw new HyracksDataException(e);
            } catch (AsterixException e) {
                throw new HyracksDataException(e);
            }
        }
    };
}
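The same call pattern drives every evaluator in this listing: a factory builds one IScalarEvaluator per task, and that evaluator is invoked once per input tuple, leaving a tagged, serialized value in an IPointable. A minimal driving sketch follows; it is not taken from the AsterixDB sources, and the factory, ctx, and tuple arguments are assumed to be supplied by the surrounding operator.

import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluator;
import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.api.IPointable;
import org.apache.hyracks.data.std.primitive.VoidPointable;
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;

// Hypothetical driver class, for illustration only.
public final class EvaluatorDriverSketch {

    private EvaluatorDriverSketch() {
    }

    // Evaluates the function on one tuple and copies out the tagged, serialized result bytes.
    public static byte[] evaluateOnce(IScalarEvaluatorFactory factory, IHyracksTaskContext ctx,
            IFrameTupleReference tuple) throws HyracksDataException {
        // In a real operator the evaluator is created once per task, not once per tuple.
        IScalarEvaluator eval = factory.createScalarEvaluator(ctx);
        IPointable result = new VoidPointable();
        eval.evaluate(tuple, result);
        // The pointable refers to the evaluator's internal result storage, so copy before reuse.
        byte[] copy = new byte[result.getLength()];
        System.arraycopy(result.getByteArray(), result.getStartOffset(), copy, 0, result.getLength());
        return copy;
    }
}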
Use of org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference in project asterixdb by apache.
The class RecordAddFieldsDescriptor, method createEvaluatorFactory.
@Override
public IScalarEvaluatorFactory createEvaluatorFactory(final IScalarEvaluatorFactory[] args) {
    return new IScalarEvaluatorFactory() {

        private static final long serialVersionUID = 1L;

        @Override
        public IScalarEvaluator createScalarEvaluator(final IHyracksTaskContext ctx) throws HyracksDataException {
            final PointableAllocator allocator = new PointableAllocator();
            final IVisitablePointable vp0 = allocator.allocateRecordValue(inRecType);
            final IVisitablePointable vp1 = allocator.allocateListValue(inListType);
            final IPointable argPtr0 = new VoidPointable();
            final IPointable argPtr1 = new VoidPointable();
            final IScalarEvaluator eval0 = args[0].createScalarEvaluator(ctx);
            final IScalarEvaluator eval1 = args[1].createScalarEvaluator(ctx);
            final ArrayBackedValueStorage fieldNamePointable = new ArrayBackedValueStorage();
            final ArrayBackedValueStorage fieldValuePointer = new ArrayBackedValueStorage();
            final PointableHelper pointableHelper = new PointableHelper();
            try {
                pointableHelper.serializeString("field-name", fieldNamePointable, true);
                pointableHelper.serializeString("field-value", fieldValuePointer, true);
            } catch (AsterixException e) {
                throw new HyracksDataException(e);
            }
            return new IScalarEvaluator() {

                // The default 32k frame size.
                public static final int TABLE_FRAME_SIZE = 32768;
                // The default initial size of the field-name hash table.
                public static final int TABLE_SIZE = 100;
                private final RecordBuilder recordBuilder = new RecordBuilder();
                private final RuntimeRecordTypeInfo requiredRecordTypeInfo = new RuntimeRecordTypeInfo();
                private final IBinaryHashFunction putHashFunc =
                        ListItemBinaryHashFunctionFactory.INSTANCE.createBinaryHashFunction();
                private final IBinaryHashFunction getHashFunc =
                        ListItemBinaryHashFunctionFactory.INSTANCE.createBinaryHashFunction();
                private final BinaryEntry keyEntry = new BinaryEntry();
                private final BinaryEntry valEntry = new BinaryEntry();
                private final IVisitablePointable tempValReference = allocator.allocateEmpty();
                private final IBinaryComparator cmp =
                        ListItemBinaryComparatorFactory.INSTANCE.createBinaryComparator();
                private BinaryHashMap hashMap =
                        new BinaryHashMap(TABLE_SIZE, TABLE_FRAME_SIZE, putHashFunc, getHashFunc, cmp);
                private ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();
                private DataOutput out = resultStorage.getDataOutput();

                @Override
                public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
                    resultStorage.reset();
                    recordBuilder.reset(outRecType);
                    requiredRecordTypeInfo.reset(outRecType);
                    eval0.evaluate(tuple, argPtr0);
                    eval1.evaluate(tuple, argPtr1);
                    // Make sure we get a valid record.
                    byte typeTag0 = argPtr0.getByteArray()[argPtr0.getStartOffset()];
                    if (typeTag0 != ATypeTag.SERIALIZED_RECORD_TYPE_TAG) {
                        throw new TypeMismatchException(getIdentifier(), 0, typeTag0,
                                ATypeTag.SERIALIZED_RECORD_TYPE_TAG);
                    }
                    // Make sure we get a valid list.
                    byte typeTag1 = argPtr1.getByteArray()[argPtr1.getStartOffset()];
                    if (typeTag1 != ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG) {
                        throw new TypeMismatchException(getIdentifier(), 1, typeTag1,
                                ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG);
                    }
                    vp0.set(argPtr0);
                    vp1.set(argPtr1);
                    ARecordVisitablePointable recordPointable = (ARecordVisitablePointable) vp0;
                    AListVisitablePointable listPointable = (AListVisitablePointable) vp1;
                    // Initialize our hash map.
                    int tableSize = recordPointable.getFieldNames().size() + listPointable.getItems().size();
                    // Construct a new hash table only when the existing one is too small,
                    // thus avoiding unnecessary object construction.
                    if (hashMap == null || tableSize > TABLE_SIZE) {
                        hashMap = new BinaryHashMap(tableSize, TABLE_FRAME_SIZE, putHashFunc, getHashFunc, cmp);
                    } else {
                        hashMap.clear();
                    }
                    addFields(recordPointable, listPointable);
                    recordBuilder.write(out, true);
                    result.set(resultStorage);
                }

                private void addFields(ARecordVisitablePointable inputRecordPointer,
                        AListVisitablePointable listPointable) throws HyracksDataException {
                    List<IVisitablePointable> inputRecordFieldNames = inputRecordPointer.getFieldNames();
                    List<IVisitablePointable> inputRecordFieldValues = inputRecordPointer.getFieldValues();
                    List<IVisitablePointable> inputFields = listPointable.getItems();
                    IVisitablePointable namePointable = null;
                    IVisitablePointable valuePointable = null;
                    int numInputRecordFields = inputRecordFieldNames.size();
                    try {
                        // Add the original record's fields without duplicate checking.
                        for (int i = 0; i < numInputRecordFields; ++i) {
                            IVisitablePointable fnp = inputRecordFieldNames.get(i);
                            IVisitablePointable fvp = inputRecordFieldValues.get(i);
                            int pos = requiredRecordTypeInfo.getFieldIndex(fnp.getByteArray(),
                                    fnp.getStartOffset() + 1, fnp.getLength() - 1);
                            if (pos >= 0) {
                                recordBuilder.addField(pos, fvp);
                            } else {
                                recordBuilder.addField(fnp, fvp);
                            }
                            keyEntry.set(fnp.getByteArray(), fnp.getStartOffset(), fnp.getLength());
                            valEntry.set(fvp.getByteArray(), fvp.getStartOffset(), fvp.getLength());
                            hashMap.put(keyEntry, valEntry);
                        }
                        // Get the fields from the list of records.
                        for (int i = 0; i < inputFields.size(); i++) {
                            if (!PointableHelper.sameType(ATypeTag.OBJECT, inputFields.get(i))) {
                                throw new AsterixException("Expected list of record, got "
                                        + PointableHelper.getTypeTag(inputFields.get(i)));
                            }
                            List<IVisitablePointable> names =
                                    ((ARecordVisitablePointable) inputFields.get(i)).getFieldNames();
                            List<IVisitablePointable> values =
                                    ((ARecordVisitablePointable) inputFields.get(i)).getFieldValues();
                            // Get the name and value of the field to be added.
                            // Loop over the pair's fields to handle the case where a user switches their order.
                            IVisitablePointable fieldName;
                            for (int j = 0; j < names.size(); j++) {
                                fieldName = names.get(j);
                                // If fieldName is "field-name", then read the name ...
                                if (PointableHelper.byteArrayEqual(fieldNamePointable, fieldName)) {
                                    namePointable = values.get(j);
                                } else {
                                    // ... otherwise fieldName is "field-value", so read the value.
                                    valuePointable = values.get(j);
                                }
                            }
                            if (namePointable == null || valuePointable == null) {
                                throw new InvalidDataFormatException(getIdentifier(), "fields to be added");
                            }
                            // Check whether the field being added belongs to the closed part of the output type.
                            int pos = requiredRecordTypeInfo.getFieldIndex(namePointable.getByteArray(),
                                    namePointable.getStartOffset() + 1, namePointable.getLength() - 1);
                            keyEntry.set(namePointable.getByteArray(), namePointable.getStartOffset(),
                                    namePointable.getLength());
                            // Check whether the field is already in the record being built.
                            BinaryEntry entry = hashMap.get(keyEntry);
                            if (entry != null) {
                                tempValReference.set(entry.getBuf(), entry.getOffset(), entry.getLength());
                                // If the values differ, throw a conflicting-duplicate error; otherwise ignore.
                                if (!PointableHelper.byteArrayEqual(valuePointable, tempValReference)) {
                                    throw new RuntimeDataException(ErrorCode.DUPLICATE_FIELD_NAME, getIdentifier());
                                }
                            } else {
                                if (pos > -1) {
                                    recordBuilder.addField(pos, valuePointable);
                                } else {
                                    recordBuilder.addField(namePointable, valuePointable);
                                }
                                valEntry.set(valuePointable.getByteArray(), valuePointable.getStartOffset(),
                                        valuePointable.getLength());
                                hashMap.put(keyEntry, valEntry);
                            }
                        }
                    } catch (AsterixException e) {
                        throw new HyracksDataException(e);
                    }
                }
            };
        }
    };
}
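The duplicate handling above hinges on a BinaryHashMap keyed by serialized field names. The hypothetical helper below is not part of the AsterixDB sources; it restates that check in isolation, would have to live inside the anonymous IScalarEvaluator above so it can reuse its hashMap, keyEntry, valEntry, and tempValReference fields, and uses only calls already shown in addFields.

    // Hypothetical helper (not in the sources). Records a (name, value) pair and returns false
    // only when the same field name is already present with a different value, mirroring the
    // conflicting-duplicate check in addFields(...).
    private boolean putIfAbsentOrSame(IVisitablePointable name, IVisitablePointable value)
            throws HyracksDataException, AsterixException {
        keyEntry.set(name.getByteArray(), name.getStartOffset(), name.getLength());
        BinaryEntry existing = hashMap.get(keyEntry);
        if (existing != null) {
            tempValReference.set(existing.getBuf(), existing.getOffset(), existing.getLength());
            // Same name: accept only if the stored value is byte-identical.
            return PointableHelper.byteArrayEqual(value, tempValReference);
        }
        valEntry.set(value.getByteArray(), value.getStartOffset(), value.getLength());
        hashMap.put(keyEntry, valEntry);
        return true;
    }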
Use of org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference in project asterixdb by apache.
The class StringLengthDescriptor, method createEvaluatorFactory.
@Override
public IScalarEvaluatorFactory createEvaluatorFactory(final IScalarEvaluatorFactory[] args) {
    return new IScalarEvaluatorFactory() {

        private static final long serialVersionUID = 1L;

        @Override
        public IScalarEvaluator createScalarEvaluator(final IHyracksTaskContext ctx) throws HyracksDataException {
            return new IScalarEvaluator() {

                private AMutableInt64 result = new AMutableInt64(0);
                private ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();
                private DataOutput out = resultStorage.getDataOutput();
                private IPointable inputArg = new VoidPointable();
                private IScalarEvaluator eval = args[0].createScalarEvaluator(ctx);
                @SuppressWarnings("unchecked")
                private ISerializerDeserializer<AInt64> int64Serde =
                        SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);

                @Override
                public void evaluate(IFrameTupleReference tuple, IPointable resultPointable)
                        throws HyracksDataException {
                    try {
                        resultStorage.reset();
                        eval.evaluate(tuple, inputArg);
                        byte[] serString = inputArg.getByteArray();
                        int offset = inputArg.getStartOffset();
                        if (serString[offset] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
                            int len = UTF8StringUtil.getUTFLength(serString, offset + 1);
                            result.setValue(len);
                            int64Serde.serialize(result, out);
                        } else {
                            throw new TypeMismatchException(getIdentifier(), 0, serString[offset],
                                    ATypeTag.SERIALIZED_STRING_TYPE_TAG);
                        }
                        resultPointable.set(resultStorage);
                    } catch (IOException e1) {
                        throw new HyracksDataException(e1);
                    }
                }
            };
        }
    };
}
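For callers, the evaluator's output is an eight-byte integer preceded by a type tag. The decoding sketch below is not from the sources: it assumes the pointable holds the output of the evaluator above, reads the value back with the same serde the evaluator used for writing, and its import locations are assumed to match those of the descriptor (plus java.io for the streams).

    // Hypothetical decoding helper, for illustration only; `tagged` holds the evaluator's output.
    static long decodeInt64Result(IPointable tagged) throws HyracksDataException {
        byte[] data = tagged.getByteArray();
        int start = tagged.getStartOffset();
        if (data[start] != ATypeTag.SERIALIZED_INT64_TYPE_TAG) {
            throw new IllegalStateException("unexpected type tag: " + data[start]);
        }
        // The provider's AINT64 serde is assumed to be tag-aware, so feed it the bytes including the tag.
        @SuppressWarnings("unchecked")
        ISerializerDeserializer<AInt64> serde =
                SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
        DataInput in = new DataInputStream(new ByteArrayInputStream(data, start, tagged.getLength()));
        return serde.deserialize(in).getLongValue();
    }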
Use of org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference in project asterixdb by apache.
The class StringRepeatDescriptor, method createEvaluatorFactory.
@Override
public IScalarEvaluatorFactory createEvaluatorFactory(final IScalarEvaluatorFactory[] args) {
    return new IScalarEvaluatorFactory() {

        private static final long serialVersionUID = 1L;

        @Override
        public IScalarEvaluator createScalarEvaluator(final IHyracksTaskContext ctx) throws HyracksDataException {
            return new IScalarEvaluator() {

                // Argument evaluators.
                private IScalarEvaluator evalString = args[0].createScalarEvaluator(ctx);
                private IScalarEvaluator evalStart = args[1].createScalarEvaluator(ctx);
                // Argument pointers.
                private IPointable argString = new VoidPointable();
                private IPointable argNumber = new VoidPointable();
                // For outputting the result.
                private ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();
                private DataOutput out = resultStorage.getDataOutput();
                private byte[] tempLengthArray = new byte[5];

                @Override
                public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
                    resultStorage.reset();
                    // Calls argument evaluators.
                    evalString.evaluate(tuple, argString);
                    evalStart.evaluate(tuple, argNumber);
                    // Gets the repeating times.
                    byte[] bytes = argNumber.getByteArray();
                    int offset = argNumber.getStartOffset();
                    int repeatingTimes = ATypeHierarchy.getIntegerValue(getIdentifier().getName(), 1, bytes, offset);
                    // Checks repeatingTimes. It should be a non-negative value.
                    if (repeatingTimes < 0) {
                        throw new RuntimeDataException(ErrorCode.NEGATIVE_VALUE, getIdentifier(), 1, repeatingTimes);
                    }
                    // Gets the input string.
                    bytes = argString.getByteArray();
                    offset = argString.getStartOffset();
                    // Checks the type of the string argument.
                    if (bytes[offset] != ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
                        throw new TypeMismatchException(getIdentifier(), 0, bytes[offset],
                                ATypeTag.SERIALIZED_STRING_TYPE_TAG);
                    }
                    // Calculates the result string length.
                    int inputLen = UTF8StringUtil.getUTFLength(bytes, offset + 1);
                    // Can throw an overflow exception.
                    int resultLen = Math.multiplyExact(inputLen, repeatingTimes);
                    int cbytes = UTF8StringUtil.encodeUTF8Length(resultLen, tempLengthArray, 0);
                    // Writes the output string.
                    int inputStringStart = offset + 1 + UTF8StringUtil.getNumBytesToStoreLength(inputLen);
                    try {
                        out.writeByte(ATypeTag.SERIALIZED_STRING_TYPE_TAG);
                        out.write(tempLengthArray, 0, cbytes);
                        for (int numRepeats = 0; numRepeats < repeatingTimes; ++numRepeats) {
                            out.write(bytes, inputStringStart, inputLen);
                        }
                    } catch (IOException e) {
                        throw new HyracksDataException(e);
                    }
                    result.set(resultStorage);
                }
            };
        }
    };
}
Use of org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference in project asterixdb by apache.
The class SubstringAfterDescriptor, method createEvaluatorFactory.
@Override
public IScalarEvaluatorFactory createEvaluatorFactory(final IScalarEvaluatorFactory[] args) {
    return new IScalarEvaluatorFactory() {

        private static final long serialVersionUID = 1L;

        @Override
        public IScalarEvaluator createScalarEvaluator(final IHyracksTaskContext ctx) throws HyracksDataException {
            return new IScalarEvaluator() {

                private ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();
                private DataOutput out = resultStorage.getDataOutput();
                private IPointable array0 = new VoidPointable();
                private IPointable array1 = new VoidPointable();
                private IScalarEvaluator evalString = args[0].createScalarEvaluator(ctx);
                private IScalarEvaluator evalPattern = args[1].createScalarEvaluator(ctx);
                private final GrowableArray array = new GrowableArray();
                private final UTF8StringBuilder builder = new UTF8StringBuilder();
                private final UTF8StringPointable stringPtr = new UTF8StringPointable();
                private final UTF8StringPointable patternPtr = new UTF8StringPointable();

                @Override
                public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
                    resultStorage.reset();
                    evalString.evaluate(tuple, array0);
                    byte[] src = array0.getByteArray();
                    int srcOffset = array0.getStartOffset();
                    int srcLen = array0.getLength();
                    evalPattern.evaluate(tuple, array1);
                    byte[] pattern = array1.getByteArray();
                    int patternOffset = array1.getStartOffset();
                    int patternLen = array1.getLength();
                    if (src[srcOffset] != ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
                        throw new TypeMismatchException(getIdentifier(), 0, src[srcOffset],
                                ATypeTag.SERIALIZED_STRING_TYPE_TAG);
                    }
                    if (pattern[patternOffset] != ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
                        throw new TypeMismatchException(getIdentifier(), 1, pattern[patternOffset],
                                ATypeTag.SERIALIZED_STRING_TYPE_TAG);
                    }
                    try {
                        stringPtr.set(src, srcOffset + 1, srcLen - 1);
                        patternPtr.set(pattern, patternOffset + 1, patternLen - 1);
                        array.reset();
                        UTF8StringPointable.substrAfter(stringPtr, patternPtr, builder, array);
                        out.writeByte(ATypeTag.SERIALIZED_STRING_TYPE_TAG);
                        out.write(array.getByteArray(), 0, array.getLength());
                    } catch (IOException e) {
                        throw new HyracksDataException(e);
                    }
                    result.set(resultStorage);
                }
            };
        }
    };
}
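StringLengthDescriptor, StringRepeatDescriptor, and SubstringAfterDescriptor all share the same first step: verify the leading type tag, then hand the untagged, length-prefixed UTF-8 payload to the string utilities. The hypothetical helper below is not taken from the sources; it isolates that step, assumes a FunctionIdentifier is available for error reporting, and relies only on the TypeMismatchException constructor and pointable calls used in the evaluators above.

    // Hypothetical helper, for illustration only. Points `target` at the UTF-8 payload of the
    // tagged, serialized string held by `arg`, or fails with the usual type-mismatch error.
    static void pointAtUtf8Payload(UTF8StringPointable target, IPointable arg, FunctionIdentifier fid,
            int argIndex) throws HyracksDataException {
        byte[] bytes = arg.getByteArray();
        int offset = arg.getStartOffset();
        if (bytes[offset] != ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
            throw new TypeMismatchException(fid, argIndex, bytes[offset], ATypeTag.SERIALIZED_STRING_TYPE_TAG);
        }
        // Skip the one-byte type tag; the rest is the length-prefixed UTF-8 payload.
        target.set(bytes, offset + 1, arg.getLength() - 1);
    }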