Use of java.io.DataOutput in project asterixdb by Apache.
The class LongParserFactory, method createValueParser.
@Override
public IValueParser createValueParser() {
    return new IValueParser() {

        @Override
        public void parse(char[] buffer, int start, int length, DataOutput out) throws HyracksDataException {
            long n = 0;
            int sign = 1;
            int i = 0;
            // leading whitespace, then an optional sign and/or first digit
            boolean pre = true;
            for (; pre && i < length; ++i) {
                char ch = buffer[i + start];
                switch (ch) {
                    case ' ':
                    case '\t':
                    case '\n':
                    case '\r':
                    case '\f':
                        break;
                    case '-':
                        sign = -1;
                        pre = false;
                        break;
                    case '0':
                    case '1':
                    case '2':
                    case '3':
                    case '4':
                    case '5':
                    case '6':
                    case '7':
                    case '8':
                    case '9':
                        pre = false;
                        n = n * 10 + (ch - '0');
                        break;
                    default:
                        String errorString = new String(buffer, i + start, length - i);
                        throw new HyracksDataException("Long Parser - a digit is expected, but encountered this character: "
                                + ch + " in the incoming input: " + errorString);
                }
            }
            // the remaining digits, terminated by optional trailing whitespace
            boolean post = false;
            for (; !post && i < length; ++i) {
                char ch = buffer[i + start];
                switch (ch) {
                    case '0':
                    case '1':
                    case '2':
                    case '3':
                    case '4':
                    case '5':
                    case '6':
                    case '7':
                    case '8':
                    case '9':
                        n = n * 10 + (ch - '0');
                        break;
                    case ' ':
                    case '\t':
                    case '\n':
                    case '\r':
                    case '\f':
                        // end of the digits; setting post exits this loop so trailing whitespace is accepted below
                        post = true;
                        break;
                    default:
                        String errorString = new String(buffer, i + start, length - i);
                        throw new HyracksDataException("Long Parser - a digit is expected, but encountered this character: "
                                + ch + " in the incoming input: " + errorString);
                }
            }
            // anything after the number must be whitespace
            for (; i < length; ++i) {
                char ch = buffer[i + start];
                switch (ch) {
                    case ' ':
                    case '\t':
                    case '\n':
                    case '\r':
                    case '\f':
                        break;
                    default:
                        String errorString = new String(buffer, i + start, length - i);
                        throw new HyracksDataException("Long Parser - only whitespace (space, tab, new line, carriage return, or form feed) is expected after the number, but encountered this character: "
                                + ch + " in the incoming input: " + errorString);
                }
            }
            try {
                out.writeLong(n * sign);
            } catch (IOException e) {
                throw new HyracksDataException(e);
            }
        }
    };
}
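As a minimal sketch (not from the AsterixDB sources), the parser above can be driven with plain JDK streams: a DataOutputStream acts as the java.io.DataOutput sink, and the eight bytes written by writeLong can be read back with DataInputStream. The helper name parseLongViaParser and the way the IValueParser instance is obtained are assumptions for illustration.

// Illustrative sketch only; needs java.io.ByteArrayInputStream, ByteArrayOutputStream,
// DataInputStream and DataOutputStream. The IValueParser is assumed to come from the factory above.
static long parseLongViaParser(IValueParser parser, String text) throws Exception {
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(bytes); // the DataOutput sink
    char[] chars = text.toCharArray();
    parser.parse(chars, 0, chars.length, out); // writes the parsed value via writeLong
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
    return in.readLong(); // e.g. "  -42  " yields -42L
}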
Use of java.io.DataOutput in project asterixdb by Apache.
The class TupleUtils, method createTuple.
@SuppressWarnings("unchecked")
public static void createTuple(ArrayTupleBuilder tupleBuilder, ArrayTupleReference tuple,
        ISerializerDeserializer[] fieldSerdes, boolean filtered, final Object... fields)
        throws HyracksDataException {
    DataOutput dos = tupleBuilder.getDataOutput();
    tupleBuilder.reset();
    // serialize each field into the builder's DataOutput and record its end offset
    int numFields = Math.min(tupleBuilder.getFieldEndOffsets().length, fields.length);
    for (int i = 0; i < numFields; i++) {
        fieldSerdes[i].serialize(fields[i], dos);
        tupleBuilder.addFieldEndOffset();
    }
    // a filtered tuple carries an extra filter field, copied from the first field
    if (filtered) {
        fieldSerdes[0].serialize(fields[0], dos);
        tupleBuilder.addFieldEndOffset();
    }
    tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
}
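A hedged usage sketch of the method above (the serdes, field count, and values are illustrative, not taken from the sources): each field goes through the builder's shared DataOutput, and the ArrayTupleReference ends up wrapping the builder's byte array.

// Illustrative only; two fields, no filter field.
ArrayTupleBuilder builder = new ArrayTupleBuilder(2);
ArrayTupleReference tuple = new ArrayTupleReference();
ISerializerDeserializer[] serdes =
        { IntegerSerializerDeserializer.INSTANCE, new UTF8StringSerializerDeserializer() };
TupleUtils.createTuple(builder, tuple, serdes, false, 42, "forty-two");
// tuple now references builder.getByteArray() with two field-end offsets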
Use of java.io.DataOutput in project asterixdb by Apache.
The class TupleUtils, method createIntegerTuple.
public static void createIntegerTuple(ArrayTupleBuilder tupleBuilder, ArrayTupleReference tuple, boolean filtered,
        final int... fields) throws HyracksDataException {
    DataOutput dos = tupleBuilder.getDataOutput();
    tupleBuilder.reset();
    for (final int i : fields) {
        IntegerSerializerDeserializer.INSTANCE.serialize(i, dos);
        tupleBuilder.addFieldEndOffset();
    }
    if (filtered) {
        IntegerSerializerDeserializer.INSTANCE.serialize(fields[0], dos);
        tupleBuilder.addFieldEndOffset();
    }
    tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
}
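The integer-only variant can be exercised the same way; again, the field count and values below are illustrative.

// Illustrative only; three integer fields plus a filter field copied from fields[0].
ArrayTupleBuilder builder = new ArrayTupleBuilder(4); // 3 fields + 1 filter field
ArrayTupleReference tuple = new ArrayTupleReference();
TupleUtils.createIntegerTuple(builder, tuple, true, 1, 2, 3);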
Use of java.io.DataOutput in project asterixdb by Apache.
The class LSMRTreeWithAntiMatterTuplesSecondaryIndexSearchOperatorTest, method shouldWriteFilterValueIfAppendFilterIsTrue.
@Test
public void shouldWriteFilterValueIfAppendFilterIsTrue() throws Exception {
    JobSpecification spec = new JobSpecification();
    // build the search-key tuple (four doubles) through the builder's DataOutput
    ArrayTupleBuilder tb = new ArrayTupleBuilder(secondaryKeyFieldCount);
    DataOutput dos = tb.getDataOutput();
    tb.reset();
    DoubleSerializerDeserializer.INSTANCE.serialize(61.2894, dos);
    tb.addFieldEndOffset();
    DoubleSerializerDeserializer.INSTANCE.serialize(-149.624, dos);
    tb.addFieldEndOffset();
    DoubleSerializerDeserializer.INSTANCE.serialize(61.8894, dos);
    tb.addFieldEndOffset();
    DoubleSerializerDeserializer.INSTANCE.serialize(-149.024, dos);
    tb.addFieldEndOffset();
    ISerializerDeserializer[] keyRecDescSers = { DoubleSerializerDeserializer.INSTANCE,
            DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
            DoubleSerializerDeserializer.INSTANCE };
    RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
    ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
            keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
    int[] keyFields = { 0, 1, 2, 3 };
    RTreeSearchOperatorDescriptor secondarySearchOp = new RTreeSearchOperatorDescriptor(spec,
            secondaryWithFilterRecDesc, keyFields, true, true, secondaryHelperFactory, false, false, null,
            NoOpOperationCallbackFactory.INSTANCE, null, null, false);
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondarySearchOp, NC1_ID);
    IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { createFile(nc1) });
    IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
    spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, secondarySearchOp, 0);
    spec.connect(new OneToOneConnectorDescriptor(spec), secondarySearchOp, 0, printer, 0);
    spec.addRoot(printer);
    runTest(spec);
}
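As an aside (not part of the test), the bytes pushed through the DataOutput above can be read back with the symmetric DataInput API; this sketch assumes DoubleSerializerDeserializer writes plain writeDouble bytes and that the builder's byte array holds the field data contiguously from offset 0.

// Illustrative check only; tb is the ArrayTupleBuilder built above.
DataInputStream in = new DataInputStream(new ByteArrayInputStream(tb.getByteArray(), 0, tb.getSize()));
double[] key = new double[4];
for (int f = 0; f < 4; f++) {
    key[f] = in.readDouble(); // 61.2894, -149.624, 61.8894, -149.024
}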
Use of java.io.DataOutput in project asterixdb by Apache.
The class AbstractRTreeOperatorTest, method loadSecondaryIndex.
protected void loadSecondaryIndex() throws Exception {
    JobSpecification spec = new JobSpecification();
    // build a dummy tuple; with the open low/high keys below its content is never used as a search key
    ArrayTupleBuilder tb = new ArrayTupleBuilder(primaryKeyFieldCount * 2);
    DataOutput dos = tb.getDataOutput();
    tb.reset();
    new UTF8StringSerializerDeserializer().serialize("0", dos);
    tb.addFieldEndOffset();
    ISerializerDeserializer[] keyRecDescSers =
            { new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() };
    RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
    ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
            keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
    // null low key fields: scan from -infinity
    int[] lowKeyFields = null;
    // null high key fields: scan to +infinity
    int[] highKeyFields = null;
    // scan the primary index
    BTreeSearchOperatorDescriptor primarySearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
            lowKeyFields, highKeyFields, true, true, primaryHelperFactory, false, false, null,
            NoOpOperationCallbackFactory.INSTANCE, null, null, false);
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primarySearchOp, NC1_ID);
    // bulk-load the secondary index from the primary-index output
    int[] fieldPermutation = { 6, 7, 8, 9, 0 };
    TreeIndexBulkLoadOperatorDescriptor secondaryBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
            secondaryRecDesc, fieldPermutation, 0.7f, false, 1000L, true, secondaryHelperFactory);
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryBulkLoad, NC1_ID);
    NullSinkOperatorDescriptor nsOpDesc = new NullSinkOperatorDescriptor(spec);
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, nsOpDesc, NC1_ID);
    spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primarySearchOp, 0);
    spec.connect(new OneToOneConnectorDescriptor(spec), primarySearchOp, 0, secondaryBulkLoad, 0);
    spec.connect(new OneToOneConnectorDescriptor(spec), secondaryBulkLoad, 0, nsOpDesc, 0);
    spec.addRoot(nsOpDesc);
    runTest(spec);
}