use of org.apache.hyracks.api.exceptions.HyracksDataException in project asterixdb by apache.
the class DataflowControllerProvider method getDataflowController.
// TODO: Instead, use a factory just like data source and data parser.
@SuppressWarnings({ "rawtypes", "unchecked" })
public static IDataFlowController getDataflowController(ARecordType recordType, IHyracksTaskContext ctx,
        int partition, IExternalDataSourceFactory dataSourceFactory, IDataParserFactory dataParserFactory,
        Map<String, String> configuration, boolean indexingOp, boolean isFeed, FeedLogManager feedLogManager)
        throws HyracksDataException {
    try {
        switch (dataSourceFactory.getDataSourceType()) {
            case RECORDS:
                IRecordReaderFactory<?> recordReaderFactory = (IRecordReaderFactory<?>) dataSourceFactory;
                IRecordReader<?> recordReader = recordReaderFactory.createRecordReader(ctx, partition);
                IRecordDataParserFactory<?> recordParserFactory = (IRecordDataParserFactory<?>) dataParserFactory;
                IRecordDataParser<?> dataParser = recordParserFactory.createRecordParser(ctx);
                if (indexingOp) {
                    return new IndexingDataFlowController(ctx,
                            DataflowUtils.getTupleForwarder(configuration, feedLogManager), dataParser, recordReader,
                            ((IIndexingDatasource) recordReader).getIndexer());
                } else if (isFeed) {
                    FeedTupleForwarder tupleForwarder =
                            (FeedTupleForwarder) DataflowUtils.getTupleForwarder(configuration, feedLogManager);
                    boolean isChangeFeed = ExternalDataUtils.isChangeFeed(configuration);
                    boolean isRecordWithMeta = ExternalDataUtils.isRecordWithMeta(configuration);
                    if (isRecordWithMeta) {
                        if (isChangeFeed) {
                            int numOfKeys = ExternalDataUtils.getNumberOfKeys(configuration);
                            return new ChangeFeedWithMetaDataFlowController(ctx, tupleForwarder, feedLogManager,
                                    numOfKeys + 2, (IRecordWithMetadataParser) dataParser, recordReader);
                        } else {
                            return new FeedWithMetaDataFlowController(ctx, tupleForwarder, feedLogManager, 2,
                                    (IRecordWithMetadataParser) dataParser, recordReader);
                        }
                    } else if (isChangeFeed) {
                        int numOfKeys = ExternalDataUtils.getNumberOfKeys(configuration);
                        return new ChangeFeedDataFlowController(ctx, tupleForwarder, feedLogManager, numOfKeys + 1,
                                (IRecordWithPKDataParser) dataParser, recordReader);
                    } else {
                        return new FeedRecordDataFlowController(ctx, tupleForwarder, feedLogManager, 1, dataParser,
                                recordReader);
                    }
                } else {
                    return new RecordDataFlowController(ctx,
                            DataflowUtils.getTupleForwarder(configuration, feedLogManager), dataParser, recordReader,
                            1);
                }
            case STREAM:
                IInputStreamFactory streamFactory = (IInputStreamFactory) dataSourceFactory;
                AsterixInputStream stream = streamFactory.createInputStream(ctx, partition);
                IStreamDataParserFactory streamParserFactory = (IStreamDataParserFactory) dataParserFactory;
                IStreamDataParser streamParser = streamParserFactory.createInputStreamParser(ctx, partition);
                streamParser.setInputStream(stream);
                if (isFeed) {
                    return new FeedStreamDataFlowController(ctx,
                            (FeedTupleForwarder) DataflowUtils.getTupleForwarder(configuration, feedLogManager),
                            feedLogManager, streamParser, stream);
                } else {
                    return new StreamDataFlowController(ctx, DataflowUtils.getTupleForwarder(configuration, null),
                            streamParser);
                }
            default:
                throw new RuntimeDataException(ErrorCode.PROVIDER_DATAFLOW_CONTROLLER_UNKNOWN_DATA_SOURCE,
                        dataSourceFactory.getDataSourceType());
        }
    } catch (IOException | AsterixException e) {
        throw new HyracksDataException(e);
    }
}
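The method is a two-level dispatch: first on the data source type (RECORDS vs. STREAM), then on the indexingOp/isFeed/change-feed flags, with checked failures rewrapped as HyracksDataException. A minimal, self-contained sketch of that shape, using hypothetical names rather than the AsterixDB API:

import java.io.IOException;
import java.io.UncheckedIOException;

enum SourceType { RECORDS, STREAM }

interface DataFlowSketch { void run() throws IOException; }

final class DataFlowSketchProvider {
    static DataFlowSketch get(SourceType type, boolean isFeed) {
        switch (type) {
            case RECORDS:
                // record-at-a-time path; the real provider further splits on indexingOp/isFeed/change-feed
                return () -> System.out.println(isFeed ? "feed record controller" : "record controller");
            case STREAM:
                // byte-stream path; a stream parser is attached to the input stream
                return () -> System.out.println(isFeed ? "feed stream controller" : "stream controller");
            default:
                throw new IllegalArgumentException("unknown data source type: " + type);
        }
    }

    public static void main(String[] args) {
        try {
            get(SourceType.STREAM, true).run();
        } catch (IOException e) {
            // the real code rewraps checked I/O failures as HyracksDataException
            throw new UncheckedIOException(e);
        }
    }
}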
use of org.apache.hyracks.api.exceptions.HyracksDataException in project asterixdb by apache.
the class DatasourceFactoryProvider method getInputStreamFactory.
public static IInputStreamFactory getInputStreamFactory(ILibraryManager libraryManager, String streamSource,
        Map<String, String> configuration) throws HyracksDataException {
    IInputStreamFactory streamSourceFactory;
    if (ExternalDataUtils.isExternal(streamSource)) {
        String dataverse = ExternalDataUtils.getDataverse(configuration);
        streamSourceFactory =
                ExternalDataUtils.createExternalInputStreamFactory(libraryManager, dataverse, streamSource);
    } else {
        switch (streamSource) {
            case ExternalDataConstants.STREAM_LOCAL_FILESYSTEM:
                streamSourceFactory = new LocalFSInputStreamFactory();
                break;
            case ExternalDataConstants.SOCKET:
            case ExternalDataConstants.ALIAS_SOCKET_ADAPTER:
                streamSourceFactory = new SocketServerInputStreamFactory();
                break;
            case ExternalDataConstants.STREAM_SOCKET_CLIENT:
                streamSourceFactory = new SocketServerInputStreamFactory();
                break;
            default:
                try {
                    streamSourceFactory = (IInputStreamFactory) Class.forName(streamSource).newInstance();
                } catch (Exception e) {
                    throw new RuntimeDataException(
                            ErrorCode.PROVIDER_DATASOURCE_FACTORY_UNKNOWN_INPUT_STREAM_FACTORY, e, streamSource);
                }
        }
    }
    return streamSourceFactory;
}
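The default branch resolves the factory reflectively from its fully qualified class name and wraps any failure. A small, self-contained sketch of that fallback with hypothetical names, using the non-deprecated constructor-based instantiation in place of Class.newInstance():

final class ReflectiveFactoryLoader {
    static <T> T instantiate(String className, Class<T> expected) {
        try {
            // getDeclaredConstructor().newInstance() is the modern equivalent of the
            // Class.newInstance() call used in the snippet above
            Object o = Class.forName(className).getDeclaredConstructor().newInstance();
            return expected.cast(o);
        } catch (ReflectiveOperationException | ClassCastException e) {
            throw new IllegalArgumentException("unknown input-stream factory: " + className, e);
        }
    }

    public static void main(String[] args) {
        // loads java.util.ArrayList purely to demonstrate the mechanism
        Object o = instantiate("java.util.ArrayList", Object.class);
        System.out.println(o.getClass().getName());
    }
}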
use of org.apache.hyracks.api.exceptions.HyracksDataException in project asterixdb by apache.
the class ExprTree method toString.
@Override
public String toString() {
    ClassAdObjectPool objectPool = new ClassAdObjectPool();
    ClassAdUnParser unparser = new ClassAdUnParser(objectPool);
    AMutableCharArrayString string_representation = objectPool.strPool.get();
    try {
        unparser.unparse(string_representation, this);
    } catch (HyracksDataException e) {
        e.printStackTrace();
    }
    return string_representation.toString();
}
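Because Object.toString() cannot declare a checked exception, the unparse failure is caught locally rather than propagated. A sketch of the same pattern with hypothetical names:

import java.io.IOException;

final class Unparsable {
    private String unparse() throws IOException {
        return "serialized form";   // stand-in for a throwing serializer like ClassAdUnParser.unparse(...)
    }

    @Override
    public String toString() {
        try {
            return unparse();
        } catch (IOException e) {
            // the snippet above prints the stack trace; returning a marker string is another option
            return "<unparse failed: " + e.getMessage() + ">";
        }
    }

    public static void main(String[] args) {
        System.out.println(new Unparsable());
    }
}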
use of org.apache.hyracks.api.exceptions.HyracksDataException in project asterixdb by apache.
the class TweetParser method writeRecord.
public void writeRecord(JsonNode obj, DataOutput out, ARecordType curRecType) throws IOException {
    IAType[] curTypes = null;
    String[] curFNames = null;
    int fieldN;
    int attrIdx;
    int expectedFieldsCount = 0;
    ArrayBackedValueStorage fieldValueBuffer = getTempBuffer();
    ArrayBackedValueStorage fieldNameBuffer = getTempBuffer();
    IARecordBuilder recBuilder = getRecordBuilder();
    if (curRecType != null) {
        curTypes = curRecType.getFieldTypes();
        curFNames = curRecType.getFieldNames();
        for (IAType curType : curTypes) {
            if (!(curType instanceof AUnionType)) {
                expectedFieldsCount++;
            }
        }
    }
    recBuilder.reset(curRecType);
    recBuilder.init();
    if (curRecType != null && !curRecType.isOpen()) {
        // closed record type
        fieldN = curFNames.length;
        for (int iter1 = 0; iter1 < fieldN; iter1++) {
            fieldValueBuffer.reset();
            DataOutput fieldOutput = fieldValueBuffer.getDataOutput();
            if (obj.get(curFNames[iter1]).isNull() && !(curTypes[iter1] instanceof AUnionType)) {
                if (curRecType.isClosedField(curFNames[iter1])) {
                    throw new RuntimeDataException(ErrorCode.PARSER_TWEET_PARSER_CLOSED_FIELD_NULL,
                            curFNames[iter1]);
                } else {
                    continue;
                }
            } else {
                if (writeField(obj.get(curFNames[iter1]), curTypes[iter1], fieldOutput)) {
                    recBuilder.addField(iter1, fieldValueBuffer);
                }
            }
        }
    } else {
        // open record type
        int closedFieldCount = 0;
        IAType curFieldType = null;
        String attrName;
        Iterator<String> iter = obj.fieldNames();
        while (iter.hasNext()) {
            attrName = iter.next();
            if (obj.get(attrName) == null || obj.get(attrName).isNull() || obj.size() == 0) {
                continue;
            }
            attrIdx = checkAttrNameIdx(curFNames, attrName);
            if (curRecType != null) {
                curFieldType = curRecType.getFieldType(attrName);
            }
            fieldValueBuffer.reset();
            fieldNameBuffer.reset();
            DataOutput fieldOutput = fieldValueBuffer.getDataOutput();
            if (writeField(obj.get(attrName), curFieldType, fieldOutput)) {
                if (attrIdx == -1) {
                    aString.setValue(attrName);
                    stringSerde.serialize(aString, fieldNameBuffer.getDataOutput());
                    recBuilder.addField(fieldNameBuffer, fieldValueBuffer);
                } else {
                    recBuilder.addField(attrIdx, fieldValueBuffer);
                    closedFieldCount++;
                }
            }
        }
        if (curRecType != null && closedFieldCount < expectedFieldsCount) {
            throw new HyracksDataException("Non-null field is null");
        }
    }
    recBuilder.write(out, true);
}
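The closed-record branch checks each declared field for null, while the open-record branch discovers fields through Jackson's fieldNames() iterator. A self-contained Jackson sketch of that traversal, with a hypothetical closed-field list standing in for the ARecordType:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Iterator;
import java.util.List;

final class TweetFieldWalk {
    public static void main(String[] args) throws Exception {
        JsonNode obj = new ObjectMapper().readTree("{\"id\":1,\"text\":\"hi\",\"lang\":null,\"extra\":42}");
        List<String> closedFields = List.of("id", "text", "lang");   // stand-in for the record type's field names
        for (String name : closedFields) {
            JsonNode value = obj.get(name);
            if (value == null || value.isNull()) {
                // the parser above throws here when a non-optional closed field is null
                System.out.println("closed field missing or null: " + name);
            } else {
                System.out.println("closed field " + name + " = " + value);
            }
        }
        Iterator<String> iter = obj.fieldNames();
        while (iter.hasNext()) {
            String name = iter.next();
            if (!closedFields.contains(name)) {
                // added by name, analogous to recBuilder.addField(fieldNameBuffer, fieldValueBuffer)
                System.out.println("open field " + name + " = " + obj.get(name));
            }
        }
    }
}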
use of org.apache.hyracks.api.exceptions.HyracksDataException in project asterixdb by apache.
the class Operation method doBitwise.
public static int doBitwise(int op, Value v1, Value v2, Value result, ClassAdObjectPool objectPool)
        throws HyracksDataException {
    AMutableInt64 i1 = objectPool.int64Pool.get();
    AMutableInt64 i2 = objectPool.int64Pool.get();
    // bitwise operations are defined only on integers
    if (op == OpKind_BITWISE_NOT_OP) {
        if (!v1.isIntegerValue(i1)) {
            result.setErrorValue();
            return SigValues.SIG_CHLD1.ordinal();
        }
    } else if (!v1.isIntegerValue(i1) || !v2.isIntegerValue(i2)) {
        result.setErrorValue();
        return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
    }
    switch (op) {
        case OpKind_BITWISE_NOT_OP:
            result.setIntegerValue(~(i1.getLongValue()));
            break;
        case OpKind_BITWISE_OR_OP:
            result.setIntegerValue(i1.getLongValue() | i2.getLongValue());
            break;
        case OpKind_BITWISE_AND_OP:
            result.setIntegerValue(i1.getLongValue() & i2.getLongValue());
            break;
        case OpKind_BITWISE_XOR_OP:
            result.setIntegerValue(i1.getLongValue() ^ i2.getLongValue());
            break;
        case OpKind_LEFT_SHIFT_OP:
            result.setIntegerValue(i1.getLongValue() << i2.getLongValue());
            break;
        case OpKind_URIGHT_SHIFT_OP:
            // unsigned right shift: >>> shifts zeros in from the left, ignoring the sign bit
            result.setIntegerValue(i1.getLongValue() >>> i2.getLongValue());
            break;
        case OpKind_RIGHT_SHIFT_OP:
            // arithmetic right shift: >> preserves the sign bit
            result.setIntegerValue(i1.getLongValue() >> i2.getLongValue());
            break;
        default:
            // should not get here
            throw new HyracksDataException("Should not get here");
    }
    if (op == OpKind_BITWISE_NOT_OP) {
        return SigValues.SIG_CHLD1.ordinal();
    }
    return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
}
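The switch leans directly on Java's long bitwise operators; in particular >> is an arithmetic (sign-extending) shift while >>> is a logical (zero-filling) shift, which is why the two right-shift opcodes are handled separately. A plain-Java illustration:

public final class ShiftDemo {
    public static void main(String[] args) {
        long negative = -8L;
        System.out.println(negative >> 1);    // -4  (sign bit preserved)
        System.out.println(negative >>> 1);   // 9223372036854775804  (sign bit shifted out)
        System.out.println(~0L);              // -1  (all bits flipped)
        System.out.println(5L & 3L);          // 1
        System.out.println(5L | 3L);          // 7
        System.out.println(5L ^ 3L);          // 6
        System.out.println(1L << 3);          // 8
    }
}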