Use of org.apache.asterix.common.exceptions.RuntimeDataException in project asterixdb by apache.
Class SubsetCollectionDescriptor, method createUnnestingEvaluatorFactory:
@Override
public IUnnestingEvaluatorFactory createUnnestingEvaluatorFactory(final IScalarEvaluatorFactory[] args) {
    return new IUnnestingEvaluatorFactory() {

        private static final long serialVersionUID = 1L;

        @Override
        public IUnnestingEvaluator createUnnestingEvaluator(IHyracksTaskContext ctx) throws HyracksDataException {
            return new IUnnestingEvaluator() {

                private IPointable inputVal = new VoidPointable();
                private IScalarEvaluator evalList = args[0].createScalarEvaluator(ctx);
                private IScalarEvaluator evalStart = args[1].createScalarEvaluator(ctx);
                private IScalarEvaluator evalLen = args[2].createScalarEvaluator(ctx);
                private ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();
                private int numItems;    // requested subset length
                private int numItemsMax; // actual number of items in the input list
                private int posStart;    // requested start position
                private int posCrt;      // current cursor position
                private ATypeTag itemTag;
                private boolean selfDescList = false; // true for heterogeneous (ANY-typed) lists
                private boolean metUnknown = false;   // true if the input list was MISSING or NULL

                @Override
                public void init(IFrameTupleReference tuple) throws HyracksDataException {
                    try {
                        evalStart.evaluate(tuple, inputVal);
                        posStart = ATypeHierarchy.getIntegerValue(getIdentifier().getName(), 0,
                                inputVal.getByteArray(), inputVal.getStartOffset());
                        evalLen.evaluate(tuple, inputVal);
                        numItems = ATypeHierarchy.getIntegerValue(getIdentifier().getName(), 1,
                                inputVal.getByteArray(), inputVal.getStartOffset());
                        evalList.evaluate(tuple, inputVal);
                        byte[] serList = inputVal.getByteArray();
                        int offset = inputVal.getStartOffset();
                        metUnknown = false;
                        byte typeTag = serList[offset];
                        if (typeTag == ATypeTag.SERIALIZED_MISSING_TYPE_TAG
                                || typeTag == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
                            metUnknown = true;
                            return;
                        }
                        if (typeTag != ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG
                                && typeTag != ATypeTag.SERIALIZED_UNORDEREDLIST_TYPE_TAG) {
                            throw new RuntimeDataException(ErrorCode.COERCION, getIdentifier());
                        }
                        if (typeTag == ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG) {
                            numItemsMax = AOrderedListSerializerDeserializer.getNumberOfItems(serList, offset);
                        } else {
                            numItemsMax = AUnorderedListSerializerDeserializer.getNumberOfItems(serList, offset);
                        }
                        itemTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(serList[offset + 1]);
                        if (itemTag == ATypeTag.ANY) {
                            selfDescList = true;
                        }
                        posCrt = posStart;
                    } catch (IOException e) {
                        throw new HyracksDataException(e);
                    }
                }

                @Override
                public boolean step(IPointable result) throws HyracksDataException {
                    if (!metUnknown && posCrt < posStart + numItems && posCrt < numItemsMax) {
                        resultStorage.reset();
                        byte[] serList = inputVal.getByteArray();
                        int offset = inputVal.getStartOffset();
                        try {
                            // The ordered-list helper is used for both list kinds; their item layouts match.
                            int itemOffset = AOrderedListSerializerDeserializer.getItemOffset(serList, offset, posCrt);
                            if (selfDescList) {
                                itemTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(serList[itemOffset]);
                            }
                            int itemLength =
                                    NonTaggedFormatUtil.getFieldValueLength(serList, itemOffset, itemTag, selfDescList);
                            if (!selfDescList) {
                                // Homogeneous lists store untagged items; re-attach the type tag.
                                resultStorage.getDataOutput().writeByte(itemTag.serialize());
                            }
                            resultStorage.getDataOutput().write(serList, itemOffset,
                                    itemLength + (selfDescList ? 1 : 0));
                        } catch (IOException | AsterixException e) {
                            throw new HyracksDataException(e);
                        }
                        result.set(resultStorage);
                        ++posCrt;
                        return true;
                    }
                    return false;
                }
            };
        }
    };
}
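For reference, a minimal plain-Java sketch of the windowing semantics the step() loop above implements: emit the items at positions [posStart, posStart + numItems), stopping early at the end of the list. The class and method names here are illustrative and not part of AsterixDB:

import java.util.ArrayList;
import java.util.List;

public final class SubsetCollectionSketch {

    // Mirrors the step() loop above: collect items at positions
    // [posStart, posStart + numItems), clamped by the list size.
    public static <T> List<T> subset(List<T> items, int posStart, int numItems) {
        List<T> out = new ArrayList<>();
        for (int pos = posStart; pos < posStart + numItems && pos < items.size(); pos++) {
            out.add(items.get(pos));
        }
        return out;
    }

    public static void main(String[] args) {
        // subset([a, b, c, d], 1, 2) -> [b, c]
        System.out.println(subset(List.of("a", "b", "c", "d"), 1, 2));
    }
}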
Use of org.apache.asterix.common.exceptions.RuntimeDataException in project asterixdb by apache.
Class DataflowControllerProvider, method getDataflowController:
// TODO: Instead, use a factory just like data source and data parser.
@SuppressWarnings({ "rawtypes", "unchecked" })
public static IDataFlowController getDataflowController(ARecordType recordType, IHyracksTaskContext ctx,
        int partition, IExternalDataSourceFactory dataSourceFactory, IDataParserFactory dataParserFactory,
        Map<String, String> configuration, boolean indexingOp, boolean isFeed, FeedLogManager feedLogManager)
        throws HyracksDataException {
    try {
        switch (dataSourceFactory.getDataSourceType()) {
            case RECORDS:
                IRecordReaderFactory<?> recordReaderFactory = (IRecordReaderFactory<?>) dataSourceFactory;
                IRecordReader<?> recordReader = recordReaderFactory.createRecordReader(ctx, partition);
                IRecordDataParserFactory<?> recordParserFactory = (IRecordDataParserFactory<?>) dataParserFactory;
                IRecordDataParser<?> dataParser = recordParserFactory.createRecordParser(ctx);
                if (indexingOp) {
                    return new IndexingDataFlowController(ctx,
                            DataflowUtils.getTupleForwarder(configuration, feedLogManager), dataParser,
                            recordReader, ((IIndexingDatasource) recordReader).getIndexer());
                } else if (isFeed) {
                    FeedTupleForwarder tupleForwarder =
                            (FeedTupleForwarder) DataflowUtils.getTupleForwarder(configuration, feedLogManager);
                    boolean isChangeFeed = ExternalDataUtils.isChangeFeed(configuration);
                    boolean isRecordWithMeta = ExternalDataUtils.isRecordWithMeta(configuration);
                    if (isRecordWithMeta) {
                        if (isChangeFeed) {
                            int numOfKeys = ExternalDataUtils.getNumberOfKeys(configuration);
                            return new ChangeFeedWithMetaDataFlowController(ctx, tupleForwarder, feedLogManager,
                                    numOfKeys + 2, (IRecordWithMetadataParser) dataParser, recordReader);
                        } else {
                            return new FeedWithMetaDataFlowController(ctx, tupleForwarder, feedLogManager, 2,
                                    (IRecordWithMetadataParser) dataParser, recordReader);
                        }
                    } else if (isChangeFeed) {
                        int numOfKeys = ExternalDataUtils.getNumberOfKeys(configuration);
                        return new ChangeFeedDataFlowController(ctx, tupleForwarder, feedLogManager,
                                numOfKeys + 1, (IRecordWithPKDataParser) dataParser, recordReader);
                    } else {
                        return new FeedRecordDataFlowController(ctx, tupleForwarder, feedLogManager, 1,
                                dataParser, recordReader);
                    }
                } else {
                    return new RecordDataFlowController(ctx,
                            DataflowUtils.getTupleForwarder(configuration, feedLogManager), dataParser,
                            recordReader, 1);
                }
            case STREAM:
                IInputStreamFactory streamFactory = (IInputStreamFactory) dataSourceFactory;
                AsterixInputStream stream = streamFactory.createInputStream(ctx, partition);
                IStreamDataParserFactory streamParserFactory = (IStreamDataParserFactory) dataParserFactory;
                IStreamDataParser streamParser = streamParserFactory.createInputStreamParser(ctx, partition);
                streamParser.setInputStream(stream);
                if (isFeed) {
                    return new FeedStreamDataFlowController(ctx,
                            (FeedTupleForwarder) DataflowUtils.getTupleForwarder(configuration, feedLogManager),
                            feedLogManager, streamParser, stream);
                } else {
                    return new StreamDataFlowController(ctx,
                            DataflowUtils.getTupleForwarder(configuration, null), streamParser);
                }
            default:
                throw new RuntimeDataException(ErrorCode.PROVIDER_DATAFLOW_CONTROLLER_UNKNOWN_DATA_SOURCE,
                        dataSourceFactory.getDataSourceType());
        }
    } catch (IOException | AsterixException e) {
        throw new HyracksDataException(e);
    }
}
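The TODO at the top of getDataflowController suggests replacing this switch with a factory abstraction like the ones already used for data sources and parsers. One possible shape for such a factory, shown purely as a hypothetical sketch (the interface name and method below do not exist in AsterixDB):

import org.apache.asterix.external.api.IDataFlowController;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;

// Hypothetical factory sketched from the TODO above; not part of AsterixDB.
// Each data source type (RECORDS, STREAM) would register its own implementation,
// reducing getDataflowController to a single factory lookup and call.
public interface IDataFlowControllerFactory {
    IDataFlowController createDataFlowController(IHyracksTaskContext ctx, int partition)
            throws HyracksDataException;
}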
Use of org.apache.asterix.common.exceptions.RuntimeDataException in project asterixdb by apache.
Class DatasourceFactoryProvider, method getInputStreamFactory:
public static IInputStreamFactory getInputStreamFactory(ILibraryManager libraryManager, String streamSource,
        Map<String, String> configuration) throws HyracksDataException {
    IInputStreamFactory streamSourceFactory;
    if (ExternalDataUtils.isExternal(streamSource)) {
        String dataverse = ExternalDataUtils.getDataverse(configuration);
        streamSourceFactory =
                ExternalDataUtils.createExternalInputStreamFactory(libraryManager, dataverse, streamSource);
    } else {
        switch (streamSource) {
            case ExternalDataConstants.STREAM_LOCAL_FILESYSTEM:
                streamSourceFactory = new LocalFSInputStreamFactory();
                break;
            case ExternalDataConstants.SOCKET:
            case ExternalDataConstants.ALIAS_SOCKET_ADAPTER:
            case ExternalDataConstants.STREAM_SOCKET_CLIENT:
                streamSourceFactory = new SocketServerInputStreamFactory();
                break;
            default:
                // Fall back to loading a user-supplied factory class by name.
                try {
                    streamSourceFactory = (IInputStreamFactory) Class.forName(streamSource).newInstance();
                } catch (Exception e) {
                    throw new RuntimeDataException(
                            ErrorCode.PROVIDER_DATASOURCE_FACTORY_UNKNOWN_INPUT_STREAM_FACTORY, e, streamSource);
                }
        }
    }
    return streamSourceFactory;
}
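In the default branch above, Class.newInstance() has been deprecated since Java 9, and catch (Exception e) also swallows the ClassCastException from the unchecked cast. A hedged sketch of an alternative that uses the non-deprecated constructor call and distinguishes a wrong type from a reflective failure; the helper class is illustrative, not AsterixDB code, and the RuntimeDataException constructors are assumed from the snippets above:

import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.exceptions.RuntimeDataException;
import org.apache.asterix.external.api.IInputStreamFactory;
import org.apache.hyracks.api.exceptions.HyracksDataException;

final class StreamFactoryLoader {

    // Illustrative helper: reflectively loads a stream-factory class by name,
    // reporting both a wrong type and a reflective failure through the same
    // error code used in the snippet above.
    static IInputStreamFactory load(String streamSource) throws HyracksDataException {
        try {
            Object candidate = Class.forName(streamSource).getDeclaredConstructor().newInstance();
            if (candidate instanceof IInputStreamFactory) {
                return (IInputStreamFactory) candidate;
            }
            throw new RuntimeDataException(
                    ErrorCode.PROVIDER_DATASOURCE_FACTORY_UNKNOWN_INPUT_STREAM_FACTORY, streamSource);
        } catch (ReflectiveOperationException e) {
            throw new RuntimeDataException(
                    ErrorCode.PROVIDER_DATASOURCE_FACTORY_UNKNOWN_INPUT_STREAM_FACTORY, e, streamSource);
        }
    }
}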
Use of org.apache.asterix.common.exceptions.RuntimeDataException in project asterixdb by apache.
Class HDFSUtils, method initializeIndexingHDFSScheduler:
public static IndexingScheduler initializeIndexingHDFSScheduler(ICCServiceContext serviceCtx)
        throws HyracksDataException {
    ICCContext ccContext = serviceCtx.getCCContext();
    try {
        return new IndexingScheduler(ccContext.getClusterControllerInfo().getClientNetAddress(),
                ccContext.getClusterControllerInfo().getClientNetPort());
    } catch (HyracksException e) {
        throw new RuntimeDataException(ErrorCode.UTIL_HDFS_UTILS_CANNOT_OBTAIN_HDFS_SCHEDULER);
    }
}
Use of org.apache.asterix.common.exceptions.RuntimeDataException in project asterixdb by apache.
Class HDFSUtils, method initializeHDFSScheduler:
public static Scheduler initializeHDFSScheduler(ICCServiceContext serviceCtx) throws HyracksDataException {
    ICCContext ccContext = serviceCtx.getCCContext();
    try {
        return new Scheduler(ccContext.getClusterControllerInfo().getClientNetAddress(),
                ccContext.getClusterControllerInfo().getClientNetPort());
    } catch (HyracksException e) {
        throw new RuntimeDataException(ErrorCode.UTIL_HDFS_UTILS_CANNOT_OBTAIN_HDFS_SCHEDULER);
    }
}
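initializeIndexingHDFSScheduler and initializeHDFSScheduler are identical apart from the scheduler type they construct, and both discard the caught HyracksException. A hedged sketch of one way to share the lookup and chain the cause; the functional interface and helper class are illustrative, and the cause-taking RuntimeDataException overload is assumed from the pattern in getInputStreamFactory above:

import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.exceptions.RuntimeDataException;
import org.apache.hyracks.api.application.ICCServiceContext;
import org.apache.hyracks.api.client.ClusterControllerInfo;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.exceptions.HyracksException;

final class HDFSSchedulerHelper {

    // Illustrative functional interface: builds a scheduler from the cluster
    // controller's client network address and port.
    @FunctionalInterface
    interface SchedulerConstructor<T> {
        T create(String ipAddress, int port) throws HyracksException;
    }

    // Shared lookup-and-construct logic for both scheduler variants; unlike
    // the original methods, this version chains the underlying exception.
    static <T> T createScheduler(ICCServiceContext serviceCtx, SchedulerConstructor<T> ctor)
            throws HyracksDataException {
        ClusterControllerInfo ccInfo = serviceCtx.getCCContext().getClusterControllerInfo();
        try {
            return ctor.create(ccInfo.getClientNetAddress(), ccInfo.getClientNetPort());
        } catch (HyracksException e) {
            throw new RuntimeDataException(ErrorCode.UTIL_HDFS_UTILS_CANNOT_OBTAIN_HDFS_SCHEDULER, e);
        }
    }

    // Usage (sketch):
    //   Scheduler scheduler = createScheduler(serviceCtx, Scheduler::new);
    //   IndexingScheduler indexingScheduler = createScheduler(serviceCtx, IndexingScheduler::new);
}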