Example usage of org.apache.asterix.common.exceptions.AsterixException in the Apache AsterixDB project:
class HDFSDataSourceFactory, method configure.
/**
 * Configures this HDFS data-source factory: builds the Hadoop {@link JobConf},
 * computes the input splits (restricted to {@code files} when a file list was
 * supplied), derives the read schedule, and determines the record class from
 * the configured format.
 *
 * @param serviceCtx    the CC service context (cast to {@code ICCServiceContext} for init)
 * @param configuration external-data adapter configuration key/value pairs
 * @throws AsterixException wrapping any {@link IOException} from HDFS access
 */
@Override
public void configure(IServiceContext serviceCtx, Map<String, String> configuration) throws AsterixException {
    try {
        this.serviceCtx = serviceCtx;
        this.configuration = configuration;
        init((ICCServiceContext) serviceCtx);
        JobConf conf = HDFSUtils.configureHDFSJobConf(configuration);
        confFactory = new ConfFactory(conf);
        clusterLocations = getPartitionConstraint();
        int numPartitions = clusterLocations.getLocations().length;
        // if files list was set, we restrict the splits to the list
        InputSplit[] inputSplits;
        if (files == null) {
            inputSplits = conf.getInputFormat().getSplits(conf, numPartitions);
        } else {
            inputSplits = HDFSUtils.getSplits(conf, files);
        }
        if (indexingOp) {
            readSchedule = indexingScheduler.getLocationConstraints(inputSplits);
        } else {
            readSchedule = hdfsScheduler.getLocationConstraints(inputSplits);
        }
        inputSplitsFactory = new InputSplitsFactory(inputSplits);
        // A freshly allocated boolean[] is already all-false; the explicit
        // Arrays.fill(read, false) in the previous version was redundant.
        read = new boolean[readSchedule.length];
        String formatString = configuration.get(ExternalDataConstants.KEY_FORMAT);
        if (formatString == null || formatString.equals(ExternalDataConstants.FORMAT_HDFS_WRITABLE)) {
            // Probe the first split only to learn the writable value class.
            RecordReader<?, ?> reader = conf.getInputFormat().getRecordReader(inputSplits[0], conf, Reporter.NULL);
            try {
                this.recordClass = reader.createValue().getClass();
            } finally {
                // Close in finally so the reader is not leaked if createValue()
                // or getClass() throws (the old code leaked it on exception).
                reader.close();
            }
        } else {
            recordReaderClazz = StreamRecordReaderProvider.getRecordReaderClazz(configuration);
            this.recordClass = char[].class;
        }
    } catch (IOException e) {
        throw new AsterixException(e);
    }
}
Example usage of org.apache.asterix.common.exceptions.AsterixException in the Apache AsterixDB project:
class TwitterRecordReaderFactory, method configure.
/**
 * Configures the Twitter record-reader adapter: verifies the twitter4j library
 * is available, validates the OAuth credentials, and — for the pull (search)
 * reader — validates the query and polling interval parameters.
 *
 * @param serviceCtx    the service context
 * @param configuration adapter configuration key/value pairs
 * @throws AsterixException if twitter4j is missing, OAuth parameters are
 *                          incomplete, or the pull reader lacks a query
 */
@Override
public void configure(IServiceContext serviceCtx, Map<String, String> configuration) throws AsterixException {
    // Fail fast when the optional twitter4j dependency is not on the classpath.
    try {
        Class.forName("twitter4j.Twitter");
    } catch (ClassNotFoundException e) {
        throw new AsterixException(ErrorCode.ADAPTER_TWITTER_TWITTER4J_LIB_NOT_FOUND, e);
    }
    this.configuration = configuration;
    this.serviceCtx = serviceCtx;
    TwitterUtil.initializeConfigurationWithAuthInfo(configuration);
    if (!validateConfiguration(configuration)) {
        StringBuilder builder = new StringBuilder();
        builder.append("One or more parameters are missing from adapter configuration\n");
        builder.append(AuthenticationConstants.OAUTH_CONSUMER_KEY + "\n");
        builder.append(AuthenticationConstants.OAUTH_CONSUMER_SECRET + "\n");
        builder.append(AuthenticationConstants.OAUTH_ACCESS_TOKEN + "\n");
        builder.append(AuthenticationConstants.OAUTH_ACCESS_TOKEN_SECRET);
        throw new AsterixException(builder.toString());
    }
    // Constant-first equals: the old code NPE'd when KEY_READER was absent
    // from the configuration map.
    if (ExternalDataConstants.READER_PULL_TWITTER.equals(configuration.get(ExternalDataConstants.KEY_READER))) {
        if (configuration.get(SearchAPIConstants.QUERY) == null) {
            throw new AsterixException("parameter " + SearchAPIConstants.QUERY + " not specified as part of adaptor configuration");
        }
        String interval = configuration.get(SearchAPIConstants.INTERVAL);
        if (interval != null) {
            // Only validate that the interval parses; the string value itself
            // is kept in the configuration map.
            try {
                Integer.parseInt(interval);
            } catch (NumberFormatException nfe) {
                throw new IllegalArgumentException("parameter " + SearchAPIConstants.INTERVAL + " is defined incorrectly, expecting a number");
            }
        } else {
            configuration.put(SearchAPIConstants.INTERVAL, DEFAULT_INTERVAL);
            if (LOGGER.isLoggable(Level.WARNING)) {
                LOGGER.warning(" Parameter " + SearchAPIConstants.INTERVAL + " not defined, using default (" + DEFAULT_INTERVAL + ")");
            }
        }
    }
}
Example usage of org.apache.asterix.common.exceptions.AsterixException in the Apache AsterixDB project:
class HDFSLookupReaderFactory, method configure.
/**
 * Configures this lookup-reader factory by capturing the service context and
 * adapter configuration, then building a serializable Hadoop configuration
 * factory from them.
 *
 * @param serviceCtx    the service context
 * @param configuration adapter configuration key/value pairs
 * @throws AsterixException wrapping any {@code HyracksDataException} raised
 *                          while serializing the job configuration
 */
@Override
public void configure(IServiceContext serviceCtx, Map<String, String> configuration) throws AsterixException {
    this.serviceCtx = serviceCtx;
    this.configuration = configuration;
    // Build the HDFS job configuration up front; only its serialization into
    // the ConfFactory can fail.
    final JobConf jobConf = HDFSUtils.configureHDFSJobConf(configuration);
    try {
        confFactory = new ConfFactory(jobConf);
    } catch (HyracksDataException e) {
        throw new AsterixException(e);
    }
}
Example usage of org.apache.asterix.common.exceptions.AsterixException in the Apache AsterixDB project:
class RTreeResourceFactoryProvider, method getCmpFactories.
/**
 * Builds the binary comparator factories for an R-tree secondary index: one
 * comparator per nested spatial dimension coordinate (numDimensions * 2).
 *
 * @param metadataProvider supplies the storage component's comparator provider
 * @param index            the R-tree index definition (must have exactly one key field)
 * @param recordType       the dataset record type
 * @param metaType         the dataset meta record type (key source indicator 1)
 * @return one comparator factory per nested secondary key field
 * @throws AlgebricksException if the index has more than one key field or the
 *                             key field is not found in the schema
 */
private static IBinaryComparatorFactory[] getCmpFactories(MetadataProvider metadataProvider, Index index, ARecordType recordType, ARecordType metaType) throws AlgebricksException {
    IBinaryComparatorFactoryProvider cmpFactoryProvider = metadataProvider.getStorageComponentProvider().getComparatorFactoryProvider();
    List<List<String>> secondaryKeyFields = index.getKeyFieldNames();
    int numSecondaryKeys = secondaryKeyFields.size();
    if (numSecondaryKeys != 1) {
        throw new AsterixException("Cannot use " + numSecondaryKeys + " fields as a key for the R-tree index. " + "There can be only one field as a key for the R-tree index.");
    }
    // Key source indicator 0 (or absent) means the key comes from the record
    // type; 1 means it comes from the meta type.
    List<Integer> keySourceIndicators = index.getKeyFieldSourceIndicators();
    ARecordType sourceType;
    if (keySourceIndicators == null || keySourceIndicators.get(0) == 0) {
        sourceType = recordType;
    } else {
        sourceType = metaType;
    }
    Pair<IAType, Boolean> spatialTypePair = Index.getNonNullableOpenFieldType(index.getKeyFieldTypes().get(0), secondaryKeyFields.get(0), sourceType);
    IAType spatialType = spatialTypePair.first;
    if (spatialType == null) {
        throw new AsterixException("Could not find field " + secondaryKeyFields.get(0) + " in the schema.");
    }
    IAType nestedKeyType = NonTaggedFormatUtil.getNestedSpatialType(spatialType.getTypeTag());
    int numDimensions = NonTaggedFormatUtil.getNumDimensions(spatialType.getTypeTag());
    // Each dimension contributes a low and a high coordinate.
    int numNestedSecondaryKeyFields = numDimensions * 2;
    // All nested key fields share the same type, so create the (loop-invariant)
    // comparator factory once instead of once per slot.
    IBinaryComparatorFactory nestedKeyCmpFactory = cmpFactoryProvider.getBinaryComparatorFactory(nestedKeyType, true);
    IBinaryComparatorFactory[] secondaryComparatorFactories = new IBinaryComparatorFactory[numNestedSecondaryKeyFields];
    for (int i = 0; i < numNestedSecondaryKeyFields; i++) {
        secondaryComparatorFactories[i] = nestedKeyCmpFactory;
    }
    return secondaryComparatorFactories;
}
Example usage of org.apache.asterix.common.exceptions.AsterixException in the Apache AsterixDB project:
class RTreeResourceFactoryProvider, method getTypeTraits.
/**
 * Builds the type traits for an R-tree secondary index: one trait per nested
 * spatial dimension coordinate (numDimensions * 2) followed by the traits of
 * the dataset's primary keys.
 *
 * @param metadataProvider supplies the storage component's type-trait provider
 * @param dataset          the dataset owning the index (source of primary keys)
 * @param index            the R-tree index definition (must have exactly one key field)
 * @param recordType       the dataset record type
 * @param metaType         the dataset meta record type (key source indicator 1)
 * @return nested secondary key traits followed by primary key traits
 * @throws AlgebricksException if the index has more than one key field or the
 *                             key field is not found in the schema
 */
private static ITypeTraits[] getTypeTraits(MetadataProvider metadataProvider, Dataset dataset, Index index, ARecordType recordType, ARecordType metaType) throws AlgebricksException {
    ITypeTraitProvider ttProvider = metadataProvider.getStorageComponentProvider().getTypeTraitProvider();
    List<List<String>> secondaryKeyFields = index.getKeyFieldNames();
    int numSecondaryKeys = secondaryKeyFields.size();
    int numPrimaryKeys = dataset.getPrimaryKeys().size();
    ITypeTraits[] primaryTypeTraits = dataset.getPrimaryTypeTraits(metadataProvider, recordType, metaType);
    if (numSecondaryKeys != 1) {
        throw new AsterixException("Cannot use " + numSecondaryKeys + " fields as a key for the R-tree index. " + "There can be only one field as a key for the R-tree index.");
    }
    // Key source indicator 0 (or absent) means the key comes from the record
    // type; 1 means it comes from the meta type.
    ARecordType sourceType;
    List<Integer> keySourceIndicators = index.getKeyFieldSourceIndicators();
    if (keySourceIndicators == null || keySourceIndicators.get(0) == 0) {
        sourceType = recordType;
    } else {
        sourceType = metaType;
    }
    Pair<IAType, Boolean> spatialTypePair = Index.getNonNullableOpenFieldType(index.getKeyFieldTypes().get(0), secondaryKeyFields.get(0), sourceType);
    IAType spatialType = spatialTypePair.first;
    if (spatialType == null) {
        throw new AsterixException("Could not find field " + secondaryKeyFields.get(0) + " in the schema.");
    }
    int numDimensions = NonTaggedFormatUtil.getNumDimensions(spatialType.getTypeTag());
    // Each dimension contributes a low and a high coordinate.
    int numNestedSecondaryKeyFields = numDimensions * 2;
    ITypeTraits[] secondaryTypeTraits = new ITypeTraits[numNestedSecondaryKeyFields + numPrimaryKeys];
    IAType nestedKeyType = NonTaggedFormatUtil.getNestedSpatialType(spatialType.getTypeTag());
    // All nested key fields share the same type; compute the trait once
    // instead of calling the provider on every loop iteration.
    ITypeTraits nestedKeyTypeTrait = ttProvider.getTypeTrait(nestedKeyType);
    for (int i = 0; i < numNestedSecondaryKeyFields; i++) {
        secondaryTypeTraits[i] = nestedKeyTypeTrait;
    }
    // Append the primary key traits after the nested secondary key traits.
    System.arraycopy(primaryTypeTraits, 0, secondaryTypeTraits, numNestedSecondaryKeyFields, numPrimaryKeys);
    return secondaryTypeTraits;
}
Aggregations