Use of org.apache.pig.impl.util.UDFContext in the project wonderdog by infochimps-labs:
class ElasticSearchIndex, method setStoreLocation.
/**
 * Parses the store location ({@code es://index/type}) and populates the job
 * configuration with the ElasticSearch settings. Note that, since this is
 * called more than once, we need to make sure and not change anything we've
 * already set.
 *
 * @param location store location of the form {@code es://myindex/myobj}
 * @param job      job whose configuration receives the settings
 * @throws IOException declared by the StoreFunc contract (not thrown here)
 */
@Override
public void setStoreLocation(String location, Job job) throws IOException {
    // Strip the "es://" scheme prefix, then split into {index, object type}.
    String[] esStore = location.substring(5).split(SLASH);
    if (esStore.length != 2) {
        throw new RuntimeException("Please specify a valid elasticsearch index, eg. es://myindex/myobj");
    }
    Configuration conf = job.getConfiguration();
    // Only set if we haven't already (this method may run multiple times)
    if (conf.get(ES_INDEX_NAME) == null) {
        // The length check above guarantees both elements exist, so the
        // old catch of ArrayIndexOutOfBoundsException was unreachable.
        conf.set(ES_INDEX_NAME, esStore[0]);
        conf.set(ES_OBJECT_TYPE, esStore[1]);
        conf.setBoolean(ES_IS_JSON, false);
        conf.set(ES_BULK_SIZE, bulkSize);
        conf.set(ES_ID_FIELD, idField);
        // Adds the elasticsearch.yml file (esConfig) and the plugins directory (esPlugins) to the distributed cache
        try {
            Path hdfsConfigPath = new Path(ES_CONFIG_HDFS_PATH);
            Path hdfsPluginsPath = new Path(ES_PLUGINS_HDFS_PATH);
            HadoopUtils.uploadLocalFile(new Path(LOCAL_SCHEME + esConfig), hdfsConfigPath, conf);
            HadoopUtils.shipFileIfNotShipped(hdfsConfigPath, conf);
            HadoopUtils.uploadLocalFile(new Path(LOCAL_SCHEME + esPlugins), hdfsPluginsPath, conf);
            HadoopUtils.shipArchiveIfNotShipped(hdfsPluginsPath, conf);
        } catch (Exception e) {
            throw new RuntimeException(e); // preserve the original cause
        }
        // Publish the Pig field names captured on the front end.
        UDFContext context = UDFContext.getUDFContext();
        Properties property = context.getUDFProperties(ResourceSchema.class);
        conf.set(ES_FIELD_NAMES, property.getProperty(PIG_ES_FIELD_NAMES));
    }
}
Use of org.apache.pig.impl.util.UDFContext in the project mongo-hadoop by mongodb:
class MongoInsertStorage, method checkSchema.
/**
 * Remembers the output schema locally and publishes its string form through
 * the UDFContext so the back-end tasks can retrieve it later.
 */
@Override
public void checkSchema(final ResourceSchema schema) throws IOException {
    this.schema = schema;
    Properties udfProperties =
        UDFContext.getUDFContext().getUDFProperties(getClass(), new String[] { udfcSignature });
    udfProperties.setProperty(SCHEMA_SIGNATURE, schema.toString());
}
Use of org.apache.pig.impl.util.UDFContext in the project mongo-hadoop by mongodb:
class BSONStorage, method prepareToWrite.
/**
 * Stores the RecordWriter and restores the output schema (if any) that was
 * stashed in the UDFContext on the front end.
 *
 * @param writer the record writer supplied by the framework
 * @throws IOException if the writer is null
 */
@Override
public void prepareToWrite(final RecordWriter writer) throws IOException {
    out = writer;
    if (out == null) {
        throw new IOException("Invalid Record Writer");
    }
    UDFContext udfc = UDFContext.getUDFContext();
    Properties p = udfc.getUDFProperties(getClass(), new String[] { udfcSignature });
    String strSchema = p.getProperty(SCHEMA_SIGNATURE);
    if (strSchema == null) {
        // No schema stored; proceed without one instead of feeding null to
        // the parser and relying on the resulting exception (old behavior).
        LOG.warn("Could not find schema in UDF context!");
        LOG.warn("Will attempt to write records without schema.");
        schema = null;
        return;
    }
    try {
        // Parse the schema from the string stored in the properties object.
        schema = new ResourceSchema(Utils.getSchemaFromString(strSchema));
    } catch (Exception e) {
        // Best-effort: fall back to schema-less writes rather than failing
        // the task; keep the stack trace for debugging.
        schema = null;
        LOG.warn(e.getMessage(), e);
    }
}
Use of org.apache.pig.impl.util.UDFContext in the project mongo-hadoop by mongodb:
class MongoStorage, method prepareToWrite.
/**
 * Captures the record writer, restores the output schema published by the
 * front end via the UDFContext, and ensures any configured indexes.
 *
 * @param writer the record writer supplied by the framework
 * @throws IOException if the writer is null, the schema is missing, or the
 *                     stored schema string cannot be parsed
 */
@Override
public void prepareToWrite(final RecordWriter writer) throws IOException {
    recordWriter = (MongoRecordWriter) writer;
    LOG.info("Preparing to write to " + recordWriter);
    if (recordWriter == null) {
        throw new IOException("Invalid Record Writer");
    }
    UDFContext udfc = UDFContext.getUDFContext();
    Properties p = udfc.getUDFProperties(this.getClass(), new String[] { udfContextSignature });
    String strSchema = p.getProperty(PIG_OUTPUT_SCHEMA_UDF_CONTEXT);
    if (strSchema == null) {
        throw new IOException("Could not find schema in UDF context");
    }
    try {
        // Parse the schema from the string stored in the properties object.
        this.schema = new ResourceSchema(Utils.getSchemaFromString(strSchema));
    } catch (Exception e) {
        // Previously this was only logged, leaving `schema` unset and
        // deferring the failure to write time; fail fast and keep the cause.
        throw new IOException("Unable to parse schema: " + strSchema, e);
    }
    if (options != null) {
        // If we are ensuring any indexes do so now:
        for (MongoStorageOptions.Index in : options.getIndexes()) {
            recordWriter.ensureIndex(in.index, in.options);
        }
    }
}
Use of org.apache.pig.impl.util.UDFContext in the project brisk by riptano:
class CassandraStorage, method getCfDef.
/** Retrieves the column-family definition previously stored in the UDFContext. */
private CfDef getCfDef() {
    Properties props = UDFContext.getUDFContext().getUDFProperties(CassandraStorage.class);
    String serialized = props.getProperty(getSchemaContextKey());
    return cfdefFromString(serialized);
}
Aggregations