Use of org.apache.pig.impl.util.UDFContext in project hive by apache:
class HCatBaseLoader, method storeInUDFContext.
// helper methods

/**
 * Stashes a key/value pair in this loader's per-signature UDFContext
 * properties, keyed by the concrete loader class and the given signature.
 *
 * @param signature the loader signature that scopes the property set
 * @param key       property name to store under
 * @param value     property value (any object)
 */
protected void storeInUDFContext(String signature, String key, Object value) {
  UDFContext.getUDFContext()
      .getUDFProperties(this.getClass(), new String[] { signature })
      .put(key, value);
}
Use of org.apache.pig.impl.util.UDFContext in project pygmalion by jeromatron:
class DeleteColumns, method exec.
/**
 * Builds a two-field output tuple: field 0 is the row key (input field 0,
 * passed through unchanged) and field 1 is a bag of column definitions
 * (built via getColumnDef with null values) naming the columns to delete.
 *
 * @param input tuple whose field 0 is the row key; remaining fields are
 *              either DataBags of (name, value) column tuples or scalar
 *              values matched positionally against the cached schema
 * @return a (row key, bag of column defs) tuple
 * @throws IOException on tuple access errors
 */
public Tuple exec(Tuple input) throws IOException {
Tuple row = TupleFactory.getInstance().newTuple(2);
DataBag columns = BagFactory.getInstance().newDefaultBag();
// Field names were serialized into the UDFContext by outputSchema on the
// front end. NOTE(review): if outputSchema never ran for this class,
// fieldString is null here and split() throws NPE — confirm intended.
UDFContext context = UDFContext.getUDFContext();
Properties property = context.getUDFProperties(DeleteColumns.class);
String fieldString = property.getProperty(UDFCONTEXT_SCHEMA_KEY);
String[] fieldnames = fieldString.split(INPUT_DELIM);
// Start at 1: index 0 is the row key, not a column.
for (int i = 1; i < input.size(); i++) {
if (input.get(i) instanceof DataBag) {
// A bag of column tuples: take each tuple's first field as the column
// name and emit a delete marker (null value) for it.
for (Tuple cassandraColumn : (DataBag) input.get(i)) {
String name = cassandraColumn.get(0).toString();
columns.add(getColumnDef(name, null));
}
} else {
// Scalar field: the column name comes from the cached schema by
// position. NOTE(review): no length check here — if the input tuple
// is wider than the cached schema this throws
// ArrayIndexOutOfBoundsException; ToCassandraBag validates this,
// consider doing the same.
columns.add(getColumnDef(fieldnames[i], null));
}
}
row.set(0, input.get(0));
row.set(1, columns);
return row;
}
Use of org.apache.pig.impl.util.UDFContext in project pygmalion by jeromatron:
class DeleteColumns, method outputSchema.
/**
 * Serializes the input schema's field aliases (joined with OUTPUT_DELIM,
 * no trailing delimiter) into the UDFContext so exec() can recover the
 * field names on the back end, then delegates to the superclass.
 *
 * @param input the schema of the incoming relation
 * @return whatever the superclass computes for the output schema
 */
public Schema outputSchema(Schema input) {
StringBuilder joined = new StringBuilder();
for (Schema.FieldSchema field : input.getFields()) {
// Prepend the delimiter before every alias except the first.
if (joined.length() > 0) {
joined.append(OUTPUT_DELIM);
}
joined.append(field.alias);
}
UDFContext.getUDFContext()
.getUDFProperties(DeleteColumns.class)
.setProperty(UDFCONTEXT_SCHEMA_KEY, joined.toString());
return super.outputSchema(input);
}
Use of org.apache.pig.impl.util.UDFContext in project pygmalion by jeromatron:
class ToCassandraBag, method exec.
/**
 * Converts an input tuple into a (row key, bag of column tuples) pair for
 * writing to Cassandra. Field 0 of the input is always treated as the row
 * key; each remaining field is either a DataBag of pre-built column tuples
 * (added wholesale) or a scalar paired with the schema field name cached in
 * the UDFContext by the front end.
 *
 * @param input tuple whose field 0 is the non-null row key
 * @return a two-field tuple: (row key, DataBag of column defs)
 * @throws IOException if the row key is null or the input width does not
 *                     match the cached field-name count
 */
public Tuple exec(Tuple input) throws IOException {
Tuple row = TupleFactory.getInstance().newTuple(2);
DataBag columns = BagFactory.getInstance().newDefaultBag();
// Field names were serialized into the UDFContext on the front end.
UDFContext context = UDFContext.getUDFContext();
Properties property = context.getUDFProperties(ToCassandraBag.class);
String fieldString = property.getProperty(getSchemaKey());
// INPUT_DELIM is a compiled Pattern here (Pattern.split(CharSequence)).
String[] fieldnames = INPUT_DELIM.split(fieldString);
if (log.isDebugEnabled()) {
log.debug("Tuple: " + input.toDelimitedString(",") + " Fields: " + fieldString);
}
// IT IS ALWAYS ASSUMED THAT THE OBJECT AT INDEX 0 IS THE ROW KEY
if (input.get(0) == null) {
throw new IOException("The object at index 0 is the row key, its value can't be null!");
}
if (input.size() != fieldnames.length) {
// Fixed: the original message never closed the paren after input.size().
throw new IOException("There is a mismatch between the number of inputs (" + input.size() + ") and fieldnames (" + fieldnames.length + ")");
}
for (int i = 1; i < input.size(); i++) {
if (input.get(i) instanceof DataBag) {
// Already column tuples: merge the whole bag in.
columns.addAll((DataBag) input.get(i));
} else {
// Scalar: pair the value with its positional field name.
columns.add(getColumnDef(fieldnames[i], input.get(i)));
}
}
row.set(0, input.get(0));
row.set(1, columns);
return row;
}
Use of org.apache.pig.impl.util.UDFContext in project wonderdog by infochimps-labs:
class ElasticSearchIndex, method checkSchema.
/**
 * Check that schema is reasonable and serialize the field names as a string
 * for later use. The names are joined with COMMA, including a trailing
 * delimiter — preserved exactly as the original produced it, since the
 * back-end reader may depend on that format.
 *
 * @param s the resource schema provided by Pig at store time
 * @throws IOException declared for the StoreFunc contract
 */
@Override
public void checkSchema(ResourceSchema s) throws IOException {
UDFContext context = UDFContext.getUDFContext();
Properties property = context.getUDFProperties(ResourceSchema.class);
// StringBuilder instead of String += in a loop: avoids O(n^2) copying.
StringBuilder fieldNames = new StringBuilder();
for (String field : s.fieldNames()) {
fieldNames.append(field).append(COMMA);
}
property.setProperty(PIG_ES_FIELD_NAMES, fieldNames.toString());
}
Aggregations