Use of org.apache.pig.impl.util.UDFContext in project phoenix (by Apache) — class PhoenixHBaseLoader, method storeInUDFContext.
/**
 * Stores a key/value pair in the UDFContext properties scoped to this loader
 * (keyed by the loader class and its Pig-assigned signature), so the value is
 * carried from the Pig front end to the back end.
 *
 * @param signature the unique signature Pig assigned to this loader instance
 * @param key       property name to store
 * @param value     property value to store
 */
private void storeInUDFContext(final String signature, final String key, final String value) {
    final UDFContext udfContext = UDFContext.getUDFContext();
    final Properties props = udfContext.getUDFProperties(this.getClass(), new String[] { signature });
    // setProperty is the String-safe Properties API; the raw Hashtable put()
    // bypasses that guarantee and is discouraged by the Properties javadoc.
    props.setProperty(key, value);
}
Use of org.apache.pig.impl.util.UDFContext in project phoenix (by Apache) — class PhoenixHBaseLoader, method getValueFromUDFContext.
/**
 * Retrieves a value previously stored in the UDFContext properties that are
 * scoped to this loader class and its Pig-assigned signature.
 *
 * @param signature the unique signature Pig assigned to this loader instance
 * @param key       property name to look up
 * @return the stored value, or {@code null} if nothing was stored under the key
 */
private String getValueFromUDFContext(final String signature, final String key) {
    return UDFContext.getUDFContext()
            .getUDFProperties(this.getClass(), new String[] { signature })
            .getProperty(key);
}
Use of org.apache.pig.impl.util.UDFContext in project eiger (by wlloyd) — class CassandraStorage, method getCfDef.
/**
 * Reconstructs the column-family definition that initSchema serialized into
 * the UDFContext properties under the given signature.
 *
 * @param signature key under which the serialized CfDef was cached
 * @return the deserialized column-family definition
 */
private CfDef getCfDef(String signature)
{
    Properties udfProps = UDFContext.getUDFContext().getUDFProperties(CassandraStorage.class);
    String serialized = udfProps.getProperty(signature);
    return cfdefFromString(serialized);
}
Use of org.apache.pig.impl.util.UDFContext in project eiger (by wlloyd) — class CassandraStorage, method initSchema.
/* Methods to get the column family schema from Cassandra */
/**
 * Fetches the definition of {@code column_family} from Cassandra (via Thrift)
 * and caches its serialized form in the UDFContext properties under
 * {@code signature}; getCfDef later reads it back. A second call with the same
 * signature is a no-op because the cache key already exists.
 * <p>
 * NOTE(review): if no column family in the keyspace matches, {@code cfDef}
 * remains null and {@code cfdefToString(null)} is invoked — confirm that
 * helper tolerates a null argument.
 *
 * @param signature cache key for the serialized column-family definition
 */
private void initSchema(String signature) {
UDFContext context = UDFContext.getUDFContext();
Properties property = context.getUDFProperties(CassandraStorage.class);
// Only get the schema if we haven't already gotten it
if (!property.containsKey(signature)) {
Cassandra.Client client = null;
try {
// Open a Thrift client against the configured input address list.
client = ConfigHelper.getClientFromInputAddressList(conf);
CfDef cfDef = null;
client.set_keyspace(keyspace);
KsDef ksDef = client.describe_keyspace(keyspace);
List<CfDef> defs = ksDef.getCf_defs();
// Case-insensitive scan of the keyspace's column families for a name match.
for (CfDef def : defs) {
if (column_family.equalsIgnoreCase(def.getName())) {
cfDef = def;
break;
}
}
// Cache the serialized definition for reuse by getCfDef.
property.setProperty(signature, cfdefToString(cfDef));
} catch (TException e) {
// Wrap all Thrift/Cassandra/I-O failures as unchecked; callers cannot recover.
throw new RuntimeException(e);
} catch (InvalidRequestException e) {
throw new RuntimeException(e);
} catch (NotFoundException e) {
throw new RuntimeException(e);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
Use of org.apache.pig.impl.util.UDFContext in project hive (by Apache) — class HCatLoader, method setLocation.
/**
 * Pig entry point (called on both front end and back end, possibly many
 * times) that configures the Hadoop job to read the HCat table named by
 * {@code location} ("dbname.tablename"). On the first front-end call it runs
 * HCatInputFormat.setInput, records every configuration key that call added
 * into the UDF properties, and stashes the job credentials; on subsequent
 * calls it replays those saved properties and credentials instead of hitting
 * the metastore again. Finally it pushes any column projection (recorded
 * earlier by pushProjection) down to HCatInputFormat.
 *
 * @param location "dbname.tablename" string from the Pig LOAD statement
 * @param job      the job whose configuration is being populated
 * @throws IOException if schema conversion or HCat input setup fails
 */
@Override
public void setLocation(String location, Job job) throws IOException {
// Force tinyint/smallint to be promoted so Pig (which lacks those types) can read them.
HCatContext.INSTANCE.setConf(job.getConfiguration()).getConf().get().setBoolean(HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION, true);
UDFContext udfContext = UDFContext.getUDFContext();
Properties udfProps = udfContext.getUDFProperties(this.getClass(), new String[] { signature });
job.getConfiguration().set(INNER_SIGNATURE, INNER_SIGNATURE_PREFIX + "_" + signature);
Pair<String, String> dbTablePair = PigHCatUtil.getDBTableNames(location);
dbName = dbTablePair.first;
tableName = dbTablePair.second;
// Projection info was stored by an earlier pushProjection call, if any.
RequiredFieldList requiredFieldsInfo = (RequiredFieldList) udfProps.get(PRUNE_PROJECTION_INFO);
// the Configuration
if (udfProps.containsKey(HCatConstants.HCAT_PIG_LOADER_LOCATION_SET)) {
// Location was already set on a prior call: replay the saved config keys.
for (Enumeration<Object> emr = udfProps.keys(); emr.hasMoreElements(); ) {
PigHCatUtil.getConfigFromUDFProperties(udfProps, job.getConfiguration(), emr.nextElement().toString());
}
if (!HCatUtil.checkJobContextIfRunningFromBackend(job)) {
//Combine credentials and credentials from job takes precedence for freshness
Credentials crd = jobCredentials.get(INNER_SIGNATURE_PREFIX + "_" + signature);
job.getCredentials().addAll(crd);
}
} else {
// First call: snapshot the configuration BEFORE setInput mutates it, so we
// can diff and save only the keys setInput added.
Job clone = new Job(job.getConfiguration());
HCatInputFormat.setInput(job, dbName, tableName, getPartitionFilterString());
InputJobInfo inputJobInfo = (InputJobInfo) HCatUtil.deserialize(job.getConfiguration().get(HCatConstants.HCAT_KEY_JOB_INFO));
SpecialCases.addSpecialCasesParametersForHCatLoader(job.getConfiguration(), inputJobInfo.getTableInfo());
//be called many times.
for (Entry<String, String> keyValue : job.getConfiguration()) {
String oldValue = clone.getConfiguration().getRaw(keyValue.getKey());
// Save keys that are new or whose value changed relative to the snapshot.
if ((oldValue == null) || (keyValue.getValue().equals(oldValue) == false)) {
udfProps.put(keyValue.getKey(), keyValue.getValue());
}
}
udfProps.put(HCatConstants.HCAT_PIG_LOADER_LOCATION_SET, true);
//Store credentials in a private hash map and not the udf context to
// make sure they are not public.
Credentials crd = new Credentials();
crd.addAll(job.getCredentials());
jobCredentials.put(INNER_SIGNATURE_PREFIX + "_" + signature, crd);
}
if (requiredFieldsInfo != null) {
// convert to hcatschema and pass to HCatInputFormat
try {
//push down projections to columnar store works for RCFile and ORCFile
ArrayList<Integer> list = new ArrayList<Integer>(requiredFieldsInfo.getFields().size());
for (RequiredField rf : requiredFieldsInfo.getFields()) {
list.add(rf.getIndex());
}
ColumnProjectionUtils.setReadColumns(job.getConfiguration(), list);
outputSchema = phutil.getHCatSchema(requiredFieldsInfo.getFields(), signature, this.getClass());
HCatInputFormat.setOutputSchema(job, outputSchema);
} catch (Exception e) {
throw new IOException(e);
}
} else {
// else - this means pig's optimizer never invoked the pushProjection
// method - so we need all fields and hence we should not call the
// setOutputSchema on HCatInputFormat
ColumnProjectionUtils.setReadAllColumns(job.getConfiguration());
if (HCatUtil.checkJobContextIfRunningFromBackend(job)) {
try {
// Back end: reuse the full table schema cached in the UDF properties.
HCatSchema hcatTableSchema = (HCatSchema) udfProps.get(HCatConstants.HCAT_TABLE_SCHEMA);
outputSchema = hcatTableSchema;
HCatInputFormat.setOutputSchema(job, outputSchema);
} catch (Exception e) {
throw new IOException(e);
}
}
}
if (LOG.isDebugEnabled()) {
LOG.debug("outputSchema=" + outputSchema);
}
}
Aggregations