Use of org.apache.hive.hcatalog.data.schema.HCatFieldSchema in project hive by apache.
The class HCatBaseStorer, method doSchemaValidations:
protected void doSchemaValidations(Schema pigSchema, HCatSchema tblSchema) throws FrontendException, HCatException {
  // Iterate through all the elements in the Pig schema and do validations as
  // dictated by semantics, consulting the table's HCatSchema when need be.
  // columnPos helps with debug messages.
  int columnPos = 0;
  for (FieldSchema pigField : pigSchema.getFields()) {
    HCatFieldSchema hcatField = getColFromSchema(pigField.alias, tblSchema);
    validateSchema(pigField, hcatField, pigSchema, tblSchema, columnPos++);
  }
  try {
    PigHCatUtil.validateHCatTableSchemaFollowsPigRules(tblSchema);
  } catch (IOException e) {
    throw new FrontendException("HCatalog schema is not compatible with Pig: " + e.getMessage(), PigHCatUtil.PIG_EXCEPTION_CODE, e);
  }
}
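The getColFromSchema helper is not shown in this excerpt. A minimal sketch of the lookup it presumably performs, assuming a case-insensitive match on the Pig alias that returns null when the table declares no such column (the behavior is inferred, not confirmed by this snippet):

import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hive.hcatalog.data.schema.HCatSchema;

// Hypothetical stand-in for HCatBaseStorer.getColFromSchema, shown only to
// illustrate the alias-to-column lookup; the real helper may differ.
final class ColumnLookupSketch {

  static HCatFieldSchema getColFromSchema(String pigAlias, HCatSchema tblSchema) {
    for (HCatFieldSchema tblField : tblSchema.getFields()) {
      if (tblField.getName().equalsIgnoreCase(pigAlias)) {
        return tblField;
      }
    }
    // Returning null lets validateSchema treat the Pig field as one the table does not declare.
    return null;
  }
}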
Use of org.apache.hive.hcatalog.data.schema.HCatFieldSchema in project hive by apache.
The class InternalUtil, method createStructObjectInspector:
static StructObjectInspector createStructObjectInspector(HCatSchema outputSchema) throws IOException {
  if (outputSchema == null) {
    throw new IOException("Invalid output schema specified");
  }
  List<ObjectInspector> fieldInspectors = new ArrayList<ObjectInspector>();
  List<String> fieldNames = new ArrayList<String>();
  for (HCatFieldSchema hcatFieldSchema : outputSchema.getFields()) {
    TypeInfo type = TypeInfoUtils.getTypeInfoFromTypeString(hcatFieldSchema.getTypeString());
    fieldNames.add(hcatFieldSchema.getName());
    fieldInspectors.add(getObjectInspector(type));
  }
  StructObjectInspector structInspector = ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldInspectors);
  return structInspector;
}
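InternalUtil.createStructObjectInspector is package-private, so it cannot be called from outside its package. The sketch below rebuilds the same pattern, with standard Java object inspectors standing in for the private getObjectInspector helper (an assumption; the real helper may pick different inspectors), and then walks the resulting struct fields:

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hive.hcatalog.data.schema.HCatSchema;

public class StructInspectorSketch {
  public static void main(String[] args) throws Exception {
    HCatSchema schema = new HCatSchema(new ArrayList<HCatFieldSchema>());
    schema.append(new HCatFieldSchema("id", HCatFieldSchema.Type.INT, ""));
    schema.append(new HCatFieldSchema("name", HCatFieldSchema.Type.STRING, ""));

    List<String> fieldNames = new ArrayList<String>();
    List<ObjectInspector> fieldInspectors = new ArrayList<ObjectInspector>();
    for (HCatFieldSchema field : schema.getFields()) {
      TypeInfo type = TypeInfoUtils.getTypeInfoFromTypeString(field.getTypeString());
      fieldNames.add(field.getName());
      // Standard Java inspectors used here in place of InternalUtil's private helper.
      fieldInspectors.add(TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(type));
    }
    StructObjectInspector oi =
        ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldInspectors);

    // Print each struct field's name and the type name its inspector reports.
    for (StructField sf : oi.getAllStructFieldRefs()) {
      System.out.println(sf.getFieldName() + " : " + sf.getFieldObjectInspector().getTypeName());
    }
  }
}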
Use of org.apache.hive.hcatalog.data.schema.HCatFieldSchema in project hive by apache.
The class TestSequenceFileReadWrite, method getSchema:
private HCatSchema getSchema() throws HCatException {
  HCatSchema schema = new HCatSchema(new ArrayList<HCatFieldSchema>());
  schema.append(new HCatFieldSchema("a0", HCatFieldSchema.Type.INT, ""));
  schema.append(new HCatFieldSchema("a1", HCatFieldSchema.Type.STRING, ""));
  schema.append(new HCatFieldSchema("a2", HCatFieldSchema.Type.STRING, ""));
  return schema;
}
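A small usage sketch (assumed, not part of the test) showing how a schema built this way can be queried by field name and position:

import java.util.ArrayList;

import org.apache.hive.hcatalog.common.HCatException;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hive.hcatalog.data.schema.HCatSchema;

public class SchemaLookupSketch {
  public static void main(String[] args) throws HCatException {
    HCatSchema schema = new HCatSchema(new ArrayList<HCatFieldSchema>());
    schema.append(new HCatFieldSchema("a0", HCatFieldSchema.Type.INT, ""));
    schema.append(new HCatFieldSchema("a1", HCatFieldSchema.Type.STRING, ""));
    schema.append(new HCatFieldSchema("a2", HCatFieldSchema.Type.STRING, ""));

    System.out.println(schema.getFieldNames());            // expected: [a0, a1, a2]
    System.out.println(schema.getPosition("a1"));          // expected: 1
    System.out.println(schema.get("a2").getTypeString());  // expected: string
  }
}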
Use of org.apache.hive.hcatalog.data.schema.HCatFieldSchema in project hive by apache.
The class HCatLoader, method getHCatComparisonString:
private String getHCatComparisonString(Expression expr) {
  if (expr instanceof BinaryExpression) {
    // Call getHCatComparisonString on the lhs and rhs, and join the results
    // with the OpType string. We can just use OpType.toString() on all
    // Expression types except Equal and NotEqual, since Equal has '==' in
    // toString() and we need '='.
    String opStr = null;
    switch (expr.getOpType()) {
    case OP_EQ:
      opStr = " = ";
      break;
    default:
      opStr = expr.getOpType().toString();
    }
    BinaryExpression be = (BinaryExpression) expr;
    if (be.getRhs() instanceof Const) {
      // If the expression is "column op const", try to cast the constant to a
      // string according to the data type of the column.
      UDFContext udfContext = UDFContext.getUDFContext();
      Properties udfProps = udfContext.getUDFProperties(this.getClass(), new String[] { signature });
      HCatSchema hcatTableSchema = (HCatSchema) udfProps.get(HCatConstants.HCAT_TABLE_SCHEMA);
      HCatFieldSchema fs = null;
      try {
        fs = hcatTableSchema.get(be.getLhs().toString());
      } catch (HCatException e) {
        // Shall never happen.
      }
      if (fs != null) {
        return "(" + getHCatComparisonString(be.getLhs()) + opStr + getHCatConstString((Const) be.getRhs(), fs.getType()) + ")";
      }
    }
    return "(" + getHCatComparisonString(be.getLhs()) + opStr + getHCatComparisonString(be.getRhs()) + ")";
  } else {
    // Should be a constant or a column reference.
    return expr.toString();
  }
}
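The getHCatConstString helper is not shown here. A plausible sketch, under the assumption that it quotes constants for string-like and date columns and otherwise emits the raw value (the real method may cover more types and escape embedded quotes):

import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
import org.apache.pig.Expression.Const;

// Hypothetical stand-in for HCatLoader.getHCatConstString; shown only to
// illustrate how the column type could drive quoting in the Hive filter string.
final class ConstStringSketch {

  static String getHCatConstString(Const constExpr, HCatFieldSchema.Type colType) {
    Object value = constExpr.getValue();
    switch (colType) {
    case STRING:
    case CHAR:
    case VARCHAR:
    case DATE:
      // String-like column: wrap the constant in single quotes for the Hive filter.
      return "'" + value + "'";
    default:
      return value.toString();
    }
  }
}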
Use of org.apache.hive.hcatalog.data.schema.HCatFieldSchema in project hive by apache.
The class PigHCatUtil, method getResourceSchema:
public static ResourceSchema getResourceSchema(HCatSchema hcatSchema) throws IOException {
  List<ResourceFieldSchema> rfSchemaList = new ArrayList<ResourceFieldSchema>();
  for (HCatFieldSchema hfs : hcatSchema.getFields()) {
    ResourceFieldSchema rfSchema = getResourceSchemaFromFieldSchema(hfs);
    rfSchemaList.add(rfSchema);
  }
  ResourceSchema rSchema = new ResourceSchema();
  rSchema.setFields(rfSchemaList.toArray(new ResourceFieldSchema[rfSchemaList.size()]));
  return rSchema;
}
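A minimal usage sketch (assumed): convert a small HCatSchema into a Pig ResourceSchema and print it, for example to check the HCat-to-Pig type mapping:

import java.util.ArrayList;

import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hive.hcatalog.data.schema.HCatSchema;
import org.apache.hive.hcatalog.pig.PigHCatUtil;
import org.apache.pig.ResourceSchema;

public class ResourceSchemaSketch {
  public static void main(String[] args) throws Exception {
    HCatSchema hcatSchema = new HCatSchema(new ArrayList<HCatFieldSchema>());
    hcatSchema.append(new HCatFieldSchema("id", HCatFieldSchema.Type.INT, ""));
    hcatSchema.append(new HCatFieldSchema("name", HCatFieldSchema.Type.STRING, ""));

    ResourceSchema pigSchema = PigHCatUtil.getResourceSchema(hcatSchema);
    // Expected to print something like: id:int,name:chararray
    System.out.println(pigSchema.toString());
  }
}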