Use of org.apache.pig.impl.logicalLayer.FrontendException in project hive by apache.
The class AbstractHCatStorerTest, method testNoAlias.
@Test
public void testNoAlias() throws Exception {
    AbstractHCatLoaderTest.dropTable("junit_parted", driver);
    AbstractHCatLoaderTest.createTableDefaultDB("junit_parted", "a int, b string", "ds " + "string", driver, storageFormat);
    PigServer server = createPigServer(false);
    boolean errCaught = false;
    try {
        server.setBatchOn();
        server.registerQuery("A = load '" + INPUT_FILE_NAME + "' as (a:int, b:chararray);");
        // a+10 produces a column with no alias, which HCatStorer cannot map to the table schema
        server.registerQuery("B = foreach A generate a+10, b;");
        server.registerQuery("store B into 'junit_parted' using " + HCatStorer.class.getName() + "('ds=20100101');");
        server.executeBatch();
    } catch (PigException fe) {
        PigException pe = LogUtils.getPigException(fe);
        assertTrue(pe instanceof FrontendException);
        assertEquals(PigHCatUtil.PIG_EXCEPTION_CODE, pe.getErrorCode());
        assertTrue(pe.getMessage().contains("Column name for a field is not specified. Please provide the full schema as an argument to HCatStorer."));
        errCaught = true;
    }
    assertTrue(errCaught);
    errCaught = false;
    try {
        server.setBatchOn();
        // the upper-case alias B violates HCatStorer's lowercase column-name requirement
        server.registerQuery("A = load '" + INPUT_FILE_NAME + "' as (a:int, B:chararray);");
        server.registerQuery("B = foreach A generate a, B;");
        server.registerQuery("store B into 'junit_parted' using " + HCatStorer.class.getName() + "('ds=20100101');");
        server.executeBatch();
    } catch (PigException fe) {
        PigException pe = LogUtils.getPigException(fe);
        assertTrue(pe instanceof FrontendException);
        assertEquals(PigHCatUtil.PIG_EXCEPTION_CODE, pe.getErrorCode());
        assertTrue(pe.getMessage().contains("Column names should all be in lowercase. Invalid name found: B"));
        errCaught = true;
    }
    driver.run("drop table junit_parted");
    assertTrue(errCaught);
}
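For contrast, a minimal sketch of a store that should pass both checks, assuming the same test fixtures (server, INPUT_FILE_NAME, the junit_parted table) as above: naming the computed column with an explicit lowercase alias gives HCatStorer a column name it can map to the table schema.

// Sketch, not part of the test: explicit lowercase aliases avoid both FrontendExceptions.
server.setBatchOn();
server.registerQuery("A = load '" + INPUT_FILE_NAME + "' as (a:int, b:chararray);");
// 'a+10 as a' names the computed column, so HCatStorer can resolve it
server.registerQuery("B = foreach A generate a+10 as a, b;");
server.registerQuery("store B into 'junit_parted' using " + HCatStorer.class.getName() + "('ds=20100101');");
server.executeBatch();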
Use of org.apache.pig.impl.logicalLayer.FrontendException in project parquet-mr by apache.
The class TupleConverter, method newConverter.
static Converter newConverter(FieldSchema pigField, Type type, final ParentValueContainer parent, boolean elephantBirdCompatible, boolean columnIndexAccess) {
    try {
        switch (pigField.type) {
            case DataType.BAG:
                return new BagConverter(type.asGroupType(), pigField, parent, elephantBirdCompatible, columnIndexAccess);
            case DataType.MAP:
                return new MapConverter(type.asGroupType(), pigField, parent, elephantBirdCompatible, columnIndexAccess);
            case DataType.TUPLE:
                return new TupleConverter(type.asGroupType(), pigField.schema, elephantBirdCompatible, columnIndexAccess) {
                    @Override
                    public void end() {
                        super.end();
                        parent.add(this.currentTuple);
                    }
                };
            case DataType.CHARARRAY:
                // a custom implementation will be needed for each type. Just default to no dictionary.
                return new FieldStringConverter(parent, type.getOriginalType() == OriginalType.UTF8);
            case DataType.BYTEARRAY:
                return new FieldByteArrayConverter(parent);
            case DataType.INTEGER:
                return new FieldIntegerConverter(parent);
            case DataType.BOOLEAN:
                // elephant-bird represents booleans as integers
                if (elephantBirdCompatible) {
                    return new FieldIntegerConverter(parent);
                } else {
                    return new FieldBooleanConverter(parent);
                }
            case DataType.FLOAT:
                return new FieldFloatConverter(parent);
            case DataType.DOUBLE:
                return new FieldDoubleConverter(parent);
            case DataType.LONG:
                return new FieldLongConverter(parent);
            case DataType.BIGDECIMAL:
                return new FieldBigDecimalConverter(type, parent);
            default:
                throw new TupleConversionException("unsupported pig type: " + pigField);
        }
    } catch (FrontendException e) {
        throw new TupleConversionException("error while preparing converter for:\n" + pigField + "\n" + type, e);
    } catch (RuntimeException e) {
        throw new TupleConversionException("error while preparing converter for:\n" + pigField + "\n" + type, e);
    }
}
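The two catch blocks show a recurring pattern in this code: a checked FrontendException raised while inspecting the Pig schema is rewrapped in an unchecked exception so the factory method keeps a clean signature. A minimal standalone sketch of that pattern (the helper name firstField is hypothetical; IllegalStateException stands in for TupleConversionException):

import org.apache.pig.impl.logicalLayer.FrontendException;
import org.apache.pig.impl.logicalLayer.schema.Schema;
import org.apache.pig.impl.logicalLayer.schema.Schema.FieldSchema;

// Hypothetical helper illustrating the wrap-checked-into-unchecked pattern above.
final class SchemaProbe {
    // Schema.getField(int) declares FrontendException; callers of this helper need not.
    static FieldSchema firstField(Schema schema) {
        try {
            return schema.getField(0);
        } catch (FrontendException e) {
            // same idea as TupleConversionException in newConverter
            throw new IllegalStateException("error while reading field 0 of:\n" + schema, e);
        }
    }
}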
Use of org.apache.pig.impl.logicalLayer.FrontendException in project parquet-mr by apache.
The class PigSchemaConverter, method getSimpleFieldSchema.
private FieldSchema getSimpleFieldSchema(final String fieldName, Type parquetType) throws FrontendException {
    final PrimitiveTypeName parquetPrimitiveTypeName = parquetType.asPrimitiveType().getPrimitiveTypeName();
    final OriginalType originalType = parquetType.getOriginalType();
    return parquetPrimitiveTypeName.convert(new PrimitiveTypeNameConverter<Schema.FieldSchema, FrontendException>() {

        @Override
        public FieldSchema convertFLOAT(PrimitiveTypeName primitiveTypeName) throws FrontendException {
            return new FieldSchema(fieldName, null, DataType.FLOAT);
        }

        @Override
        public FieldSchema convertDOUBLE(PrimitiveTypeName primitiveTypeName) throws FrontendException {
            return new FieldSchema(fieldName, null, DataType.DOUBLE);
        }

        @Override
        public FieldSchema convertINT32(PrimitiveTypeName primitiveTypeName) throws FrontendException {
            return new FieldSchema(fieldName, null, DataType.INTEGER);
        }

        @Override
        public FieldSchema convertINT64(PrimitiveTypeName primitiveTypeName) throws FrontendException {
            return new FieldSchema(fieldName, null, DataType.LONG);
        }

        @Override
        public FieldSchema convertINT96(PrimitiveTypeName primitiveTypeName) throws FrontendException {
            LOG.warn("Converting type " + primitiveTypeName + " to bytearray");
            return new FieldSchema(fieldName, null, DataType.BYTEARRAY);
        }

        @Override
        public FieldSchema convertFIXED_LEN_BYTE_ARRAY(PrimitiveTypeName primitiveTypeName) throws FrontendException {
            if (originalType == OriginalType.DECIMAL) {
                return new FieldSchema(fieldName, null, DataType.BIGDECIMAL);
            } else {
                return new FieldSchema(fieldName, null, DataType.BYTEARRAY);
            }
        }

        @Override
        public FieldSchema convertBOOLEAN(PrimitiveTypeName primitiveTypeName) throws FrontendException {
            return new FieldSchema(fieldName, null, DataType.BOOLEAN);
        }

        @Override
        public FieldSchema convertBINARY(PrimitiveTypeName primitiveTypeName) throws FrontendException {
            if (originalType != null && originalType == OriginalType.UTF8) {
                return new FieldSchema(fieldName, null, DataType.CHARARRAY);
            } else {
                return new FieldSchema(fieldName, null, DataType.BYTEARRAY);
            }
        }
    });
}
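The call to convert is a double dispatch: the parquet PrimitiveTypeName constant invokes the matching convertXXX callback, and the second type parameter of PrimitiveTypeNameConverter<T, E> lets every callback throw Pig's checked FrontendException without wrapping. A stripped-down sketch of that generic-exception visitor idea, with hypothetical names (TypeTag, TagVisitor):

// Hypothetical sketch of the visitor-with-typed-exception pattern: each enum
// constant dispatches to its own callback, and E lets callbacks throw a
// checked exception chosen by the caller (here, FrontendException above).
enum TypeTag {
    I32 { @Override <T, E extends Exception> T accept(TagVisitor<T, E> v) throws E { return v.visitI32(); } },
    BIN { @Override <T, E extends Exception> T accept(TagVisitor<T, E> v) throws E { return v.visitBinary(); } };

    abstract <T, E extends Exception> T accept(TagVisitor<T, E> v) throws E;
}

interface TagVisitor<T, E extends Exception> {
    T visitI32() throws E;
    T visitBinary() throws E;
}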
Use of org.apache.pig.impl.logicalLayer.FrontendException in project parquet-mr by apache.
The class PigSchemaConverter, method convertFields.
private Schema convertFields(List<Type> parquetFields) {
    List<FieldSchema> fields = new ArrayList<Schema.FieldSchema>();
    for (Type parquetType : parquetFields) {
        try {
            FieldSchema innerfieldSchema = getFieldSchema(parquetType);
            if (parquetType.isRepetition(Repetition.REPEATED)) {
                Schema bagSchema = new Schema(Arrays.asList(innerfieldSchema));
                fields.add(new FieldSchema(null, bagSchema, DataType.BAG));
            } else {
                fields.add(innerfieldSchema);
            }
        } catch (FrontendException fe) {
            throw new SchemaConversionException("can't convert " + parquetType, fe);
        }
    }
    return new Schema(fields);
}
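The REPEATED branch follows Pig's convention that a repeated parquet field surfaces as a bag whose tuples hold the single inner field. A small standalone sketch of that wrapping, using only Pig's public Schema API (the class name RepeatedToBag and the field name "x" are illustrative):

import java.util.Arrays;
import org.apache.pig.data.DataType;
import org.apache.pig.impl.logicalLayer.FrontendException;
import org.apache.pig.impl.logicalLayer.schema.Schema;
import org.apache.pig.impl.logicalLayer.schema.Schema.FieldSchema;

// Sketch: wrap a repeated int field the same way convertFields does.
public class RepeatedToBag {
    public static void main(String[] args) throws FrontendException {
        FieldSchema inner = new FieldSchema("x", DataType.INTEGER);
        Schema bagSchema = new Schema(Arrays.asList(inner));
        // an unaliased BAG field whose inner schema is the repeated element
        FieldSchema bag = new FieldSchema(null, bagSchema, DataType.BAG);
        System.out.println(new Schema(bag));  // prints the bag-of-(x: int) schema
    }
}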
Use of org.apache.pig.impl.logicalLayer.FrontendException in project parquet-mr by apache.
The class TestParquetLoader, method testProjectionPushdown.
@Test
public void testProjectionPushdown() throws Exception {
    PigServer pigServer = new PigServer(ExecType.LOCAL);
    pigServer.setValidateEachStatement(true);
    String out = "target/out";
    int rows = 10;
    Data data = Storage.resetData(pigServer);
    List<Tuple> list = new ArrayList<Tuple>();
    for (int i = 0; i < rows; i++) {
        list.add(Storage.tuple(i, "a" + i, i * 2));
    }
    data.set("in", "i:int, a:chararray, b:int", list);
    pigServer.setBatchOn();
    pigServer.registerQuery("A = LOAD 'in' USING mock.Storage();");
    pigServer.deleteFile(out);
    pigServer.registerQuery("Store A into '" + out + "' using " + ParquetStorer.class.getName() + "();");
    pigServer.executeBatch();
    List<Tuple> expectedList = new ArrayList<Tuple>();
    for (int i = 0; i < rows; i++) {
        expectedList.add(Storage.tuple("a" + i));
    }
    pigServer.registerQuery("C = LOAD '" + out + "' using " + ParquetLoader.class.getName() + "();");
    pigServer.registerQuery("D = foreach C generate a;");
    pigServer.registerQuery("Store D into 'out' using mock.Storage();");
    pigServer.executeBatch();
    List<Tuple> actualList = data.get("out");
    pigServer.registerQuery("C = LOAD '" + out + "' using " + ParquetLoader.class.getName() + "('a:chararray, b:int');");
    Assert.assertEquals("{a: chararray,b: int}", pigServer.dumpSchema("C").toString());
    try {
        pigServer.registerQuery("D = foreach C generate i;");
        Assert.fail("Frontend Exception expected");
    } catch (FrontendException fe) {
        // expected: 'i' is not part of the schema declared to ParquetLoader
    }
    // we need to reset pigServer here as there is some problem in pig
    // that causes pigServer to behave mysteriously after the exception
    // TODO investigate whether this is fixed in pig trunk
    pigServer = new PigServer(ExecType.LOCAL);
    data = Storage.resetData(pigServer);
    pigServer.setBatchOn();
    pigServer.registerQuery("C = LOAD '" + out + "' using " + ParquetLoader.class.getName() + "('a:chararray, b:int');");
    pigServer.registerQuery("D = foreach C generate a;");
    pigServer.registerQuery("Store D into 'out' using mock.Storage();");
    pigServer.executeBatch();
    actualList = data.get("out");
    Assert.assertEquals(expectedList, actualList);
}
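The test projects through a foreach after loading with a two-column schema. As the test's own last LOAD shows, ParquetLoader also accepts the requested schema directly as its constructor argument, which is the simpler way to read only the columns you need. A sketch, assuming the same pigServer, data, and 'out' directory written above (the aliases E and 'out2' are illustrative):

// Sketch: project at load time by passing a narrower schema to ParquetLoader.
pigServer.registerQuery("E = LOAD '" + out + "' using " + ParquetLoader.class.getName() + "('a:chararray');");
pigServer.registerQuery("Store E into 'out2' using mock.Storage();");
pigServer.executeBatch();
List<Tuple> projected = data.get("out2");  // each tuple holds only the 'a' column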