Example usage of org.apache.phoenix.util.ColumnInfo in the Apache Phoenix project:
the delete method of the class PhoenixResultWritable.
/**
 * Binds this record's primary-key values to the positional parameters of the
 * given DELETE statement, one parameter per primary-key column.
 *
 * <p>Columns beyond the end of {@code valueList} are bound as SQL NULL. On any
 * failure the offending column metadata and value are logged before the
 * exception is rethrown unchanged.
 *
 * @param statement prepared DELETE with one parameter per primary-key column
 * @throws SQLException if binding any parameter fails
 */
public void delete(PreparedStatement statement) throws SQLException {
ColumnInfo currentColumn = null;
Object currentValue = null;
try {
final int pkCount = primaryKeyColumnList.size();
for (int idx = 0; idx < pkCount; idx++) {
currentColumn = columnMetadataList.get(idx);
currentValue = (idx < valueList.size()) ? valueList.get(idx) : null;
if (currentValue != null) {
statement.setObject(idx + 1, currentValue, currentColumn.getSqlType());
} else {
statement.setNull(idx + 1, currentColumn.getSqlType());
}
}
} catch (SQLException | RuntimeException e) {
// Log the column/value pair that was being bound when the failure occurred.
LOG.error("[column-info, value] : " + currentColumn + ", " + currentValue);
throw e;
}
}
Example usage of org.apache.phoenix.util.ColumnInfo in the Apache Phoenix project:
the testValidSelectQuery method of the class SqlQueryToColumnInfoFunctionTest.
/**
 * Verifies that a SELECT with lower-case aliases and a function-wrapped column
 * maps to the expected ColumnInfo list (aliases come back upper-cased).
 */
@Test
public void testValidSelectQuery() throws SQLException {
String ddl = "CREATE TABLE EMPLOYEE " + " (id integer not null, name varchar, age integer,location varchar " + " CONSTRAINT pk PRIMARY KEY (id))\n";
createTestTable(getUrl(), ddl);
final String selectQuery = "SELECT name as a ,age AS b,UPPER(location) AS c FROM EMPLOYEE";
// Expected metadata: each projected column keyed by its (upper-cased) alias.
final List<ColumnInfo> expectedColumnInfos = ImmutableList.of(
new ColumnInfo("A", Types.VARCHAR),
new ColumnInfo("B", Types.INTEGER),
new ColumnInfo("C", Types.VARCHAR));
final List<ColumnInfo> actualColumnInfos = function.apply(selectQuery);
Assert.assertEquals(expectedColumnInfos, actualColumnInfos);
}
Example usage of org.apache.phoenix.util.ColumnInfo in the Apache Phoenix project:
the write method of the class PhoenixRecordWritable.
/**
 * Binds every value in {@code upsertValues} to the matching positional parameter
 * of the given UPSERT statement, converting dates and primitive arrays into
 * forms Phoenix accepts.
 *
 * @param statement prepared UPSERT with one parameter per entry in
 *     {@code columnMetaDataList}
 * @throws SQLException if binding any parameter fails
 * @throws UnsupportedOperationException if the value count does not match the
 *     column metadata count
 */
@Override
public void write(PreparedStatement statement) throws SQLException {
// make sure we at least line up in size
if (upsertValues.size() != columnMetaDataList.size()) {
throw new UnsupportedOperationException("Provided " + upsertValues.size() + " upsert values, but column metadata expects " + columnMetaDataList.size() + " columns.");
}
// correlate each value (v) to a column type (c) and an index (i)
for (int i = 0; i < upsertValues.size(); i++) {
Object v = upsertValues.get(i);
ColumnInfo c = columnMetaDataList.get(i);
// NULLs are bound with the column's declared SQL type.
if (v == null) {
statement.setNull(i + 1, c.getSqlType());
continue;
}
// both Java and Joda dates used to work in 4.2.3, but now they must be java.sql.Date
// can override any other types here as needed
final Object finalObj;
final PDataType<?> finalType;
// Joda DateTime is checked first; both date branches normalize to java.sql.Date
// and are bound as PDate. NOTE(review): java.sql.Date nominally carries only the
// date portion -- confirm downstream PDate handling if sub-day precision matters.
if (v instanceof DateTime) {
finalObj = new java.sql.Date(((DateTime) v).getMillis());
finalType = PDate.INSTANCE;
} else if (v instanceof java.util.Date) {
finalObj = new java.sql.Date(((java.util.Date) v).getTime());
finalType = PDate.INSTANCE;
} else {
// Any other value keeps its object form and the column's declared Phoenix type.
finalObj = v;
finalType = c.getPDataType();
}
// Object arrays go through Phoenix's array binding.
if (finalObj instanceof Object[]) {
setArrayInStatement(statement, finalType, (Object[]) finalObj, i + 1);
} else if (finalObj instanceof byte[]) {
// PVarbinary and PBinary are provided as byte[] but are treated as SQL objects
if (PDataType.equalsAny(finalType, PVarbinary.INSTANCE, PBinary.INSTANCE)) {
statement.setObject(i + 1, finalObj);
} else {
// otherwise set as array type
setArrayInStatement(statement, finalType, primativeArrayToObjectArray((byte[]) finalObj), i + 1);
}
// Remaining primitive arrays are boxed element-by-element, then bound as arrays.
} else if (finalObj instanceof short[]) {
setArrayInStatement(statement, finalType, primativeArrayToObjectArray((short[]) finalObj), i + 1);
} else if (finalObj instanceof int[]) {
setArrayInStatement(statement, finalType, primativeArrayToObjectArray((int[]) finalObj), i + 1);
} else if (finalObj instanceof long[]) {
setArrayInStatement(statement, finalType, primativeArrayToObjectArray((long[]) finalObj), i + 1);
} else if (finalObj instanceof float[]) {
setArrayInStatement(statement, finalType, primativeArrayToObjectArray((float[]) finalObj), i + 1);
} else if (finalObj instanceof double[]) {
setArrayInStatement(statement, finalType, primativeArrayToObjectArray((double[]) finalObj), i + 1);
} else {
// Scalars (String, numbers, java.sql.Date, ...) are bound directly.
statement.setObject(i + 1, finalObj);
}
}
}
Example usage of org.apache.phoenix.util.ColumnInfo in the Apache Phoenix project:
the putNext method of the class PhoenixHBaseStorage.
/**
 * Converts one Pig tuple into a PhoenixRecordWritable and hands it to the
 * underlying record writer.
 *
 * <p>Null fields are passed through as-is; non-null fields are cast from their
 * Pig type to the Phoenix type declared for the corresponding column. An
 * InterruptedException re-interrupts the thread and surfaces as a
 * RuntimeException; a SQLException is logged and wrapped in an IOException.
 *
 * @param t the Pig tuple to write
 * @throws IOException if the underlying write fails with a SQLException
 */
@Override
public void putNext(Tuple t) throws IOException {
ResourceFieldSchema[] fieldSchemas = (schema == null) ? null : schema.getFields();
PhoenixRecordWritable record = new PhoenixRecordWritable(this.columnInfo);
try {
final int fieldCount = t.size();
for (int pos = 0; pos < fieldCount; pos++) {
Object raw = t.get(pos);
if (raw != null) {
ColumnInfo meta = this.columnInfo.get(pos);
// Prefer the declared schema type; fall back to runtime type discovery.
byte pigType = (fieldSchemas != null) ? fieldSchemas[pos].getType() : DataType.findType(raw);
PDataType targetType = PDataType.fromTypeId(meta.getSqlType());
record.add(TypeUtil.castPigTypeToPhoenix(raw, pigType, targetType));
} else {
record.add(null);
}
}
this.writer.write(null, record);
} catch (InterruptedException e) {
// Restore the interrupt flag before escalating.
Thread.currentThread().interrupt();
throw new RuntimeException(e);
} catch (SQLException e) {
LOG.error("Error on tuple {} .", t);
throw new IOException(e);
}
}
Example usage of org.apache.phoenix.util.ColumnInfo in the Apache Phoenix project:
the testBuildColumnInfoList_ContainingNulls method of the class FormatToBytesWritableMapperTest.
/**
 * Verifies that a column-info list containing null entries (null = "skip this
 * input column") survives a round trip through the Hadoop Configuration.
 */
@Test
public void testBuildColumnInfoList_ContainingNulls() {
// A null value in the column info list means "skip that column in the input"
List<ColumnInfo> withNulls = Lists.newArrayList(
new ColumnInfo("idCol", PInteger.INSTANCE.getSqlType()),
null,
new ColumnInfo("unsignedIntCol", PUnsignedInt.INSTANCE.getSqlType()),
new ColumnInfo("stringArrayCol", PIntegerArray.INSTANCE.getSqlType()));
Configuration conf = new Configuration();
FormatToBytesWritableMapper.configureColumnInfoList(conf, withNulls);
List<ColumnInfo> roundTripped = FormatToBytesWritableMapper.buildColumnInfoList(conf);
assertEquals(withNulls, roundTripped);
}
Aggregations