Use of org.apache.phoenix.schema.PColumn in project phoenix by apache.
The class PhoenixRuntime, method encodeValues.
/**
 * @param conn connection that was used for reading/generating the values
 * @param fullTableName fully qualified table name
 * @param values values of the columns
 * @param columns list of column pairs, where the first part is the column family and the second part is the column name.
 *            The column family is optional and hence nullable. Columns in the list have to be in the same order as the
 *            order of occurrence of their values in the object array.
 * @return values encoded in a byte array
 * @throws SQLException
 * @see #decodeValues(Connection, String, byte[], List)
 */
@Deprecated
public static byte[] encodeValues(Connection conn, String fullTableName, Object[] values, List<Pair<String, String>> columns) throws SQLException {
    PTable table = getTable(conn, fullTableName);
    List<PColumn> pColumns = getPColumns(table, columns);
    List<Expression> expressions = new ArrayList<Expression>(pColumns.size());
    int i = 0;
    for (PColumn col : pColumns) {
        Object value = values[i];
        // for purposes of encoding, sort order of the columns doesn't matter.
        Expression expr = LiteralExpression.newConstant(value, col.getDataType(), col.getMaxLength(), col.getScale());
        expressions.add(expr);
        i++;
    }
    KeyValueSchema kvSchema = buildKeyValueSchema(pColumns);
    ImmutableBytesWritable ptr = new ImmutableBytesWritable();
    ValueBitSet valueSet = ValueBitSet.newInstance(kvSchema);
    return kvSchema.toBytes(expressions.toArray(new Expression[0]), valueSet, ptr);
}
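A minimal round-trip sketch of encodeValues together with the decodeValues method referenced in the Javadoc. The table MY_SCHEMA.EVENTS, its columns ID (BIGINT primary key, hence a null column family) and CF1.PAYLOAD (VARCHAR), and the JDBC URL are hypothetical and only serve to illustrate the call shape:

import java.sql.Connection;
import java.sql.DriverManager;
import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hbase.util.Pair;
import org.apache.phoenix.util.PhoenixRuntime;

public class EncodeValuesExample {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost")) {
            // First part of each pair is the (nullable) column family, second part is the column name.
            List<Pair<String, String>> columns = Arrays.asList(
                    new Pair<String, String>(null, "ID"),
                    new Pair<String, String>("CF1", "PAYLOAD"));
            // Values must appear in the same order as the column list.
            Object[] values = new Object[] { 42L, "hello" };
            // Both methods are marked @Deprecated, as shown above, but still encode/decode against the table schema.
            byte[] encoded = PhoenixRuntime.encodeValues(conn, "MY_SCHEMA.EVENTS", values, columns);
            Object[] decoded = PhoenixRuntime.decodeValues(conn, "MY_SCHEMA.EVENTS", encoded, columns);
            System.out.println(Arrays.toString(decoded));
        }
    }
}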
Use of org.apache.phoenix.schema.PColumn in project phoenix by apache.
The class PhoenixRuntime, method generateColumnInfo.
/**
 * Get list of ColumnInfos that contain Column Name and its associated
 * PDataType for an import. The supplied list of columns can be null -- if it is non-null,
 * it represents a user-supplied list of columns to be imported.
 *
 * @param conn Phoenix connection from which metadata will be read
 * @param tableName Phoenix table name whose columns are to be checked. Can include a schema name
 * @param columns user-supplied list of import columns, can be null
 */
public static List<ColumnInfo> generateColumnInfo(Connection conn, String tableName, List<String> columns) throws SQLException {
    PTable table = PhoenixRuntime.getTable(conn, SchemaUtil.normalizeFullTableName(tableName));
    List<ColumnInfo> columnInfoList = Lists.newArrayList();
    Set<String> unresolvedColumnNames = new TreeSet<String>();
    if (columns == null || columns.isEmpty()) {
        // use all columns in the table
        int offset = (table.getBucketNum() == null ? 0 : 1);
        for (int i = offset; i < table.getColumns().size(); i++) {
            PColumn pColumn = table.getColumns().get(i);
            columnInfoList.add(PhoenixRuntime.getColumnInfo(pColumn));
        }
    } else {
        // Collect any column names that cannot be resolved so they can be reported together below.
        for (int i = 0; i < columns.size(); i++) {
            String columnName = columns.get(i);
            try {
                ColumnInfo columnInfo = PhoenixRuntime.getColumnInfo(table, columnName);
                columnInfoList.add(columnInfo);
            } catch (ColumnNotFoundException cnfe) {
                unresolvedColumnNames.add(columnName);
            } catch (AmbiguousColumnException ace) {
                unresolvedColumnNames.add(columnName);
            }
        }
    }
    // If there exist columns that cannot be resolved, error out.
    if (unresolvedColumnNames.size() > 0) {
        StringBuilder exceptionMessage = new StringBuilder();
        boolean first = true;
        exceptionMessage.append("Unable to resolve these column names:\n");
        for (String col : unresolvedColumnNames) {
            if (first) {
                first = false;
            } else {
                exceptionMessage.append(",");
            }
            exceptionMessage.append(col);
        }
        exceptionMessage.append("\nAvailable columns with column families:\n");
        first = true;
        for (PColumn pColumn : table.getColumns()) {
            if (first) {
                first = false;
            } else {
                exceptionMessage.append(",");
            }
            exceptionMessage.append(pColumn.toString());
        }
        throw new SQLException(exceptionMessage.toString());
    }
    return columnInfoList;
}
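For illustration, a small driver that resolves a user-supplied column list before an import. The table MY_SCHEMA.EVENTS, the column names ID and PAYLOAD, and the JDBC URL are assumptions; passing null instead of the list would select every column of the table, and unknown or ambiguous names would surface in the SQLException built above:

import java.sql.Connection;
import java.sql.DriverManager;
import java.util.Arrays;
import java.util.List;

import org.apache.phoenix.util.ColumnInfo;
import org.apache.phoenix.util.PhoenixRuntime;

public class GenerateColumnInfoExample {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost")) {
            // Restrict the import to two columns of the (hypothetical) table.
            List<String> importColumns = Arrays.asList("ID", "PAYLOAD");
            List<ColumnInfo> infos =
                    PhoenixRuntime.generateColumnInfo(conn, "MY_SCHEMA.EVENTS", importColumns);
            for (ColumnInfo info : infos) {
                // ColumnInfo's toString() includes the column name and its SQL type.
                System.out.println(info);
            }
        }
    }
}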
Use of org.apache.phoenix.schema.PColumn in project phoenix by apache.
The class IndexUtil, method getViewConstants.
public static byte[][] getViewConstants(PTable dataTable) {
    if (dataTable.getType() != PTableType.VIEW && dataTable.getType() != PTableType.PROJECTED)
        return null;
    int dataPosOffset = (dataTable.getBucketNum() != null ? 1 : 0) + (dataTable.isMultiTenant() ? 1 : 0);
    ImmutableBytesWritable ptr = new ImmutableBytesWritable();
    List<byte[]> viewConstants = new ArrayList<byte[]>();
    List<PColumn> dataPkColumns = dataTable.getPKColumns();
    for (int i = dataPosOffset; i < dataPkColumns.size(); i++) {
        PColumn dataPKColumn = dataPkColumns.get(i);
        if (dataPKColumn.getViewConstant() != null) {
            if (IndexUtil.getViewConstantValue(dataPKColumn, ptr)) {
                viewConstants.add(ByteUtil.copyKeyBytesIfNecessary(ptr));
            } else {
                throw new IllegalStateException();
            }
        }
    }
    return viewConstants.isEmpty() ? null : viewConstants.toArray(new byte[viewConstants.size()][]);
}
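A sketch of how this could be invoked from client code, assuming a view MY_SCHEMA.EVENTS_V whose WHERE clause pins one or more primary-key columns of the base table to fixed values; the view name and JDBC URL are hypothetical:

import java.sql.Connection;
import java.sql.DriverManager;

import org.apache.phoenix.schema.PTable;
import org.apache.phoenix.util.IndexUtil;
import org.apache.phoenix.util.PhoenixRuntime;

public class ViewConstantsExample {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost")) {
            PTable view = PhoenixRuntime.getTable(conn, "MY_SCHEMA.EVENTS_V");
            byte[][] constants = IndexUtil.getViewConstants(view);
            // null means the table is neither a VIEW nor a PROJECTED table, or no PK column carries a view constant.
            System.out.println(constants == null ? "no view constants" : constants.length + " view constant(s)");
        }
    }
}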
Use of org.apache.phoenix.schema.PColumn in project phoenix by apache.
The class IndexUtil, method getDataColumns.
/**
 * Return a list of {@code PColumn} for the associated data columns given the corresponding index columns.
 * For a tenant-specific view, the connection needs to be tenant-specific too.
 * @param dataTableName
 * @param indexColumns
 * @param conn
 * @return
 * @throws TableNotFoundException if the table cannot be found in the connection's metadata cache
 */
public static List<PColumn> getDataColumns(String dataTableName, List<PColumn> indexColumns, PhoenixConnection conn) throws SQLException {
    PTable dataTable = getTable(conn, dataTableName);
    List<PColumn> dataColumns = new ArrayList<PColumn>(indexColumns.size());
    for (PColumn indexColumn : indexColumns) {
        dataColumns.add(getDataColumn(dataTable, indexColumn.getName().getString()));
    }
    return dataColumns;
}
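As an illustrative sketch, the method can map the PK columns of a simple (non-salted, non-tenant-specific) index back to the columns of its data table; getDataColumn resolves Phoenix's internal index column names. The index MY_SCHEMA.EVENTS_IDX on data table MY_SCHEMA.EVENTS and the JDBC URL are assumptions:

import java.sql.DriverManager;
import java.util.List;

import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.schema.PColumn;
import org.apache.phoenix.schema.PTable;
import org.apache.phoenix.util.IndexUtil;
import org.apache.phoenix.util.PhoenixRuntime;

public class DataColumnsExample {
    public static void main(String[] args) throws Exception {
        try (PhoenixConnection conn = DriverManager.getConnection("jdbc:phoenix:localhost")
                .unwrap(PhoenixConnection.class)) {
            // Hypothetical index created on the hypothetical data table MY_SCHEMA.EVENTS.
            PTable index = PhoenixRuntime.getTable(conn, "MY_SCHEMA.EVENTS_IDX");
            // Resolve each index PK column back to the corresponding data-table column.
            List<PColumn> dataColumns =
                    IndexUtil.getDataColumns("MY_SCHEMA.EVENTS", index.getPKColumns(), conn);
            for (PColumn col : dataColumns) {
                System.out.println(col.getName().getString());
            }
        }
    }
}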
Use of org.apache.phoenix.schema.PColumn in project DataX by alibaba.
The class HbaseSQLReaderTask, method constructRecordFromPhoenix.
private void constructRecordFromPhoenix(Record record, Map<String, Object> phoenixRecord) throws IOException {
    for (Map.Entry<String, PColumn> pColumnItem : this.pColumns.entrySet()) {
        Column column = this.convertPhoenixValueToDataxColumn(
                pColumnItem.getValue().getDataType().getSqlType(),
                phoenixRecord.get(pColumnItem.getKey()));
        record.addColumn(column);
    }
}
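convertPhoenixValueToDataxColumn is project-internal and not shown here. As a rough, assumption-laden sketch, a conversion of that shape might switch on the JDBC SQL type returned by PColumn.getDataType().getSqlType() and wrap the raw value in a matching DataX column; the DataX column classes and constructors used below (from com.alibaba.datax.common.element) are assumed, and only a small subset of types is handled:

import java.sql.Types;

import com.alibaba.datax.common.element.BoolColumn;
import com.alibaba.datax.common.element.Column;
import com.alibaba.datax.common.element.DoubleColumn;
import com.alibaba.datax.common.element.LongColumn;
import com.alibaba.datax.common.element.StringColumn;

public class PhoenixValueConverterSketch {

    // Wrap a raw Phoenix value in a DataX Column based on its JDBC SQL type.
    static Column toDataxColumn(int sqlType, Object value) {
        if (value == null) {
            // Represent SQL NULL as a null string column for simplicity.
            return new StringColumn((String) null);
        }
        switch (sqlType) {
            case Types.TINYINT:
            case Types.SMALLINT:
            case Types.INTEGER:
            case Types.BIGINT:
                return new LongColumn(((Number) value).longValue());
            case Types.FLOAT:
            case Types.DOUBLE:
            case Types.DECIMAL:
                return new DoubleColumn(((Number) value).doubleValue());
            case Types.BOOLEAN:
                return new BoolColumn((Boolean) value);
            default:
                // Fall back to the string representation for any other type.
                return new StringColumn(value.toString());
        }
    }
}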