Use of org.apache.cassandra.db.marshal.AbstractType in project eiger by wlloyd.
Class SSTableExport, method serializeColumn:
/**
 * Serialize a given column to the JSON format
 *
 * @param column column presentation
 * @param comparator columns comparator
 * @param cfMetaData Column Family metadata (to get validator)
 *
 * @return column as serialized list
 */
private static List<Object> serializeColumn(IColumn column, AbstractType comparator, CFMetaData cfMetaData) {
    ArrayList<Object> serializedColumn = new ArrayList<Object>();
    ByteBuffer name = ByteBufferUtil.clone(column.name());
    ByteBuffer value = ByteBufferUtil.clone(column.value());
    serializedColumn.add(comparator.getString(name));
    if (column instanceof DeletedColumn) {
        serializedColumn.add(ByteBufferUtil.bytesToHex(value));
    } else {
        AbstractType validator = cfMetaData.getValueValidator(name);
        serializedColumn.add(validator.getString(value));
    }
    serializedColumn.add(column.timestamp());
    if (column instanceof DeletedColumn) {
        serializedColumn.add("d");
    } else if (column instanceof ExpiringColumn) {
        serializedColumn.add("e");
        serializedColumn.add(((ExpiringColumn) column).getTimeToLive());
        serializedColumn.add(column.getLocalDeletionTime());
    } else if (column instanceof CounterColumn) {
        serializedColumn.add("c");
        serializedColumn.add(((CounterColumn) column).timestampOfLastDelete());
    }
    return serializedColumn;
}
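The returned list mirrors the JSON array written for each column: the name rendered through the comparator, the value (hex-encoded for deleted columns, whose value bytes encode the local deletion time), the timestamp, and then a marker plus extra fields for deleted ("d"), expiring ("e"), and counter ("c") columns. A dependency-free sketch of those four shapes, using made-up names, values, and timestamps, purely to illustrate the layout:

import java.util.Arrays;
import java.util.List;

// Illustration only: the element layouts serializeColumn emits for each
// column kind, with hypothetical data. The real method builds the same
// layouts from IColumn instances.
public class SerializedColumnShapes {
    public static void main(String[] args) {
        List<Object> live     = Arrays.<Object>asList("name", "value", 1656000000000L);
        List<Object> deleted  = Arrays.<Object>asList("name", "00000000", 1656000000000L, "d");
        List<Object> expiring = Arrays.<Object>asList("name", "value", 1656000000000L, "e", 86400, 1656086400);
        List<Object> counter  = Arrays.<Object>asList("name", "value", 1656000000000L, "c", 0L);
        System.out.println(live);
        System.out.println(deleted);
        System.out.println(expiring);
        System.out.println(counter);
    }
}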
Use of org.apache.cassandra.db.marshal.AbstractType in project eiger by wlloyd.
Class Column, method validateName:
protected void validateName(CFMetaData metadata) throws MarshalException {
    AbstractType nameValidator = metadata.cfType == ColumnFamilyType.Super ? metadata.subcolumnComparator : metadata.comparator;
    nameValidator.validate(name());
}
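The name validator is the subcolumn comparator for super column families and the column family comparator otherwise; validate() throws MarshalException when the bytes are not a legal value for the type. A minimal sketch of that failure mode, assuming a classpath where MarshalException lives in org.apache.cassandra.db.marshal as in the code above (it moved to a different package in later Cassandra versions):

import java.nio.ByteBuffer;
import org.apache.cassandra.db.marshal.LongType;
import org.apache.cassandra.db.marshal.MarshalException;

// Sketch: a 3-byte buffer is not a valid 8-byte long value, so LongType
// rejects it with a MarshalException.
public class NameValidationSketch {
    public static void main(String[] args) {
        ByteBuffer notALong = ByteBuffer.wrap(new byte[] { 1, 2, 3 });
        try {
            LongType.instance.validate(notALong);
            System.out.println("accepted");
        } catch (MarshalException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}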
Use of org.apache.cassandra.db.marshal.AbstractType in project stargate-core by tuplejump.
Class Fields, method toString:
public static String toString(ByteBuffer byteBuffer, AbstractType<?> type) {
    if (type instanceof CompositeType) {
        CompositeType composite = (CompositeType) type;
        List<AbstractType<?>> types = composite.types;
        ByteBuffer[] components = composite.split(byteBuffer);
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < components.length; i++) {
            AbstractType<?> componentType = types.get(i);
            ByteBuffer component = components[i];
            sb.append(componentType.compose(component));
            if (i < types.size() - 1) {
                sb.append(':');
            }
        }
        return sb.toString();
    } else {
        return type.compose(byteBuffer).toString();
    }
}
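A hypothetical call exercising the non-composite branch, assuming the stargate-core Fields class above is on the classpath (its package is not shown in the listing, so that import is omitted): the value is decomposed to its ByteBuffer form with the same AbstractType and rendered back as text.

import java.nio.ByteBuffer;
import org.apache.cassandra.db.marshal.UTF8Type;

// Illustrative usage of the helper above with a simple (non-composite) type.
public class FieldsToStringExample {
    public static void main(String[] args) {
        ByteBuffer raw = UTF8Type.instance.decompose("user42");
        System.out.println(Fields.toString(raw, UTF8Type.instance)); // prints user42
    }
}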
Use of org.apache.cassandra.db.marshal.AbstractType in project stargate-core by tuplejump.
Class FieldCreator, method toString:
public static String toString(ByteBuffer byteBuffer, AbstractType<?> type) {
    if (type instanceof CompositeType) {
        CompositeType composite = (CompositeType) type;
        List<AbstractType<?>> types = composite.types;
        ByteBuffer[] components = composite.split(byteBuffer);
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < components.length; i++) {
            AbstractType<?> componentType = types.get(i);
            ByteBuffer component = components[i];
            sb.append(componentType.compose(component));
            if (i < types.size() - 1) {
                sb.append(':');
            }
        }
        return sb.toString();
    } else {
        return type.compose(byteBuffer).toString();
    }
}
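FieldCreator duplicates the helper from Fields, so a hypothetical call exercising the composite branch is sketched here instead. It assumes a Cassandra version that exposes CompositeType.getInstance(List) and the static CompositeType.build(ByteBuffer...) helper, and that the stargate-core FieldCreator class above is on the classpath (its package is not shown in the listing); the components come back joined with ':'.

import java.nio.ByteBuffer;
import java.util.Arrays;
import org.apache.cassandra.db.marshal.AbstractType;
import org.apache.cassandra.db.marshal.CompositeType;
import org.apache.cassandra.db.marshal.LongType;
import org.apache.cassandra.db.marshal.UTF8Type;

// Illustrative only: pack two components into a composite ByteBuffer and
// render them back through the helper above.
public class CompositeToStringExample {
    public static void main(String[] args) throws Exception {
        CompositeType type = CompositeType.getInstance(
                Arrays.<AbstractType<?>>asList(UTF8Type.instance, LongType.instance));
        ByteBuffer packed = CompositeType.build(
                UTF8Type.instance.decompose("user42"),
                LongType.instance.decompose(7L));
        System.out.println(FieldCreator.toString(packed, type)); // prints user42:7
    }
}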
Use of org.apache.cassandra.db.marshal.AbstractType in project brisk by riptano.
Class SchemaManagerService, method buildTable:
private Table buildTable(CfDef cfDef) {
    Table table = new Table();
    table.setDbName(cfDef.keyspace);
    table.setTableName(cfDef.name);
    table.setTableType(TableType.EXTERNAL_TABLE.toString());
    table.putToParameters("EXTERNAL", "TRUE");
    table.putToParameters("cassandra.ks.name", cfDef.keyspace);
    table.putToParameters("cassandra.cf.name", cfDef.name);
    table.putToParameters("cassandra.slice.predicate.size", "100");
    table.putToParameters("storage_handler", "org.apache.hadoop.hive.cassandra.CassandraStorageHandler");
    table.setPartitionKeys(new ArrayList<FieldSchema>());
    // cassandra.column.mapping
    StorageDescriptor sd = new StorageDescriptor();
    sd.setInputFormat("org.apache.hadoop.hive.cassandra.input.HiveCassandraStandardColumnInputFormat");
    sd.setOutputFormat("org.apache.hadoop.hive.cassandra.output.HiveCassandraOutputFormat");
    sd.setParameters(new HashMap<String, String>());
    try {
        sd.setLocation(warehouse.getDefaultTablePath(cfDef.keyspace, cfDef.name).toString());
    } catch (MetaException me) {
        log.error("could not build path information correctly", me);
    }
    SerDeInfo serde = new SerDeInfo();
    serde.setSerializationLib("org.apache.hadoop.hive.cassandra.serde.CassandraColumnSerDe");
    serde.putToParameters("serialization.format", "1");
    StringBuilder mapping = new StringBuilder();
    StringBuilder validator = new StringBuilder();
    try {
        CFMetaData cfm = CFMetaData.fromThrift(cfDef);
        AbstractType keyValidator = cfDef.key_validation_class != null ? TypeParser.parse(cfDef.key_validation_class) : BytesType.instance;
        addTypeToStorageDescriptor(sd, ByteBufferUtil.bytes("row_key"), keyValidator, keyValidator);
        mapping.append(":key");
        validator.append(keyValidator.toString());
        for (ColumnDef column : cfDef.getColumn_metadata()) {
            addTypeToStorageDescriptor(sd, column.name, TypeParser.parse(cfDef.comparator_type), TypeParser.parse(column.getValidation_class()));
            try {
                mapping.append(",");
                mapping.append(ByteBufferUtil.string(column.name));
                validator.append(",");
                validator.append(column.getValidation_class());
            } catch (CharacterCodingException e) {
                log.error("could not build column mapping correctly", e);
            }
        }
        serde.putToParameters("cassandra.columns.mapping", mapping.toString());
        serde.putToParameters("cassandra.cf.validatorType", validator.toString());
        sd.setSerdeInfo(serde);
    } catch (ConfigurationException ce) {
        throw new CassandraHiveMetaStoreException("Problem converting comparator type: " + cfDef.comparator_type, ce);
    } catch (InvalidRequestException ire) {
        throw new CassandraHiveMetaStoreException("Problem parsing CfDef: " + cfDef.name, ire);
    }
    table.setSd(sd);
    if (log.isDebugEnabled())
        log.debug("constructed table for CF:{} {}", cfDef.name, table.toString());
    return table;
}
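As an illustration of the serde parameters the loop builds, a hypothetical column family named users with a UTF8Type key validator and two indexed columns, email (UTF8Type) and age (LongType), would yield strings shaped like the ones in this dependency-free sketch (keyValidator.toString() resolves to the validator's class name):

// Hypothetical shapes only; the real values are assembled from the CfDef.
public class ColumnMappingShapes {
    public static void main(String[] args) {
        // cassandra.columns.mapping: the ":key" placeholder followed by the column names
        String mapping = ":key,email,age";
        // cassandra.cf.validatorType: the key validator followed by each column's validator
        String validator = "org.apache.cassandra.db.marshal.UTF8Type,"
                + "org.apache.cassandra.db.marshal.UTF8Type,"
                + "org.apache.cassandra.db.marshal.LongType";
        System.out.println("cassandra.columns.mapping = " + mapping);
        System.out.println("cassandra.cf.validatorType = " + validator);
    }
}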