Use of org.apache.cassandra.thrift.ColumnDef in project brisk (by riptano): class SchemaManagerService, method buildTable.
/**
 * Builds a Hive external {@link Table} definition from a Cassandra {@link CfDef},
 * wiring up the table parameters, storage descriptor, and SerDe column mapping
 * so Hive can read the column family through the Cassandra storage handler.
 *
 * @param cfDef the Cassandra column family definition to translate
 * @return a Hive external table mirroring the column family
 * @throws CassandraHiveMetaStoreException if the comparator type cannot be parsed
 *         or the CfDef itself is rejected by Cassandra's validation
 */
private Table buildTable(CfDef cfDef) {
    Table table = new Table();
    table.setDbName(cfDef.keyspace);
    table.setTableName(cfDef.name);
    table.setTableType(TableType.EXTERNAL_TABLE.toString());
    table.putToParameters("EXTERNAL", "TRUE");
    table.putToParameters("cassandra.ks.name", cfDef.keyspace);
    table.putToParameters("cassandra.cf.name", cfDef.name);
    table.putToParameters("cassandra.slice.predicate.size", "100");
    table.putToParameters("storage_handler", "org.apache.hadoop.hive.cassandra.CassandraStorageHandler");
    table.setPartitionKeys(new ArrayList<FieldSchema>());

    StorageDescriptor sd = new StorageDescriptor();
    sd.setInputFormat("org.apache.hadoop.hive.cassandra.input.HiveCassandraStandardColumnInputFormat");
    sd.setOutputFormat("org.apache.hadoop.hive.cassandra.output.HiveCassandraOutputFormat");
    sd.setParameters(new HashMap<String, String>());
    try {
        sd.setLocation(warehouse.getDefaultTablePath(cfDef.keyspace, cfDef.name).toString());
    } catch (MetaException me) {
        // Best effort: the table is still usable without a default location,
        // so log and continue rather than abort the whole conversion.
        log.error("could not build path information correctly", me);
    }

    SerDeInfo serde = new SerDeInfo();
    serde.setSerializationLib("org.apache.hadoop.hive.cassandra.serde.CassandraColumnSerDe");
    serde.putToParameters("serialization.format", "1");
    StringBuilder mapping = new StringBuilder();
    StringBuilder validator = new StringBuilder();
    try {
        // Called for its validating side effect only; throws InvalidRequestException
        // on a malformed CfDef. The returned CFMetaData is not otherwise used here.
        CFMetaData.fromThrift(cfDef);
        AbstractType keyValidator = cfDef.key_validation_class != null
                ? TypeParser.parse(cfDef.key_validation_class)
                : BytesType.instance;
        addTypeToStorageDescriptor(sd, ByteBufferUtil.bytes("row_key"), keyValidator, keyValidator);
        mapping.append(":key");
        validator.append(keyValidator.toString());
        for (ColumnDef column : cfDef.getColumn_metadata()) {
            addTypeToStorageDescriptor(sd, column.name, TypeParser.parse(cfDef.comparator_type), TypeParser.parse(column.getValidation_class()));
            try {
                // Decode the column name BEFORE appending the "," separator so a
                // decoding failure cannot leave a dangling comma that would
                // desynchronize the mapping and validator lists.
                String columnName = ByteBufferUtil.string(column.name);
                mapping.append(",").append(columnName);
                validator.append(",").append(column.getValidation_class());
            } catch (CharacterCodingException e) {
                // Skip this column's mapping entry but keep converting the rest.
                log.error("could not build column mapping correctly", e);
            }
        }
        serde.putToParameters("cassandra.columns.mapping", mapping.toString());
        serde.putToParameters("cassandra.cf.validatorType", validator.toString());
        sd.setSerdeInfo(serde);
    } catch (ConfigurationException ce) {
        throw new CassandraHiveMetaStoreException("Problem converting comparator type: " + cfDef.comparator_type, ce);
    } catch (InvalidRequestException ire) {
        throw new CassandraHiveMetaStoreException("Problem parsing CfDef: " + cfDef.name, ire);
    }
    table.setSd(sd);
    if (log.isDebugEnabled())
        log.debug("constructed table for CF:{} {}", cfDef.name, table.toString());
    return table;
}
Use of org.apache.cassandra.thrift.ColumnDef in project brisk (by riptano): class MetaStoreTestBase, method setupOtherKeyspace.
/**
 * Builds a KsDef for an auxiliary test keyspace containing a single column
 * family ("OtherCf1") with the given key validator and comparator, optionally
 * populated with one column of metadata per validator type exercised in tests.
 *
 * @param configuration source of the replication-factor setting
 * @param ksName        name of the keyspace to define
 * @param keyValidator  fully-qualified key validation class name
 * @param comparator    fully-qualified comparator class name
 * @param addMetaData   whether to attach typed column metadata
 * @return the assembled keyspace definition (not yet created on the cluster)
 */
protected KsDef setupOtherKeyspace(Configuration configuration, String ksName, String keyValidator, String comparator, boolean addMetaData) throws Exception {
    CfDef cf = new CfDef(ksName, "OtherCf1");
    cf.setKey_validation_class(keyValidator);
    cf.setComparator_type(comparator);
    if (addMetaData) {
        // One column per validator type we want to exercise, added in a fixed order.
        String[][] columns = {
            { "col_name_utf8", UTF8Type.class.getName() },
            { "col_name_bytes", BytesType.class.getName() },
            { "col_name_int", IntegerType.class.getName() },
            { "col_name_long", LongType.class.getName() },
            { "col_name_timeuuid", TimeUUIDType.class.getName() },
        };
        for (String[] column : columns) {
            cf.addToColumn_metadata(new ColumnDef(ByteBufferUtil.bytes(column[0]), column[1]));
        }
    }
    KsDef ks = new KsDef(ksName, "org.apache.cassandra.locator.SimpleStrategy", Arrays.asList(cf));
    int replicationFactor = configuration.getInt(CassandraClientHolder.CONF_PARAM_REPLICATION_FACTOR, 1);
    ks.setStrategy_options(KSMetaData.optsWithRF(replicationFactor));
    return ks;
}
Use of org.apache.cassandra.thrift.ColumnDef in project eiger (by wlloyd): class CFMetaDataTest, method testThriftToAvroConversion.
/**
 * Verifies that converting a Thrift CfDef through CFMetaData and back out to
 * Avro yields the same Avro CfDef we would assemble by hand from the Thrift
 * definition, field by field.
 */
@Test
public void testThriftToAvroConversion() throws Exception {
    CfDef thriftDef = new CfDef()
            .setDefault_validation_class(AsciiType.class.getCanonicalName())
            .setComment("Test comment")
            .setColumn_metadata(columnDefs)
            .setKeyspace(KEYSPACE)
            .setName(COLUMN_FAMILY);
    // Round-trip entry point: the class under test.
    CFMetaData metaData = CFMetaData.fromThrift(thriftDef);

    // Hand-build the Avro CfDef we expect the conversion to produce.
    org.apache.cassandra.db.migration.avro.CfDef expected = new org.apache.cassandra.db.migration.avro.CfDef();
    expected.keyspace = new Utf8(KEYSPACE);
    expected.name = new Utf8(COLUMN_FAMILY);
    expected.default_validation_class = new Utf8(thriftDef.default_validation_class);
    expected.comment = new Utf8(thriftDef.comment);
    expected.column_metadata = new ArrayList<org.apache.cassandra.db.migration.avro.ColumnDef>();
    for (ColumnDef thriftColumn : columnDefs) {
        org.apache.cassandra.db.migration.avro.ColumnDef avroColumn = new org.apache.cassandra.db.migration.avro.ColumnDef();
        avroColumn.name = ByteBufferUtil.clone(thriftColumn.name);
        avroColumn.validation_class = new Utf8(thriftColumn.getValidation_class());
        avroColumn.index_name = new Utf8(thriftColumn.getIndex_name());
        avroColumn.index_type = org.apache.cassandra.db.migration.avro.IndexType.KEYS;
        expected.column_metadata.add(avroColumn);
    }

    org.apache.cassandra.db.migration.avro.CfDef actual = metaData.toAvro();
    assertEquals(expected.keyspace, actual.keyspace);
    assertEquals(expected.name, actual.name);
    assertEquals(expected.default_validation_class, actual.default_validation_class);
    assertEquals(expected.comment, actual.comment);
    assertEquals(expected.column_metadata, actual.column_metadata);
}
Aggregations