use of com.datastax.driver.core.UserType in project cassandra by apache.
Example from the class CQLSSTableWriterTest, method testWritesWithUdts.
@Test
@SuppressWarnings("unchecked")
public void testWritesWithUdts() throws Exception {
    final String KS = "cql_keyspace3";
    final String TABLE = "table3";
    // Table with a frozen list of one UDT and a standalone frozen UDT column.
    final String schema = "CREATE TABLE " + KS + "." + TABLE + " ("
            + " k int,"
            + " v1 list<frozen<tuple2>>,"
            + " v2 frozen<tuple3>,"
            + " PRIMARY KEY (k)"
            + ")";
    File tempdir = Files.createTempDir();
    File dataDir = new File(tempdir.getAbsolutePath() + File.separator + KS + File.separator + TABLE);
    assert dataDir.mkdirs();
    // Both UDTs must be registered with the writer before the table schema can reference them.
    CQLSSTableWriter writer = CQLSSTableWriter.builder()
            .inDirectory(dataDir)
            .withType("CREATE TYPE " + KS + ".tuple2 (a int, b int)")
            .withType("CREATE TYPE " + KS + ".tuple3 (a int, b int, c int)")
            .forTable(schema)
            .using("INSERT INTO " + KS + "." + TABLE + " (k, v1, v2) " + "VALUES (?, ?, ?)")
            .build();
    UserType tuple2Type = writer.getUDType("tuple2");
    UserType tuple3Type = writer.getUDType("tuple3");
    // Write 100 rows whose UDT member values are simple multiples of the row index,
    // so each row can be verified independently below.
    for (int i = 0; i < 100; i++) {
        writer.addRow(i,
                ImmutableList.builder()
                        .add(tuple2Type.newValue().setInt("a", i * 10).setInt("b", i * 20))
                        .add(tuple2Type.newValue().setInt("a", i * 30).setInt("b", i * 40))
                        .build(),
                tuple3Type.newValue().setInt("a", i * 100).setInt("b", i * 200).setInt("c", i * 300));
    }
    writer.close();
    loadSSTables(dataDir, KS);
    UntypedResultSet resultSet = QueryProcessor.executeInternal("SELECT * FROM " + KS + "." + TABLE);
    TypeCodec collectionCodec = UDHelper.codecFor(DataType.CollectionType.frozenList(tuple2Type));
    TypeCodec tuple3Codec = UDHelper.codecFor(tuple3Type);
    // NOTE: assertEquals arguments are (expected, actual) — the original had them
    // swapped throughout, which produces misleading failure messages.
    assertEquals(100, resultSet.size());
    int cnt = 0;
    for (UntypedResultSet.Row row : resultSet) {
        assertEquals(cnt, row.getInt("k"));
        List<UDTValue> values = (List<UDTValue>) collectionCodec.deserialize(row.getBytes("v1"), ProtocolVersion.NEWEST_SUPPORTED);
        assertEquals(cnt * 10, values.get(0).getInt("a"));
        assertEquals(cnt * 20, values.get(0).getInt("b"));
        assertEquals(cnt * 30, values.get(1).getInt("a"));
        assertEquals(cnt * 40, values.get(1).getInt("b"));
        UDTValue v2 = (UDTValue) tuple3Codec.deserialize(row.getBytes("v2"), ProtocolVersion.NEWEST_SUPPORTED);
        assertEquals(cnt * 100, v2.getInt("a"));
        assertEquals(cnt * 200, v2.getInt("b"));
        assertEquals(cnt * 300, v2.getInt("c"));
        cnt++;
    }
}
use of com.datastax.driver.core.UserType in project cassandra by apache.
Example from the class CQLSSTableWriterTest, method testWritesWithDependentUdts.
@Test
@SuppressWarnings("unchecked")
public void testWritesWithDependentUdts() throws Exception {
    final String KS = "cql_keyspace4";
    final String TABLE = "table4";
    // Table with a single column whose UDT itself nests another UDT.
    final String schema = "CREATE TABLE " + KS + "." + TABLE + " ("
            + " k int,"
            + " v1 frozen<nested_tuple>,"
            + " PRIMARY KEY (k)"
            + ")";
    File tempdir = Files.createTempDir();
    File dataDir = new File(tempdir.getAbsolutePath() + File.separator + KS + File.separator + TABLE);
    assert dataDir.mkdirs();
    // nested_tuple is registered before tuple2 on purpose: the writer must resolve
    // UDT dependencies regardless of declaration order.
    CQLSSTableWriter writer = CQLSSTableWriter.builder()
            .inDirectory(dataDir)
            .withType("CREATE TYPE " + KS + ".nested_tuple (c int, tpl frozen<tuple2>)")
            .withType("CREATE TYPE " + KS + ".tuple2 (a int, b int)")
            .forTable(schema)
            .using("INSERT INTO " + KS + "." + TABLE + " (k, v1) " + "VALUES (?, ?)")
            .build();
    UserType tuple2Type = writer.getUDType("tuple2");
    UserType nestedTuple = writer.getUDType("nested_tuple");
    TypeCodec tuple2Codec = UDHelper.codecFor(tuple2Type);
    TypeCodec nestedTupleCodec = UDHelper.codecFor(nestedTuple);
    // Write 100 rows of nested UDT values derived from the row index.
    for (int i = 0; i < 100; i++) {
        writer.addRow(i,
                nestedTuple.newValue()
                        .setInt("c", i * 100)
                        .set("tpl", tuple2Type.newValue().setInt("a", i * 200).setInt("b", i * 300), tuple2Codec));
    }
    writer.close();
    loadSSTables(dataDir, KS);
    UntypedResultSet resultSet = QueryProcessor.executeInternal("SELECT * FROM " + KS + "." + TABLE);
    // NOTE: assertEquals arguments are (expected, actual) — the original had them
    // swapped throughout, which produces misleading failure messages.
    assertEquals(100, resultSet.size());
    int cnt = 0;
    for (UntypedResultSet.Row row : resultSet) {
        assertEquals(cnt, row.getInt("k"));
        UDTValue nestedTpl = (UDTValue) nestedTupleCodec.deserialize(row.getBytes("v1"), ProtocolVersion.NEWEST_SUPPORTED);
        assertEquals(cnt * 100, nestedTpl.getInt("c"));
        UDTValue tpl = nestedTpl.getUDTValue("tpl");
        assertEquals(cnt * 200, tpl.getInt("a"));
        assertEquals(cnt * 300, tpl.getInt("b"));
        cnt++;
    }
}
use of com.datastax.driver.core.UserType in project SimpleFlatMapper by arnaudroger.
Example from the class DatastaxUDTGetter, method newUDTMapper.
/**
 * Builds a mapper that materializes instances of {@code target} from the fields of
 * the given Cassandra user-defined type, binding each UDT field by its position.
 */
public static <P> Mapper<GettableByIndexData, P> newUDTMapper(Type target, UserType tt, DatastaxMapperFactory factory) {
    ConstantSourceMapperBuilder<GettableByIndexData, P, DatastaxColumnKey> builder = newFieldMapperBuilder(factory, target);
    // Default (identity) column definition shared by every field mapping.
    FieldMapperColumnDefinition<DatastaxColumnKey> definition = FieldMapperColumnDefinition.identity();
    int index = 0;
    for (Iterator<UserType.Field> fields = tt.iterator(); fields.hasNext(); index++) {
        UserType.Field field = fields.next();
        builder.addMapping(new DatastaxColumnKey(field.getName(), index, field.getType()), definition);
    }
    return builder.mapper();
}
use of com.datastax.driver.core.UserType in project gora by apache.
Example from the class AvroSerializer, method put.
/**
 * {@inheritDoc}
 *
 * Persists a dirty {@link PersistentBase} bean: collects the key columns plus every
 * dirty (or inlined-partition-key) field into parallel column/value lists, converting
 * frozen UDT fields into driver {@code UDTValue}s, then executes a single INSERT.
 * Beans that are not {@code PersistentBase}, or that have no dirty fields, are
 * skipped with a log message.
 *
 * @param key        the row key
 * @param persistent the bean to persist
 * @throws GoraException wrapping any underlying failure
 */
@Override
public void put(Object key, Persistent persistent) throws GoraException {
    try {
        if (persistent instanceof PersistentBase) {
            if (persistent.isDirty()) {
                PersistentBase persistentBase = (PersistentBase) persistent;
                ArrayList<String> fields = new ArrayList<>();
                ArrayList<Object> values = new ArrayList<>();
                AvroCassandraUtils.processKeys(mapping, key, fields, values);
                for (Schema.Field f : persistentBase.getSchema().getFields()) {
                    String fieldName = f.name();
                    Field field = mapping.getFieldFromFieldName(fieldName);
                    if (field == null) {
                        LOG.debug("Ignoring {} adding field, {} field can't find in {} mapping", new Object[] { fieldName, fieldName, persistentClass });
                        continue;
                    }
                    // Persist the field if it is dirty, or if it is the inlined partition key
                    // (which must always be included in the INSERT).
                    if (persistent.isDirty(f.pos()) || mapping.getInlinedDefinedPartitionKey().equals(mapping.getFieldFromFieldName(fieldName))) {
                        Object value = persistentBase.get(f.pos());
                        String fieldType = field.getType();
                        if (fieldType.contains("frozen")) {
                            // Extract the UDT name, e.g. "frozen<address>" -> "address".
                            fieldType = fieldType.substring(fieldType.indexOf("<") + 1, fieldType.indexOf(">"));
                            UserType userType = client.getSession().getCluster().getMetadata().getKeyspace(mapping.getKeySpace().getName()).getUserType(fieldType);
                            UDTValue udtValue = userType.newValue();
                            Schema udtSchema = f.schema();
                            // A nullable UDT is declared as an Avro UNION; use its RECORD branch.
                            if (udtSchema.getType().equals(Schema.Type.UNION)) {
                                for (Schema schema : udtSchema.getTypes()) {
                                    if (schema.getType().equals(Schema.Type.RECORD)) {
                                        udtSchema = schema;
                                        break;
                                    }
                                }
                            }
                            PersistentBase udtObjectBase = (PersistentBase) value;
                            for (Schema.Field udtField : udtSchema.getFields()) {
                                Object udtFieldValue = AvroCassandraUtils.getFieldValueFromAvroBean(udtField.schema(), udtField.schema().getType(), udtObjectBase.get(udtField.name()), field);
                                if (udtFieldValue == null) {
                                    // Guard: the original called udtFieldValue.getClass()
                                    // unconditionally, which NPE'd on a null UDT member.
                                    udtValue.setToNull(udtField.name());
                                } else if (udtField.schema().getType().equals(Schema.Type.MAP)) {
                                    udtValue.setMap(udtField.name(), (Map) udtFieldValue);
                                } else if (udtField.schema().getType().equals(Schema.Type.ARRAY)) {
                                    udtValue.setList(udtField.name(), (List) udtFieldValue);
                                } else {
                                    udtValue.set(udtField.name(), udtFieldValue, (Class) udtFieldValue.getClass());
                                }
                            }
                            value = udtValue;
                        } else {
                            value = AvroCassandraUtils.getFieldValueFromAvroBean(f.schema(), f.schema().getType(), value, field);
                        }
                        values.add(value);
                        fields.add(fieldName);
                    }
                }
                String cqlQuery = CassandraQueryFactory.getInsertDataQuery(mapping, fields);
                SimpleStatement statement = new SimpleStatement(cqlQuery, values.toArray());
                if (writeConsistencyLevel != null) {
                    statement.setConsistencyLevel(ConsistencyLevel.valueOf(writeConsistencyLevel));
                }
                client.getSession().execute(statement);
            } else {
                LOG.info("Ignored putting persistent bean {} in the store as it is neither " + "new, neither dirty.", new Object[] { persistent });
            }
        } else {
            LOG.error("{} Persistent bean isn't extended by {} .", new Object[] { this.persistentClass, PersistentBase.class });
        }
    } catch (Exception e) {
        throw new GoraException(e);
    }
}
use of com.datastax.driver.core.UserType in project apex-malhar by apache.
Example from the class UserUpsertOperator, method getCodecsForUserDefinedTypes.
/**
 * Registers custom codecs for the "address" and "fullname" user-defined types so the
 * driver can convert them to/from the {@code Address} and {@code FullName} POJOs.
 *
 * @return a map from column name ("currentaddress", "username") to its codec
 */
@Override
public Map<String, TypeCodec> getCodecsForUserDefinedTypes() {
    Map<String, TypeCodec> allCodecs = new HashMap<>();
    CodecRegistry codecRegistry = cluster.getConfiguration().getCodecRegistry();
    // Resolve the keyspace name once instead of repeating the lookup chain per UDT.
    String keyspaceName = getConnectionStateManager().getKeyspaceName();
    UserType addressType = cluster.getMetadata().getKeyspace(keyspaceName).getUserType("address");
    TypeCodec<UDTValue> addressTypeCodec = codecRegistry.codecFor(addressType);
    AddressCodec addressCodec = new AddressCodec(addressTypeCodec, Address.class);
    allCodecs.put("currentaddress", addressCodec);
    UserType userFullNameType = cluster.getMetadata().getKeyspace(keyspaceName).getUserType("fullname");
    TypeCodec<UDTValue> userFullNameTypeCodec = codecRegistry.codecFor(userFullNameType);
    FullNameCodec fullNameCodec = new FullNameCodec(userFullNameTypeCodec, FullName.class);
    allCodecs.put("username", fullNameCodec);
    return allCodecs;
}
Aggregations