Use of org.apache.gora.cassandra.bean.Field in project gora by apache.
In class CassandraQueryFactory, method getCreateTableQuery:
/**
 * Builds the CQL {@code CREATE TABLE} statement for the given mapping.
 * refer : http://docs.datastax.com/en/cql/3.1/cql/cql_reference/create_table_r.html
 * <p>
 * Note: to keep the column order deterministic, partition keys are appended
 * first, cluster keys second and all remaining columns last. The other CRUD
 * operations must follow the exact same ordering.
 *
 * @param mapping Cassandra mapping {@link CassandraMapping}
 * @return CQL Query
 */
static String getCreateTableQuery(CassandraMapping mapping) {
  StringBuilder query = new StringBuilder();
  query.append("CREATE TABLE IF NOT EXISTS ")
      .append(mapping.getKeySpace().getName())
      .append(".")
      .append(mapping.getCoreName())
      .append(" (");
  CassandraKey cassandraKey = mapping.getCassandraKey();
  // Persistent (non-key) columns come straight from the mapping's field list.
  processFieldsForCreateTableQuery(mapping.getFieldList(), false, query);
  if (cassandraKey != null) {
    processFieldsForCreateTableQuery(cassandraKey.getFieldList(), true, query);
    List<PartitionKeyField> partitionKeys = cassandraKey.getPartitionKeyFields();
    if (partitionKeys != null) {
      query.append(", PRIMARY KEY (");
      for (int i = 0; i < partitionKeys.size(); i++) {
        if (i > 0) {
          query.append(",");
        }
        PartitionKeyField keyField = partitionKeys.get(i);
        if (keyField.isComposite()) {
          // A composite partition key is rendered as a parenthesised group.
          query.append("(");
          boolean firstComposite = true;
          for (Field compositeField : keyField.getFields()) {
            if (!firstComposite) {
              query.append(", ");
            }
            query.append(compositeField.getColumnName());
            firstComposite = false;
          }
          query.append(")");
        } else {
          query.append(keyField.getColumnName());
        }
      }
      query.append(")");
    }
  }
  query.append(")");
  // Table options: the first option is introduced with WITH, later ones with AND.
  boolean isWithNeeded = true;
  if (Boolean.parseBoolean(mapping.getProperty("compactStorage"))) {
    query.append(" WITH COMPACT STORAGE ");
    isWithNeeded = false;
  }
  String id = mapping.getProperty("id");
  if (id != null) {
    query.append(isWithNeeded ? " WITH " : " AND ");
    query.append("ID = '").append(id).append("'");
    isWithNeeded = false;
  }
  if (cassandraKey != null) {
    List<ClusterKeyField> clusterKeyFields = cassandraKey.getClusterKeyFields();
    if (clusterKeyFields != null) {
      query.append(isWithNeeded ? " WITH " : " AND ");
      query.append(" CLUSTERING ORDER BY (");
      for (int i = 0; i < clusterKeyFields.size(); i++) {
        if (i > 0) {
          query.append(", ");
        }
        ClusterKeyField keyField = clusterKeyFields.get(i);
        query.append(keyField.getColumnName()).append(" ");
        if (keyField.getOrder() != null) {
          query.append(keyField.getOrder());
        }
      }
      query.append(")");
    }
  }
  return query.toString();
}
Use of org.apache.gora.cassandra.bean.Field in project gora by apache.
In class CassandraQueryFactory, method getUpdateByQueryForAvro:
/**
 * Returns the CQL query for the UpdateByQuery method.
 * refer : http://docs.datastax.com/en/cql/3.3/cql/cql_reference/cqlUpdate.html
 *
 * @param mapping        Cassandra mapping {@link CassandraMapping}
 * @param cassandraQuery Cassandra Query {@link CassandraQuery}
 * @param objects        field Objects list; the serialized values bound to the
 *                       "?" markers are appended to this list as a side effect
 * @param schema         Avro schema of the persistent class
 * @return CQL Query
 */
static String getUpdateByQueryForAvro(CassandraMapping mapping, Query cassandraQuery, List<Object> objects, Schema schema) {
  // Guard clause: this method only works with the Cassandra-specific query type.
  if (!(cassandraQuery instanceof CassandraQuery)) {
    throw new RuntimeException("Please use Cassandra Query object to invoke, UpdateByQuery method.");
  }
  Update update = QueryBuilder.update(mapping.getKeySpace().getName(), mapping.getCoreName());
  Update.Assignments updateAssignments = null;
  String[] columnNames = getColumnNames(mapping, Arrays.asList(cassandraQuery.getFields()));
  for (String column : columnNames) {
    updateAssignments = update.with(QueryBuilder.set(column, "?"));
    Field field = mapping.getFieldFromColumnName(column);
    Object value = ((CassandraQuery) cassandraQuery).getUpdateFieldValue(field.getFieldName());
    // Explicit null check instead of the previous catch (NullPointerException):
    // the Avro schema may not declare the mapped field at all.
    Schema.Field avroField = schema.getField(field.getFieldName());
    if (avroField == null) {
      throw new RuntimeException(field + " field couldn't find in the class " + mapping.getPersistentClass() + ".");
    }
    Schema schemaField = avroField.schema();
    objects.add(AvroCassandraUtils.getFieldValueFromAvroBean(schemaField, schemaField.getType(), value, field));
  }
  return processQuery(cassandraQuery, updateAssignments, mapping, objects);
}
Use of org.apache.gora.cassandra.bean.Field in project gora by apache.
In class CassandraClient, method registerOptionalCodecs:
/**
 * Registers the optional, array and time codecs needed to map the column
 * types declared in the Cassandra mapping onto Java values.
 */
private void registerOptionalCodecs() {
  // Optional Codecs for natives
  register(new OptionalCodec<>(TypeCodec.ascii()));
  register(new OptionalCodec<>(TypeCodec.bigint()));
  register(new OptionalCodec<>(TypeCodec.blob()));
  register(new OptionalCodec<>(TypeCodec.cboolean()));
  register(new OptionalCodec<>(TypeCodec.cdouble()));
  register(new OptionalCodec<>(TypeCodec.cfloat()));
  register(new OptionalCodec<>(TypeCodec.cint()));
  register(new OptionalCodec<>(TypeCodec.counter()));
  register(new OptionalCodec<>(TypeCodec.date()));
  register(new OptionalCodec<>(TypeCodec.decimal()));
  register(new OptionalCodec<>(TypeCodec.inet()));
  register(new OptionalCodec<>(TypeCodec.smallInt()));
  register(new OptionalCodec<>(TypeCodec.time()));
  register(new OptionalCodec<>(TypeCodec.timestamp()));
  register(new OptionalCodec<>(TypeCodec.timeUUID()));
  register(new OptionalCodec<>(TypeCodec.tinyInt()));
  register(new OptionalCodec<>(TypeCodec.varint()));
  register(new OptionalCodec<>(TypeCodec.varchar()));
  register(new OptionalCodec<>(TypeCodec.uuid()));
  // Optional Array Codecs
  register(new IntArrayCodec());
  register(new DoubleArrayCodec());
  register(new FloatArrayCodec());
  register(new LongArrayCodec());
  register(new ObjectArrayCodec<>(DataType.list(DataType.varchar()), String[].class, TypeCodec.varchar()));
  // Optional Time Codecs
  register(new SimpleDateCodec());
  register(new SimpleTimestampCodec());
  for (Field field : this.mapping.getFieldList()) {
    String columnType = field.getType().toLowerCase(Locale.ENGLISH);
    // http://docs.datastax.com/en/cql/3.3/cql/cql_reference/cql_data_types_c.html
    if (columnType.contains("list")) {
      columnType = columnType.substring(columnType.indexOf("<") + 1, columnType.indexOf(">"));
      register(new OptionalCodec<>(TypeCodec.list(getTypeCodec(columnType))));
    } else if (columnType.contains("set")) {
      columnType = columnType.substring(columnType.indexOf("<") + 1, columnType.indexOf(">"));
      register(new OptionalCodec<>(TypeCodec.set(getTypeCodec(columnType))));
    } else if (columnType.contains("map")) {
      String[] keyValueTypes = columnType.substring(columnType.indexOf("<") + 1, columnType.indexOf(">")).split(",");
      // Bug fix: the key/value codecs were previously wrapped in TypeCodec.set(...),
      // which registered a codec for map<set<K>, set<V>> instead of map<K, V>.
      // The split parts are trimmed so "map<text, int>" resolves " int" correctly.
      register(new OptionalCodec<>(TypeCodec.map(getTypeCodec(keyValueTypes[0].trim()), getTypeCodec(keyValueTypes[1].trim()))));
    }
  }
}

/**
 * Registers a single codec with the cluster's codec registry.
 *
 * @param codec the codec to register
 */
private void register(TypeCodec<?> codec) {
  this.cluster.getConfiguration().getCodecRegistry().register(codec);
}
Use of org.apache.gora.cassandra.bean.Field in project gora by apache.
In class AvroSerializer, method put:
/**
 * {@inheritDoc}
 * <p>
 * Serializes the given Avro persistent bean into a CQL INSERT statement and
 * executes it against the session. Only dirty fields (plus the inlined
 * partition key field) are written; a bean that is not dirty is skipped.
 *
 * @param key        key of the persistent object; resolved into key
 *                   columns/values via {@code AvroCassandraUtils.processKeys}
 * @param persistent the persistent bean to store; must extend {@code PersistentBase}
 */
@Override
public void put(Object key, Persistent persistent) throws GoraException {
  try {
    if (persistent instanceof PersistentBase) {
      if (persistent.isDirty()) {
        PersistentBase persistentBase = (PersistentBase) persistent;
        // Column names and bound values collected in lockstep; order matters
        // because the INSERT query binds positionally.
        ArrayList<String> fields = new ArrayList<>();
        ArrayList<Object> values = new ArrayList<>();
        AvroCassandraUtils.processKeys(mapping, key, fields, values);
        for (Schema.Field f : persistentBase.getSchema().getFields()) {
          String fieldName = f.name();
          Field field = mapping.getFieldFromFieldName(fieldName);
          // Avro fields without a column mapping are silently skipped.
          if (field == null) {
            LOG.debug("Ignoring {} adding field, {} field can't find in {} mapping", new Object[] { fieldName, fieldName, persistentClass });
            continue;
          }
          // Write the column if it is dirty, or if it is the inlined partition
          // key (the key column must always be present in the INSERT).
          if (persistent.isDirty(f.pos()) || mapping.getInlinedDefinedPartitionKey().equals(mapping.getFieldFromFieldName(fieldName))) {
            Object value = persistentBase.get(f.pos());
            String fieldType = field.getType();
            if (fieldType.contains("frozen")) {
              // "frozen<X>" columns map to a Cassandra user-defined type named X;
              // convert the nested Avro record into a UDTValue field by field.
              fieldType = fieldType.substring(fieldType.indexOf("<") + 1, fieldType.indexOf(">"));
              UserType userType = client.getSession().getCluster().getMetadata().getKeyspace(mapping.getKeySpace().getName()).getUserType(fieldType);
              UDTValue udtValue = userType.newValue();
              Schema udtSchema = f.schema();
              // For nullable unions, pick the RECORD branch that describes the UDT.
              if (udtSchema.getType().equals(Schema.Type.UNION)) {
                for (Schema schema : udtSchema.getTypes()) {
                  if (schema.getType().equals(Schema.Type.RECORD)) {
                    udtSchema = schema;
                    break;
                  }
                }
              }
              PersistentBase udtObjectBase = (PersistentBase) value;
              for (Schema.Field udtField : udtSchema.getFields()) {
                Object udtFieldValue = AvroCassandraUtils.getFieldValueFromAvroBean(udtField.schema(), udtField.schema().getType(), udtObjectBase.get(udtField.name()), field);
                if (udtField.schema().getType().equals(Schema.Type.MAP)) {
                  udtValue.setMap(udtField.name(), (Map) udtFieldValue);
                } else if (udtField.schema().getType().equals(Schema.Type.ARRAY)) {
                  udtValue.setList(udtField.name(), (List) udtFieldValue);
                } else {
                  // NOTE(review): udtFieldValue.getClass() throws NPE when the
                  // converted UDT field value is null — confirm nulls can't occur here.
                  udtValue.set(udtField.name(), udtFieldValue, (Class) udtFieldValue.getClass());
                }
              }
              value = udtValue;
            } else {
              value = AvroCassandraUtils.getFieldValueFromAvroBean(f.schema(), f.schema().getType(), value, field);
            }
            values.add(value);
            fields.add(fieldName);
          }
        }
        String cqlQuery = CassandraQueryFactory.getInsertDataQuery(mapping, fields);
        SimpleStatement statement = new SimpleStatement(cqlQuery, values.toArray());
        if (writeConsistencyLevel != null) {
          statement.setConsistencyLevel(ConsistencyLevel.valueOf(writeConsistencyLevel));
        }
        client.getSession().execute(statement);
      } else {
        LOG.info("Ignored putting persistent bean {} in the store as it is neither " + "new, neither dirty.", new Object[] { persistent });
      }
    } else {
      LOG.error("{} Persistent bean isn't extended by {} .", new Object[] { this.persistentClass, PersistentBase.class });
    }
  } catch (Exception e) {
    // All failures (driver, serialization, schema mismatch) surface as GoraException.
    throw new GoraException(e);
  }
}
Use of org.apache.gora.cassandra.bean.Field in project gora by apache.
In class AvroSerializer, method analyzePersistent:
/**
 * {@inheritDoc}
 * <p>
 * Scans the mapping for frozen (user-defined type) columns and caches the CQL
 * CREATE TYPE statement for each UDT in {@code userDefineTypeMaps}.
 *
 * @throws Exception propagated from UDT query generation
 */
protected void analyzePersistent() throws Exception {
  userDefineTypeMaps = new HashMap<>();
  for (Field field : mapping.getFieldList()) {
    String fieldType = field.getType();
    // Only "frozen<X>" columns map to Cassandra user-defined types.
    if (fieldType.contains("frozen")) {
      String udtType = fieldType.substring(fieldType.indexOf("<") + 1, fieldType.indexOf(">"));
      if (!PersistentBase.class.isAssignableFrom(persistentClass)) {
        throw new RuntimeException("Unsupported Class for User Define Types, Please use PersistentBase class. field : " + udtType);
      }
      // Explicit guard: a mapping/schema mismatch previously surfaced as an
      // unhelpful NullPointerException from getField(...).schema().
      Schema.Field avroField = persistentSchema.getField(field.getFieldName());
      if (avroField == null) {
        throw new RuntimeException("Field " + field.getFieldName() + " couldn't be found in the schema of " + persistentClass + ".");
      }
      Schema fieldSchema = avroField.schema();
      // For nullable unions, pick the RECORD branch that describes the UDT.
      if (fieldSchema.getType().equals(Schema.Type.UNION)) {
        for (Schema currentSchema : fieldSchema.getTypes()) {
          if (currentSchema.getType().equals(Schema.Type.RECORD)) {
            fieldSchema = currentSchema;
            break;
          }
        }
      }
      String createQuery = CassandraQueryFactory.getCreateUDTTypeForAvro(mapping, udtType, fieldSchema);
      userDefineTypeMaps.put(udtType, createQuery);
    }
  }
}
Aggregations