Use of com.datastax.driver.core.DataType in project cassandra by apache.
From the class UDFContextImpl, method newTupleValue.
/**
 * Creates a new, empty {@link TupleValue} for the tuple type described by the given
 * CQL type definition, resolved against this keyspace's user-defined types.
 *
 * @param cqlDefinition CQL type definition string (e.g. {@code "tuple<int, text>"})
 * @return a fresh tuple value of the parsed type
 */
public TupleValue newTupleValue(String cqlDefinition) {
    AbstractType<?> parsed =
            CQLTypeParser.parse(keyspaceMetadata.name, cqlDefinition, keyspaceMetadata.types);
    return newTupleValue(UDHelper.driverType(parsed));
}
Use of com.datastax.driver.core.DataType in project cassandra by apache.
From the class UDFContextImpl, method newUDTValue.
/**
 * Creates a new, empty {@link UDTValue} for the user-defined type with the given name
 * in this keyspace.
 *
 * @param udtName name of the UDT, looked up in the keyspace metadata
 * @return a fresh UDT value of that type
 * @throws IllegalArgumentException if no UDT with that name exists in the keyspace
 */
public UDTValue newUDTValue(String udtName) {
    org.apache.cassandra.db.marshal.UserType udt =
            keyspaceMetadata.types
                    .get(ByteBufferUtil.bytes(udtName))
                    .orElseThrow(() -> new IllegalArgumentException(
                            "No UDT named " + udtName + " in keyspace " + keyspaceMetadata.name));
    return newUDTValue(UDHelper.driverType(udt));
}
Use of com.datastax.driver.core.DataType in project ats-framework by Axway.
From the class CassandraDbProvider, method extractObjectFromResultSet.
/**
 * Converts a single column of a driver {@link Row} into a plain Java object, dispatching
 * on the column's {@link DataType.Name}.
 *
 * <p>Notes: BLOB columns are copied out of the ByteBuffer into a new byte[] so callers own
 * the data independently of the buffer's position; SET/LIST/MAP use the declared element
 * type arguments for the typed getters.
 *
 * @param row              the result row to read from
 * @param columnDefinition metadata for the column being extracted (name, type, table)
 * @return the column value as a Java object, or {@code null} for a null BLOB
 * @throws DbException if the column's data type is not supported
 */
private static Object extractObjectFromResultSet(Row row, Definition columnDefinition) {
    String columnName = columnDefinition.getName();
    DataType dataType = columnDefinition.getType();
    // DataType.Name is an enum, so a switch is the idiomatic (and clearer) form of the
    // original equals() chain; identical branches are merged via fall-through.
    switch (dataType.getName()) {
        case UUID:
        case TIMEUUID:
            return row.getUUID(columnName);
        case BOOLEAN:
            return row.getBool(columnName);
        case INT:
            return row.getInt(columnName);
        case BIGINT:
        case COUNTER:
            return row.getLong(columnName);
        case FLOAT:
            return row.getFloat(columnName);
        case DOUBLE:
            return row.getDouble(columnName);
        case DECIMAL:
            return row.getDecimal(columnName);
        case TEXT:
        case VARCHAR:
            return row.getString(columnName);
        case TIMESTAMP:
            return row.getDate(columnName);
        case BLOB: {
            // getBytes already returns a ByteBuffer; the original cast was redundant.
            ByteBuffer data = row.getBytes(columnName);
            if (data == null) {
                return null;
            }
            // Copy the remaining bytes so the result does not alias the driver's buffer.
            byte[] result = new byte[data.remaining()];
            data.get(result);
            return result;
        }
        case SET:
            return row.getSet(columnName, dataType.getTypeArguments().get(0).asJavaClass());
        case LIST:
            return row.getList(columnName, dataType.getTypeArguments().get(0).asJavaClass());
        case MAP:
            return row.getMap(columnName,
                              dataType.getTypeArguments().get(0).asJavaClass(),
                              dataType.getTypeArguments().get(1).asJavaClass());
        default:
            throw new DbException("Unsupported data type '" + columnDefinition.getType().toString() + "' for table '" + columnDefinition.getTable() + "' and column '" + columnName + "'");
    }
}
Use of com.datastax.driver.core.DataType in project cassandra-driver-mapping by valchkou.
From the class EntityTypeParser, method getColumnDataType.
/**
 * Resolves the driver {@link DataType.Name} for an entity field.
 *
 * <p>By default the type is looked up from {@code javaTypeToDataType}; enum fields map to
 * the entry registered for {@link Enum}. A {@code @Column} annotation with a non-empty
 * {@code columnDefinition} overrides the mapped type.
 *
 * @param f the entity field to inspect
 * @return the resolved data-type name, or {@code null} if the field's Java type is unmapped
 *         and no override is present
 * @throws IllegalArgumentException if {@code columnDefinition} names an unknown data type
 */
private static DataType.Name getColumnDataType(Field f) {
    Class<?> t = f.getType();
    // Enums are a special case: they all share the single mapping registered for Enum.class.
    DataType.Name dataType =
            t.isEnum() ? javaTypeToDataType.get(Enum.class) : javaTypeToDataType.get(t);
    Column columnA = f.getAnnotation(Column.class);
    // getAnnotation returns either null or a Column instance, so a plain null check
    // replaces the original (always-equivalent) instanceof test.
    if (columnA != null) {
        String typedef = columnA.columnDefinition();
        if (typedef != null && typedef.length() > 0) {
            // Enum.valueOf never returns null -- it throws IllegalArgumentException for
            // unknown names -- so the original null check on its result was dead code.
            // NOTE(review): toUpperCase() is locale-sensitive; toUpperCase(Locale.ROOT)
            // would be safer if this can run under e.g. a Turkish default locale.
            dataType = DataType.Name.valueOf(typedef.toUpperCase());
        }
    }
    return dataType;
}
Use of com.datastax.driver.core.DataType in project cassandra-driver-mapping by valchkou.
From the class EntityTypeParser, method parsePropertyLevelMetadata.
/**
 * Scans the declared fields of {@code clazz} and registers column metadata for each mapped
 * property on {@code result}. Fields annotated {@code @EmbeddedId} or {@code @Id} start a
 * recursive parse of their key class; {@code isPartitionKey} marks whether the current
 * recursion level is describing the partition key.
 *
 * <p>NOTE(review): a field is only registered once BOTH a getter and a setter have been
 * found among the class's declared methods, so properties missing either accessor are
 * silently skipped -- confirm this is intended.
 *
 * @param clazz          the class whose declared fields are inspected
 * @param result         accumulator for the parsed entity metadata (also the return value)
 * @param pkmeta         key metadata the current fields belong to, or {@code null} when not
 *                       inside a key class
 * @param isPartitionKey whether this recursion level is parsing the partition-key class
 * @return the same {@code result} instance, with the discovered fields added
 */
private static EntityTypeMetadata parsePropertyLevelMetadata(Class<?> clazz, EntityTypeMetadata result, PrimaryKeyMetadata pkmeta, boolean isPartitionKey) {
    Field[] fields = clazz.getDeclaredFields();
    Method[] methods = clazz.getDeclaredMethods();
    for (Field f : fields) {
        boolean isOwnField = false;
        PrimaryKeyMetadata pkm = null;
        // An @EmbeddedId/@Id field owns its own key class: build key metadata for it,
        // attach it to the right slot, and recurse into the key class for its components.
        if (f.getAnnotation(EmbeddedId.class) != null || f.getAnnotation(Id.class) != null) {
            isOwnField = true;
            pkm = new PrimaryKeyMetadata();
            pkm.setPartition(isPartitionKey);
            if (isPartitionKey) {
                pkmeta.setPartitionKey(pkm);
            } else {
                result.setPrimaryKeyMetadata(pkm);
            }
            parsePropertyLevelMetadata(f.getType(), result, pkm, true);
        }
        // Map the field when it is (a) non-transient with a known Java-to-CQL type mapping,
        // (b) a key field discovered just above, or (c) an enum (mapped specially).
        if ((f.getAnnotation(Transient.class) == null && javaTypeToDataType.get(f.getType()) != null) || isOwnField || f.getType().isEnum()) {
            Method getter = null;
            Method setter = null;
            // Scan the declared methods for this field's accessor pair.
            for (Method m : methods) {
                if (isGetterFor(m, f.getName())) {
                    getter = m;
                } else if (isSetterFor(m, f)) {
                    setter = m;
                }
                // Only register the property once both accessors have been found.
                if (setter != null && getter != null) {
                    String columnName = getColumnName(f);
                    DataType.Name dataType = getColumnDataType(f);
                    EntityFieldMetaData fd = new EntityFieldMetaData(f, dataType, getter, setter, columnName);
                    if (pkmeta != null && !isOwnField) {
                        // Component of the enclosing key: mark it and attach to the key metadata.
                        fd.setPartition(pkmeta.isPartition());
                        fd.setPrimary(true);
                        pkmeta.addField(fd);
                    } else if (isOwnField) {
                        pkm.setOwnField(fd);
                    }
                    if (f.getAnnotation(EmbeddedId.class) != null) {
                        // Embedded keys were fully described by the recursion above;
                        // skip the per-field registration below.
                        break;
                    }
                    if (f.getAnnotation(Version.class) != null) {
                        result.setVersionField(fd);
                    }
                    setCollections(f, fd);
                    if (f.getAnnotation(Static.class) != null) {
                        fd.setStatic(true);
                    }
                    if (f.getAnnotation(GeneratedValue.class) != null) {
                        fd.setAutoGenerate(true);
                    }
                    result.addField(fd);
                    // Done scanning this field's methods; exit the inner loop and
                    // move on to the next field.
                    break;
                }
            }
        }
    }
    return result;
}
Aggregations