Use of com.datastax.oss.dsbulk.mapping.CQLWord in project dsbulk by DataStax.
The class SchemaSettings, method appendBatchChildQuery:
private void appendBatchChildQuery(
    StringBuilder sb,
    CQLWord variable,
    CQLFragment value,
    @Nullable CQLFragment writetime,
    @Nullable CQLFragment ttl,
    List<CQLWord> pks) {
  sb.append("INSERT INTO ")
      .append(keyspaceName.render(VARIABLE))
      .append('.')
      .append(tableName.render(VARIABLE))
      .append(" (");
  for (CQLWord pk : pks) {
    sb.append(pk.render(VARIABLE));
    sb.append(", ");
  }
  sb.append(variable.render(VARIABLE)).append(") VALUES (");
  for (CQLWord pk : pks) {
    sb.append(pk.render(NAMED_ASSIGNMENT));
    sb.append(", ");
  }
  sb.append(value.render(NAMED_ASSIGNMENT)).append(")");
  appendWriteTimeAndTTL(sb, writetime, ttl);
}
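For illustration, a minimal sketch of how this method might be invoked from within SchemaSettings and what it emits. The keyspace/table (ks, t), column names (pk, c), and the writetime bind variable are hypothetical, and the exact USING clause depends on appendWriteTimeAndTTL:

StringBuilder sb = new StringBuilder();
appendBatchChildQuery(
    sb,
    CQLWord.fromInternal("c"),            // target column (hypothetical)
    CQLWord.fromInternal("c"),            // value to bind (a CQLWord is a CQLFragment)
    CQLWord.fromInternal("writetime_c"),  // per-column writetime variable (hypothetical)
    null,                                 // no per-column TTL
    Collections.singletonList(CQLWord.fromInternal("pk")));
// Assuming NAMED_ASSIGNMENT renders named bind markers, sb now holds something like:
// INSERT INTO ks.t (pk, c) VALUES (:pk, :c) USING TIMESTAMP :writetime_c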
Use of com.datastax.oss.dsbulk.mapping.CQLWord in project dsbulk by DataStax.
The class SchemaSettings, method inferBatchInsertQuery:
private String inferBatchInsertQuery(
    ImmutableMultimap<MappingField, CQLFragment> fieldsToVariables) {
  List<CQLWord> pks = primaryKeyColumns();
  Set<CQLFragment> allSpecificVariables = new LinkedHashSet<>();
  Map<CQLWord, WriteTimeAndTTL> specificWriteTimesAndTTLs = new LinkedHashMap<>();
  boolean hasGlobalWritetime = false;
  boolean hasGlobalTTL = false;
  // Collect all columns that carry a per-column writetime() or ttl() function call.
  for (CQLFragment variable : fieldsToVariables.values()) {
    if (variable instanceof FunctionCall) {
      FunctionCall functionCall = (FunctionCall) variable;
      if (functionCall.getFunctionName().equals(WRITETIME)) {
        for (CQLFragment arg : functionCall.getArgs()) {
          if (arg.equals(STAR)) {
            if (preserveTimestamp) {
              throw new IllegalStateException(
                  "Invalid mapping: writetime(*) is not allowed when schema.preserveTimestamp is true.");
            }
            hasGlobalWritetime = true;
          } else {
            CQLWord col = (CQLWord) arg;
            if (pks.contains(col)) {
              throw new IllegalStateException(
                  "Invalid mapping: writetime() function arg must be either '*' or a non-primary key column name.");
            }
            if (fieldsToVariables.containsValue(col)) {
              allSpecificVariables.add(col);
              allSpecificVariables.add(functionCall);
              specificWriteTimesAndTTLs.compute(
                  col,
                  (k, v) -> {
                    if (v == null) {
                      v = new WriteTimeAndTTL();
                      MappingField colField =
                          fieldsToVariables.inverse().get(col).iterator().next();
                      v.value = colField instanceof CQLFragment ? (CQLFragment) colField : col;
                    }
                    MappingField writetimeField =
                        fieldsToVariables.inverse().get(functionCall).iterator().next();
                    v.writetime =
                        writetimeField instanceof CQLLiteral
                            ? (CQLLiteral) writetimeField
                            : CQLWord.fromInternal(functionCall.render(INTERNAL));
                    return v;
                  });
            } else {
              throw new IllegalStateException(
                  String.format(
                      "Invalid mapping: target column %s must be present if %s is also present.",
                      col.render(VARIABLE), functionCall.render(INTERNAL)));
            }
          }
        }
      } else if (functionCall.getFunctionName().equals(TTL)) {
        for (CQLFragment arg : functionCall.getArgs()) {
          if (arg.equals(STAR)) {
            if (preserveTtl) {
              throw new IllegalStateException(
                  "Invalid mapping: ttl(*) is not allowed when schema.preserveTtl is true.");
            }
            hasGlobalTTL = true;
          } else {
            CQLWord col = (CQLWord) arg;
            if (pks.contains(col)) {
              throw new IllegalStateException(
                  "Invalid mapping: ttl() function arg must be either '*' or a non-primary key column name.");
            }
            if (fieldsToVariables.containsValue(col)) {
              allSpecificVariables.add(col);
              allSpecificVariables.add(functionCall);
              specificWriteTimesAndTTLs.compute(
                  col,
                  (k, v) -> {
                    if (v == null) {
                      v = new WriteTimeAndTTL();
                      MappingField colField =
                          fieldsToVariables.inverse().get(col).iterator().next();
                      v.value = colField instanceof CQLFragment ? (CQLFragment) colField : col;
                    }
                    MappingField ttlField =
                        fieldsToVariables.inverse().get(functionCall).iterator().next();
                    v.ttl =
                        ttlField instanceof CQLLiteral
                            ? (CQLLiteral) ttlField
                            : CQLWord.fromInternal(functionCall.render(INTERNAL));
                    return v;
                  });
            } else {
              throw new IllegalStateException(
                  String.format(
                      "Invalid mapping: target column %s must be present if %s is also present.",
                      col.render(VARIABLE), functionCall.render(INTERNAL)));
            }
          }
        }
      }
    }
  }
  // Compute the default mapping, i.e. all entries without a specific writetime or TTL.
  ImmutableMultimap.Builder<MappingField, CQLFragment> defaultFieldsToVariablesBuilder =
      ImmutableMultimap.builder();
  for (Entry<MappingField, CQLFragment> entry : fieldsToVariables.entries()) {
    CQLFragment variable = entry.getValue();
    if (!allSpecificVariables.contains(variable)) {
      defaultFieldsToVariablesBuilder.put(entry);
    }
  }
  ImmutableMultimap<MappingField, CQLFragment> defaultFieldsToVariables =
      defaultFieldsToVariablesBuilder.build();
  boolean hasRegularColumnsWithoutSpecificWritetimeAndTTL =
      defaultFieldsToVariables.values().stream()
          .filter(CQLWord.class::isInstance)
          .map(CQLWord.class::cast)
          .anyMatch(variable -> !pks.contains(variable));
  if (!hasRegularColumnsWithoutSpecificWritetimeAndTTL) {
    if (hasGlobalWritetime) {
      throw new IllegalStateException(
          "Invalid mapping: writetime(*) function has no target column.");
    }
    if (hasGlobalTTL) {
      throw new IllegalStateException("Invalid mapping: ttl(*) function has no target column.");
    }
  }
  StringBuilder sb = new StringBuilder();
  if (!hasRegularColumnsWithoutSpecificWritetimeAndTTL && specificWriteTimesAndTTLs.size() == 1) {
    // Edge case: there is only one regular column in the table, and it has a specific
    // writetime or TTL: no need for a BATCH as there is only one child statement.
    Entry<CQLWord, WriteTimeAndTTL> entry = specificWriteTimesAndTTLs.entrySet().iterator().next();
    appendBatchChildQuery(
        sb,
        entry.getKey(),
        entry.getValue().value,
        entry.getValue().writetime,
        entry.getValue().ttl,
        pks);
  } else {
    sb.append("BEGIN UNLOGGED BATCH ");
    // Generate a first INSERT INTO child query similar to the ones generated for simple INSERTs.
    if (hasRegularColumnsWithoutSpecificWritetimeAndTTL) {
      sb.append(inferInsertQuery(defaultFieldsToVariables)).append("; ");
    }
    // Generate a specific INSERT INTO child query for each variable with its own TTL
    // and/or writetime.
    for (Entry<CQLWord, WriteTimeAndTTL> entry : specificWriteTimesAndTTLs.entrySet()) {
      appendBatchChildQuery(
          sb,
          entry.getKey(),
          entry.getValue().value,
          entry.getValue().writetime,
          entry.getValue().ttl,
          pks);
      sb.append("; ");
    }
    sb.append("APPLY BATCH");
  }
  return sb.toString();
}
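To make the shape of the result concrete: for a hypothetical table ks.t with primary key pk, a plain column c1, and a column c2 mapped together with writetime(c2), the method would produce a batch along these lines (exact bind-marker names depend on appendWriteTimeAndTTL):

BEGIN UNLOGGED BATCH
INSERT INTO ks.t (pk, c1) VALUES (:pk, :c1);
INSERT INTO ks.t (pk, c2) VALUES (:pk, :c2) USING TIMESTAMP :writetime_c2;
APPLY BATCH

Note that the method appends "; " after each child query, so the actual output is a single line rather than the formatted version shown here.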
Use of com.datastax.oss.dsbulk.mapping.CQLWord in project dsbulk by DataStax.
The class SchemaSettings, method createReadStatements:
public List<Statement<?>> createReadStatements(CqlSession session) {
  PreparedStatement preparedStatement = preparedStatements.get(0);
  ColumnDefinitions variables = preparedStatement.getVariableDefinitions();
  if (variables.size() == 0) {
    return Collections.singletonList(preparedStatement.bind());
  }
  // The only bound variables allowed are the start and end of a token range restriction.
  boolean ok = true;
  Optional<CQLWord> start = queryInspector.getTokenRangeRestrictionStartVariable();
  Optional<CQLWord> end = queryInspector.getTokenRangeRestrictionEndVariable();
  if (!start.isPresent() || !end.isPresent()) {
    ok = false;
  }
  if (start.isPresent() && end.isPresent()) {
    Optional<CQLWord> unrecognized =
        StreamSupport.stream(variables.spliterator(), false)
            .map(columnDefinition -> columnDefinition.getName().asInternal())
            .map(CQLWord::fromInternal)
            .filter(name -> !name.equals(start.get()) && !name.equals(end.get()))
            .findAny();
    ok = !unrecognized.isPresent();
  }
  if (!ok) {
    throw new IllegalArgumentException(
        "The provided statement (schema.query) contains unrecognized WHERE restrictions; "
            + "the WHERE clause is only allowed to contain one token range restriction "
            + "of the form: WHERE token(...) > ? AND token(...) <= ?");
  }
  Metadata metadata = session.getMetadata();
  TokenRangeReadStatementGenerator generator =
      new TokenRangeReadStatementGenerator(table, metadata);
  List<Statement<?>> statements =
      generator.generate(
          splits,
          range ->
              preparedStatement
                  .bind()
                  .setToken(
                      queryInspector.getTokenRangeRestrictionStartVariableIndex(),
                      range.getStart())
                  .setToken(
                      queryInspector.getTokenRangeRestrictionEndVariableIndex(),
                      range.getEnd()));
  LOGGER.debug("Generated {} bound statements", statements.size());
  // Shuffle the statements to avoid hitting the same replicas sequentially when
  // the statements are executed.
  Collections.shuffle(statements);
  return statements;
}
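For reference, a user-provided schema.query that satisfies this check restricts the WHERE clause to a single token range, for example (keyspace, table, and column names hypothetical):

SELECT pk, c FROM ks.t WHERE token(pk) > :start AND token(pk) <= :end

Each generated statement then binds the boundaries of one token range to those two variables, one statement per split.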
Use of com.datastax.oss.dsbulk.mapping.CQLWord in project dsbulk by DataStax.
The class SchemaSettings, method inferUpdateCounterQuery:
private String inferUpdateCounterQuery(
    ImmutableMultimap<MappingField, CQLFragment> fieldsToVariables) {
  StringBuilder sb = new StringBuilder("UPDATE ");
  sb.append(keyspaceName.render(VARIABLE)).append('.').append(tableName.render(VARIABLE));
  // Note: TTL and timestamp are not allowed in counter queries;
  // the following method checks for fixed TTLs and timestamps;
  // function-style TTLs and timestamps are checked below and forbidden as well.
  appendWriteTimeAndTTL(sb, null, null);
  sb.append(" SET ");
  Set<CQLFragment> cols = maybeSortCols(fieldsToVariables);
  Iterator<CQLFragment> colsIterator = cols.iterator();
  boolean isFirst = true;
  List<CQLWord> pks = primaryKeyColumns();
  while (colsIterator.hasNext()) {
    CQLFragment col = colsIterator.next();
    // Primary key columns are handled in the WHERE clause below.
    if (col instanceof CQLWord && pks.contains(col)) {
      continue;
    }
    // Forbid writetime and TTL right-hand function calls when updating a counter table.
    if (col instanceof FunctionCall) {
      throw new IllegalArgumentException(
          "Invalid mapping: function calls are not allowed when updating a counter table.");
    }
    // For update queries there can be only one field mapped to a given column.
    MappingField field = fieldsToVariables.inverse().get(col).iterator().next();
    if (field instanceof FunctionCall) {
      throw new IllegalArgumentException(
          "Invalid mapping: function calls are not allowed when updating a counter table.");
    } else if (field instanceof CQLLiteral) {
      throw new IllegalArgumentException(
          "Invalid mapping: constant expressions are not allowed when updating a counter table.");
    }
    if (!isFirst) {
      sb.append(", ");
    }
    isFirst = false;
    sb.append(col.render(VARIABLE))
        .append(" = ")
        .append(col.render(VARIABLE))
        .append(" + ")
        .append(col.render(NAMED_ASSIGNMENT));
  }
  sb.append(" WHERE ");
  Iterator<CQLWord> pksIterator = pks.iterator();
  while (pksIterator.hasNext()) {
    CQLFragment col = pksIterator.next();
    sb.append(col.render(VARIABLE)).append(" = ").append(col.render(NAMED_ASSIGNMENT));
    if (pksIterator.hasNext()) {
      sb.append(" AND ");
    }
  }
  return sb.toString();
}
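As a sketch of the output: for a hypothetical counter table ks.counts with primary key pk and counter column total, and assuming NAMED_ASSIGNMENT renders named bind markers, the generated query is shaped like:

UPDATE ks.counts SET total = total + :total WHERE pk = :pk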
Use of com.datastax.oss.dsbulk.mapping.CQLWord in project dsbulk by DataStax.
The class DefaultReadResultMapper, method map:
@NonNull
@Override
public Record map(@NonNull ReadResult result) {
  Object source = retainRecordSources ? result : null;
  try {
    Row row = result.getRow().orElseThrow(IllegalStateException::new);
    ColumnDefinitions columnDefinitions = row.getColumnDefinitions();
    DefaultRecord record = new DefaultRecord(source, resource, -1);
    for (ColumnDefinition def : columnDefinitions) {
      CQLWord variable = CQLWord.fromInternal(def.getName().asInternal());
      CqlIdentifier name = variable.asIdentifier();
      DataType cqlType = def.getType();
      Set<Field> fields = mapping.variableToFields(variable);
      for (Field field : fields) {
        GenericType<?> fieldType = null;
        try {
          fieldType = recordMetadata.getFieldType(field, cqlType);
          TypeCodec<?> codec = mapping.codec(variable, cqlType, fieldType);
          Object value = row.get(name, codec);
          record.setFieldValue(field, value);
        } catch (Exception e) {
          String msg =
              String.format(
                  "Could not deserialize column %s of type %s as %s",
                  name.asCql(true), cqlType, fieldType);
          throw new IllegalArgumentException(msg, e);
        }
      }
    }
    return record;
  } catch (Exception e) {
    // Any mapping failure is converted into an error record instead of failing the whole read.
    return new DefaultErrorRecord(source, resource, -1, e);
  }
}
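The CQLWord.fromInternal / asIdentifier round trip above is what preserves case-sensitive column names between the driver and the mapping layer. A minimal standalone sketch, using only the calls seen in this method (the column name is made up):

// "MyColumn" is the exact (internal) form of the name, case preserved.
CQLWord variable = CQLWord.fromInternal("MyColumn");
// Convert back to a driver CqlIdentifier for row lookups such as row.get(name, codec).
CqlIdentifier name = variable.asIdentifier();
// asCql(true) pretty-prints the identifier, quoting it only when required;
// here the mixed case forces quotes: "MyColumn"
String cql = name.asCql(true);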