Usage of com.datastax.oss.dsbulk.connectors.api.DefaultRecord in project dsbulk by DataStax:
class RecordUtils, method mappedJson.
/**
 * Builds an indexed JSON test record whose fields come from alternating
 * name/value token pairs: {@code tokens[0]} is a field name, {@code tokens[1]}
 * its text value, and so on. Each call draws a fresh id from {@code COUNTER}
 * to derive a unique source string, resource URI and position.
 *
 * <p>Note: an odd number of tokens causes an {@link ArrayIndexOutOfBoundsException},
 * matching the pairwise contract.
 */
public static Record mappedJson(String... tokens) {
  int id = COUNTER.incrementAndGet();
  DefaultRecord record =
      DefaultRecord.indexed("source" + id, URI.create("file://file" + id + ".json"), id - 1);
  for (int pairStart = 0; pairStart < tokens.length; pairStart += 2) {
    // Field name at the even slot, its textual JSON node at the odd slot.
    JsonNodeFactory nodes = JsonNodeFactory.instance;
    record.put(new DefaultMappedField(tokens[pairStart]), nodes.textNode(tokens[pairStart + 1]));
  }
  return record;
}
Usage of com.datastax.oss.dsbulk.connectors.api.DefaultRecord in project dsbulk by DataStax:
class DefaultReadResultMapper, method map.
@NonNull
@Override
public Record map(@NonNull ReadResult result) {
  // Only keep a reference to the raw result when sources are retained;
  // otherwise the record carries no source.
  Object source = retainRecordSources ? result : null;
  try {
    Row row = result.getRow().orElseThrow(IllegalStateException::new);
    DefaultRecord record = new DefaultRecord(source, resource, -1);
    // Walk every column of the row and copy its deserialized value into
    // each record field mapped to that column.
    for (ColumnDefinition column : row.getColumnDefinitions()) {
      CQLWord variable = CQLWord.fromInternal(column.getName().asInternal());
      CqlIdentifier identifier = variable.asIdentifier();
      DataType columnType = column.getType();
      for (Field target : mapping.variableToFields(variable)) {
        // Declared before the try so a failure in getFieldType still
        // reports (a null) target type in the error message below.
        GenericType<?> targetType = null;
        try {
          targetType = recordMetadata.getFieldType(target, columnType);
          TypeCodec<?> codec = mapping.codec(variable, columnType, targetType);
          record.setFieldValue(target, row.get(identifier, codec));
        } catch (Exception e) {
          throw new IllegalArgumentException(
              String.format(
                  "Could not deserialize column %s of type %s as %s",
                  identifier.asCql(true), columnType, targetType),
              e);
        }
      }
    }
    return record;
  } catch (Exception e) {
    // Boundary handler: never propagate — surface the failure as an error record.
    return new DefaultErrorRecord(source, resource, -1, e);
  }
}
Usage of com.datastax.oss.dsbulk.connectors.api.DefaultRecord in project dsbulk by DataStax:
class RecordUtils, method indexedCSV.
/**
 * Builds an indexed CSV test record containing the given values in order,
 * each stored under its zero-based position. A fresh id from {@code COUNTER}
 * makes the source string, resource URI and position unique per call.
 */
public static Record indexedCSV(String... values) {
  int id = COUNTER.incrementAndGet();
  DefaultRecord record =
      DefaultRecord.indexed("source" + id, URI.create("file://file" + id + ".csv"), id - 1);
  int position = 0;
  for (String value : values) {
    record.put(new DefaultIndexedField(position++), value);
  }
  return record;
}
Usage of com.datastax.oss.dsbulk.connectors.api.DefaultRecord in project dsbulk by DataStax:
class RecordUtils, method mappedCSV.
/**
 * Builds a CSV test record from alternating name/value token pairs: each value
 * is stored twice — under its mapped field name ({@code tokens[i]}) and under
 * its zero-based pair index, mirroring {@code indexedCSV}. A fresh id from
 * {@code COUNTER} makes the source string, resource URI and position unique.
 *
 * <p>An odd number of tokens causes an {@link ArrayIndexOutOfBoundsException},
 * matching the pairwise contract.
 */
public static Record mappedCSV(String... tokens) {
  int counter = COUNTER.incrementAndGet();
  DefaultRecord record =
      DefaultRecord.indexed(
          "source" + counter, URI.create("file://file" + counter + ".csv"), counter - 1);
  for (int i = 0; i < tokens.length; i += 2) {
    record.put(new DefaultMappedField(tokens[i]), tokens[i + 1]);
    // BUG FIX: the loop steps by 2, so "i % 2" was always 0 and every pair
    // clobbered indexed field 0. The indexed position must be the pair
    // number, i.e. i / 2, consistent with indexedCSV.
    record.put(new DefaultIndexedField(i / 2), tokens[i + 1]);
  }
  return record;
}
Aggregations