Example use of io.cdap.cdap.spi.data.table.field.Field in project cdap by caskdata:
the class DefaultSecretStore, method store.
/**
 * Stores the secret data for the given namespace and name, overwriting any existing entry.
 *
 * @param namespace namespace the secret belongs to
 * @param name name of the secret
 * @param encoder encoder used to serialize the secret data to bytes
 * @param data the secret payload to persist
 * @throws IOException if encoding the data or writing to the table fails
 */
@Override
public <T> void store(String namespace, String name, Encoder<T> encoder, T data) throws IOException {
  TransactionRunners.run(transactionRunner, context -> {
    StructuredTable table = context.getTable(StoreDefinition.SecretStore.SECRET_STORE_TABLE);
    // Row key fields first, then the encoded payload column.
    ImmutableList.Builder<Field<?>> builder = ImmutableList.builder();
    builder.addAll(getKeyFields(namespace, name));
    builder.add(Fields.bytesField(StoreDefinition.SecretStore.SECRET_DATA_FIELD, encoder.encode(data)));
    table.upsert(builder.build());
  }, IOException.class);
}
Example use of io.cdap.cdap.spi.data.table.field.Field in project cdap by caskdata:
the class ConnectionStore, method saveConnection.
/**
 * Save the connection in the store.
 *
 * @param connectionId the connection id
 * @param connection the connection information
 * @param overWrite flag indicating whether the store should overwrite an existing connection with same connection id
 * but different connection name, i.e, a b and a.b both convert to id a_b
 */
public void saveConnection(ConnectionId connectionId, Connection connection, boolean overWrite) {
  TransactionRunners.run(transactionRunner, context -> {
    StructuredTable table = context.getTable(TABLE_ID);
    Connection existing = getConnectionInternal(table, connectionId, false);
    Connection toWrite;
    if (existing == null) {
      // No conflict: write the incoming connection as-is.
      toWrite = connection;
    } else {
      // Pre-configured connections can never be replaced.
      if (existing.isPreConfigured()) {
        throw new ConnectionConflictException(String.format(
          "Connection %s in namespace %s has same id %s and is pre-configured. "
            + "Preconfigured connections cannot be updated or overwritten.",
          existing.getName(), connectionId.getNamespace(), connectionId.getConnectionId()));
      }
      // A different name collapsing to the same id is a conflict unless overwrite was requested.
      if (!existing.getName().equals(connection.getName()) && !overWrite) {
        throw new ConnectionConflictException(String.format(
          "Connection %s in namespace %s has same id %s. Please choose a different connection name.",
          existing.getName(), connectionId.getNamespace(), connectionId.getConnectionId()));
      }
      // Keep the original id and creation time; take everything else from the new connection.
      toWrite = new Connection(connection.getName(), existing.getConnectionId(),
        connection.getConnectionType(), connection.getDescription(), connection.isPreConfigured(),
        connection.isDefault(), existing.getCreatedTimeMillis(), connection.getUpdatedTimeMillis(),
        connection.getPlugin());
    }
    Collection<Field<?>> fields = getConnectionKeys(connectionId);
    fields.add(Fields.longField(CREATED_COL, toWrite.getCreatedTimeMillis()));
    fields.add(Fields.longField(UPDATED_COL, toWrite.getUpdatedTimeMillis()));
    fields.add(Fields.stringField(CONNECTION_DATA_FIELD, GSON.toJson(toWrite)));
    table.upsert(fields);
  });
}
Example use of io.cdap.cdap.spi.data.table.field.Field in project cdap by caskdata:
the class DatasetTypeTable, method writeModule.
/**
 * Writes the metadata of a dataset module and refreshes the type-to-module mappings:
 * every type the module now exposes is (re)mapped, and mappings for types that an
 * earlier version of the module exposed but the new one no longer does are deleted.
 *
 * @param namespaceId namespace the module belongs to
 * @param moduleMeta metadata of the module to persist
 * @throws IOException if a table read or write fails
 */
public void writeModule(NamespaceId namespaceId, DatasetModuleMeta moduleMeta) throws IOException {
  DatasetModuleId moduleId = namespaceId.datasetModule(moduleMeta.getName());
  DatasetModuleMeta previous = getModule(moduleId);

  // Upsert the module row keyed by namespace + module name.
  List<Field<?>> moduleFields = getModuleKey(namespaceId.getEntityName(), moduleMeta.getName());
  moduleFields.add(Fields.stringField(StoreDefinition.DatasetTypeStore.DATASET_METADATA_FIELD, GSON.toJson(moduleMeta)));
  getModuleTable().upsert(moduleFields);

  // Map every type exposed by the new module version back to the module.
  for (String type : moduleMeta.getTypes()) {
    writeTypeToModuleMapping(namespaceId.datasetType(type), moduleId);
  }

  if (previous == null) {
    return;
  }
  // Delete mappings for types dropped since the previous version.
  Set<String> dropped = new HashSet<>(previous.getTypes());
  dropped.removeAll(moduleMeta.getTypes());
  for (String type : dropped) {
    getTypeTable().deleteAll(Range.singleton(getTypeKey(moduleId.getNamespace(), type)));
  }
}
Example use of io.cdap.cdap.spi.data.table.field.Field in project cdap by caskdata:
the class FieldLineageTable, method getChecksumsWithProgramRunsInRange.
/**
 * Scans the endpoint-checksum table for the given direction and endpoint within
 * [start, end] and groups the program runs by checksum, preserving scan order.
 *
 * @param direction lineage direction used to build the scan key
 * @param endPoint endpoint used to build the scan key
 * @param start inclusive start time of the range
 * @param end inclusive end time of the range
 * @return checksum to the set of program runs recorded for it, in scan order
 * @throws IOException if the table scan fails
 */
private Map<Long, Set<ProgramRunId>> getChecksumsWithProgramRunsInRange(String direction, EndPoint endPoint, long start, long end) throws IOException {
  // Timestamps are stored inverted, so the end-time yields the scan start key
  // and the start-time yields the scan end key.
  List<Field<?>> lowerKey = getScanKey(direction, endPoint, end);
  List<Field<?>> upperKey = getScanKey(direction, endPoint, start);
  Range range = Range.create(lowerKey, Range.Bound.INCLUSIVE, upperKey, Range.Bound.INCLUSIVE);

  Map<Long, Set<ProgramRunId>> checksumToRuns = new LinkedHashMap<>();
  try (CloseableIterator<StructuredRow> rows = getEndpointChecksumTable().scan(range, Integer.MAX_VALUE)) {
    while (rows.hasNext()) {
      StructuredRow row = rows.next();
      long checksum = row.getLong(StoreDefinition.FieldLineageStore.CHECKSUM_FIELD);
      ProgramRunId runId = GSON.fromJson(row.getString(StoreDefinition.FieldLineageStore.PROGRAM_RUN_FIELD), ProgramRunId.class);
      checksumToRuns.computeIfAbsent(checksum, unused -> new HashSet<>()).add(runId);
    }
  }
  return checksumToRuns;
}
Example use of io.cdap.cdap.spi.data.table.field.Field in project cdap by caskdata:
the class FieldLineageTable, method getOperations.
/**
 * Returns the field lineage operations, grouped with the program runs that produced them,
 * for the given direction, endpoint, and time range. Checksums whose operations row is
 * missing or whose stored JSON cannot be parsed are skipped.
 *
 * @param direction lineage direction used to build the scan key
 * @param endPoint endpoint used to build the scan key
 * @param start inclusive start time of the range
 * @param end inclusive end time of the range
 * @return operations paired with their program runs, in checksum scan order
 * @throws IOException if a table read or scan fails
 */
private Set<ProgramRunOperations> getOperations(String direction, EndPoint endPoint, long start, long end) throws IOException {
  Map<Long, Set<ProgramRunId>> checksumsWithProgramRunsInRange = getChecksumsWithProgramRunsInRange(direction, endPoint, start, end);
  Set<ProgramRunOperations> result = new LinkedHashSet<>();
  for (Map.Entry<Long, Set<ProgramRunId>> entry : checksumsWithProgramRunsInRange.entrySet()) {
    long checksum = entry.getKey();
    List<Field<?>> keys = getOperationsKey(checksum);
    Optional<StructuredRow> row = getOperationsTable().read(keys);
    if (!row.isPresent()) {
      continue;
    }
    String value = row.get().getString(StoreDefinition.FieldLineageStore.OPERATIONS_FIELD);
    Set<Operation> operations;
    try {
      operations = GSON.fromJson(value, SET_OPERATION_TYPE);
    } catch (JsonSyntaxException e) {
      // Parameterized SLF4J logging: no eager String.format, the cause is preserved,
      // and the stray quote in the old "checksum %d'." message is fixed.
      LOG.warn("Failed to parse json from checksum {}. Ignoring operations.", checksum, e);
      continue;
    }
    // Gson returns null for the literal "null" JSON value; skip those rows too.
    if (operations != null) {
      result.add(new ProgramRunOperations(entry.getValue(), operations));
    }
  }
  return result;
}
Aggregations