Example usage of io.dingodb.common.table.TableId in the dingo project (by dingodb),
taken from the class DingoJobVisitor, method visit(DingoPartModify).
@Override
public Collection<Output> visit(@Nonnull DingoPartModify rel) {
    // Visit the child first: each upstream output feeds one part-modify operator.
    final Collection<Output> sourceOutputs = dingo(rel.getInput()).accept(this);
    final String tableName = getSimpleName(rel.getTable());
    final TableDefinition definition = Services.META.getTableDefinition(tableName);
    final TableId tableId = new TableId(Services.META.getTableKey(tableName));
    final List<Output> results = new LinkedList<>();
    for (Output source : sourceOutputs) {
        final Task task = source.getTask();
        final Operator operator;
        // Build the operator matching the DML kind of this relational node.
        switch (rel.getOperation()) {
            case INSERT:
                operator = new PartInsertOperator(
                    tableId,
                    source.getHint().getPartId(),
                    definition.getTupleSchema(),
                    definition.getKeyMapping()
                );
                break;
            case UPDATE:
                operator = new PartUpdateOperator(
                    tableId,
                    source.getHint().getPartId(),
                    definition.getTupleSchema(),
                    definition.getKeyMapping(),
                    TupleMapping.of(definition.getColumnIndices(rel.getUpdateColumnList())),
                    rel.getSourceExpressionList().stream()
                        .map(RexConverter::toString)
                        .collect(Collectors.toList())
                );
                break;
            case DELETE:
                operator = new PartDeleteOperator(
                    tableId,
                    source.getHint().getPartId(),
                    definition.getTupleSchema(),
                    definition.getKeyMapping()
                );
                break;
            default:
                throw new IllegalStateException("Operation \"" + rel.getOperation() + "\" is not supported.");
        }
        operator.setId(idGenerator.get());
        task.putOperator(operator);
        // Wire the upstream output into this operator's single input.
        source.setLink(operator.getInput(0));
        // Mark the output so downstream can sum up affected-row counts.
        final OutputHint hint = new OutputHint();
        hint.setToSumUp(true);
        operator.getSoleOutput().setHint(hint);
        results.addAll(operator.getOutputs());
    }
    return results;
}
Example usage of io.dingodb.common.table.TableId in the dingo project (by dingodb),
taken from the class DingoJobVisitor, method visit(DingoPartScan).
@Override
public Collection<Output> visit(@Nonnull DingoPartScan rel) {
    final String tableName = getSimpleName(rel.getTable());
    final TableDefinition definition = Services.META.getTableDefinition(tableName);
    final Map<String, Location> partLocations = Services.META.getPartLocations(tableName);
    final TableId tableId = new TableId(Services.META.getTableKey(tableName));
    // Serialize the optional filter once; null means "scan everything".
    final String filterStr = rel.getFilter() == null
        ? null
        : RexConverter.convert(rel.getFilter()).toString();
    final List<Output> outputs = new ArrayList<>(partLocations.size());
    // One scan operator per partition, placed on the task at that partition's location.
    for (Map.Entry<String, Location> part : partLocations.entrySet()) {
        final PartScanOperator operator = new PartScanOperator(
            tableId,
            part.getKey(),
            definition.getTupleSchema(),
            definition.getKeyMapping(),
            filterStr,
            rel.getSelection()
        );
        operator.setId(idGenerator.get());
        job.getOrCreate(part.getValue()).putOperator(operator);
        operator.getSoleOutput().setHint(OutputHint.of(tableName, part.getKey()));
        outputs.addAll(operator.getOutputs());
    }
    return outputs;
}
Example usage of io.dingodb.common.table.TableId in the dingo project (by dingodb),
taken from the class DingoJobVisitor, method visit(DingoGetByKeys).
@Override
public Collection<Output> visit(@Nonnull DingoGetByKeys rel) {
    final String tableName = getSimpleName(rel.getTable());
    final TableDefinition definition = Services.META.getTableDefinition(tableName);
    final Map<String, Location> partLocations = Services.META.getPartLocations(tableName);
    final TableId tableId = new TableId(Services.META.getTableKey(tableName));
    // Route each requested key tuple to its partition via simple hashing.
    final PartitionStrategy strategy = new SimpleHashStrategy(partLocations.size());
    final Map<String, List<Object[]>> keysByPart = strategy.partKeyTuples(rel.getKeyTuples());
    final List<Output> outputs = new LinkedList<>();
    // One point-lookup operator per partition that actually holds requested keys.
    for (Map.Entry<String, List<Object[]>> part : keysByPart.entrySet()) {
        final String partId = part.getKey();
        final GetByKeysOperator operator = new GetByKeysOperator(
            tableId,
            partId,
            definition.getTupleSchema(),
            definition.getKeyMapping(),
            part.getValue(),
            rel.getSelection()
        );
        operator.setId(idGenerator.get());
        job.getOrCreate(partLocations.get(partId)).putOperator(operator);
        operator.getSoleOutput().setHint(OutputHint.of(tableName, partId));
        outputs.addAll(operator.getOutputs());
    }
    return outputs;
}
Example usage of io.dingodb.common.table.TableId in the dingo project (by dingodb),
taken from the class MetaTestService, method createTable.
@Override
public void createTable(@Nonnull String tableName, @Nonnull TableDefinition tableDefinition) {
    // Persist the table definition as JSON, then pre-create the KV-store block
    // for every partition of the new table.
    // FIX: the FileOutputStream was previously never closed (resource leak) —
    // use try-with-resources so it is closed on every path.
    try (OutputStream os = new FileOutputStream(metaFile(tableName))) {
        tableDefinition.writeJson(os);
        // force reload
        tableDefinitionMap = null;
        Map<String, Location> partLocations = getPartLocations(tableName);
        for (Map.Entry<String, Location> entry : partLocations.entrySet()) {
            StoreInstance store = Services.KV_STORE.getInstance(entry.getValue().getPath());
            // Constructed for its side effect of initializing the partition's KV block;
            // the instance itself is intentionally discarded.
            new PartInKvStore(
                store.getKvBlock(new TableId(getTableKey(tableName)), entry.getKey()),
                tableDefinition.getTupleSchema(),
                tableDefinition.getKeyMapping()
            );
        }
    } catch (IOException e) {
        // FIX: preserve the cause — log the exception (SLF4J last-argument idiom)
        // and chain it into the thrown error instead of dropping it.
        log.error("Failed to write table definition: {}", tableDefinition, e);
        throw new AssertionError("Failed to write table definition.", e);
    }
}
Aggregations