Use of io.dingodb.common.table.TableDefinition in the dingo project (by dingodb).
Class DingoDdlExecutor, method execute.
@SuppressWarnings({ "unused", "MethodMayBeStatic" })
public void execute(SqlCreateTable create, CalcitePrepare.Context context) {
    // Executes a CREATE TABLE statement: builds a TableDefinition from the parsed
    // column list and registers it in the target schema.
    log.info("DDL execute: {}", create);
    final String tableName = getTableName(create.name, context);
    TableDefinition td = new TableDefinition(tableName);
    List<String> keyList = null;
    SqlNodeList columnList = create.columnList;
    if (columnList == null) {
        throw SqlUtil.newContextException(create.name.getParserPosition(),
            RESOURCE.createTableRequiresColumnList());
    }
    // First pass: locate the PRIMARY KEY constraint (if any) and collect the
    // names of its key columns.
    for (SqlNode sqlNode : create.columnList) {
        if (sqlNode instanceof SqlKeyConstraint) {
            SqlKeyConstraint constraint = (SqlKeyConstraint) sqlNode;
            if (constraint.getOperator().getKind() == SqlKind.PRIMARY_KEY) {
                // Operand 0 is the constraint name; operand 1 is the key column list.
                keyList = ((SqlNodeList) constraint.getOperandList().get(1)).getList().stream()
                    .map(t -> ((SqlIdentifier) Objects.requireNonNull(t)).getSimple())
                    .collect(Collectors.toList());
                break;
            }
        }
    }
    SqlValidator validator = new ContextSqlValidator(context, true);
    // Second pass: translate each column declaration into a ColumnDefinition,
    // marking the columns named in keyList as primary.
    for (SqlNode sqlNode : create.columnList) {
        if (sqlNode.getKind() == SqlKind.COLUMN_DECL) {
            SqlColumnDeclaration scd = (SqlColumnDeclaration) sqlNode;
            ColumnDefinition cd = fromSqlColumnDeclaration(scd, validator, keyList);
            td.addColumn(cd);
        }
    }
    if (td.getColumns().stream().noneMatch(ColumnDefinition::isPrimary)) {
        throw new RuntimeException("Table must have a primary key.");
    }
    final MutableSchema schema = getSchema(context);
    if (schema.getTable(tableName) != null) {
        if (!create.ifNotExists) {
            // They did not specify IF NOT EXISTS, so give error.
            throw SqlUtil.newContextException(create.name.getParserPosition(),
                RESOURCE.tableExists(tableName));
        }
        // IF NOT EXISTS was specified and the table already exists: this must be
        // a no-op. The original code fell through and re-created the table.
        return;
    }
    schema.createTable(tableName, td);
}
Use of io.dingodb.common.table.TableDefinition in the dingo project (by dingodb).
Class MetaServiceClient, method getAll.
/**
 * Fetches every table definition from the remote meta service in a single
 * round trip, caching each raw entry in {@code tableEntries} along the way.
 *
 * @return a map from table name to its {@link TableDefinition}
 */
private Map<String, TableDefinition> getAll() {
    return RemoteServerCaller.call(connector::newChannel, GET_ALL.message(META_SERVICE), buffer -> {
        // The response starts with a zig-zag varint entry count, followed by
        // that many serialized table entries.
        final int count = PrimitiveCodec.readZigZagInt(buffer);
        final Map<String, TableDefinition> definitions = new HashMap<>(count);
        for (int index = 0; index < count; index++) {
            final TableEntry entry = readTableEntry(buffer);
            final String name = entry.tableName();
            tableEntries.put(name, entry);
            definitions.put(name, entry.tableDefinition());
        }
        return definitions;
    });
}
Use of io.dingodb.common.table.TableDefinition in the dingo project (by dingodb).
Class MetaServiceHandler, method onMessage.
/**
 * Dispatches one incoming meta-service request and writes the reply to the channel.
 * Wire format: a leading zig-zag varint operation code, followed by that
 * operation's arguments in the remainder of the buffer. Every reply begins with
 * a zig-zag-encoded {@code ServerError} code.
 */
private void onMessage(Message message, Channel channel) {
ByteBuffer buffer = ByteBuffer.wrap(message.toBytes());
MetaServiceCode code = MetaServiceCode.valueOf(PrimitiveCodec.readZigZagInt(buffer));
switch(code) {
case LISTENER_TABLE:
// todo
break;
case REFRESH_TABLES:
// todo
break;
// GET_TABLE: argument is a table name; reply is OK followed by the encoded
// table entry.
case GET_TABLE:
try {
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
outputStream.write(encodeZigZagInt(ServerError.OK.getCode()));
getAndEncodeTableEntry(readString(buffer), outputStream);
channel.send(SimpleMessage.builder().content(outputStream.toByteArray()).build());
} catch (IOException e) {
log.error("Serialize/deserialize table info error.", e);
channel.send(ServerError.IO.message());
// NOTE(review): an unknown table is signalled here by an NPE — presumably
// thrown inside getAndEncodeTableEntry — which is exception-as-control-flow;
// an explicit existence check would be clearer. Confirm before changing.
} catch (NullPointerException e) {
channel.send(ServerError.TABLE_NOT_FOUND.message());
}
break;
// CREATE_TABLE: arguments are the table name and a JSON-serialized
// TableDefinition; replies OK on success.
case CREATE_TABLE:
try {
String name = readString(buffer);
TableDefinition definition = TableDefinition.fromJson(readString(buffer));
metaService.createTable(name, definition);
channel.send(ServerError.OK.message());
} catch (IOException e) {
log.error("Serialize/deserialize table info error.", e);
channel.send(ServerError.IO.message());
} catch (DingoException error) {
channel.send(ServerError.message(error));
}
break;
// DELETE_TABLE: argument is the table name; dropTable's boolean result maps
// to OK / UNKNOWN.
case DELETE_TABLE:
try {
// todo delete table data
String name = readString(buffer);
if (metaService.dropTable(name)) {
channel.send(ServerError.OK.message());
} else {
channel.send(ServerError.UNKNOWN.message());
}
} catch (DingoException error) {
channel.send(ServerError.message(error));
}
break;
// GET_ALL: no arguments; reply is OK, a zig-zag entry count, then each
// encoded table entry in sequence.
case GET_ALL:
try {
Map<String, TableDefinition> tableDefinitions = metaService.getTableDefinitions();
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
byte[] size = encodeZigZagInt(tableDefinitions.size());
outputStream.write(encodeZigZagInt(ServerError.OK.getCode()));
outputStream.write(size);
outputStream.flush();
for (String name : tableDefinitions.keySet()) {
getAndEncodeTableEntry(name, outputStream);
}
channel.send(SimpleMessage.builder().content(outputStream.toByteArray()).build());
} catch (IOException e) {
log.error("Serialize/deserialize table info error.", e);
channel.send(ServerError.IO.message());
}
break;
// Any unhandled operation code gets an explicit "unsupported" reply.
default:
channel.send(UNSUPPORTED_CODE.message());
}
}
Use of io.dingodb.common.table.TableDefinition in the dingo project (by dingodb).
Class DingoJobVisitor, method visit.
@Override
public Collection<Output> visit(@Nonnull DingoDistributedValues rel) {
    // Partition the literal rows by key, then emit one ValuesOperator per
    // partition, placed on the task that owns that partition's location.
    final String tableName = getSimpleName(rel.getTable());
    final Map<String, Location> partLocations = Services.META.getPartLocations(tableName);
    final PartitionStrategy strategy = new SimpleHashStrategy(partLocations.size());
    final TableDefinition tableDefinition = Services.META.getTableDefinition(tableName);
    final Map<String, List<Object[]>> tuplesByPart =
        strategy.partTuples(rel.getValues(), tableDefinition.getKeyMapping());
    final List<Output> outputs = new LinkedList<>();
    tuplesByPart.forEach((partId, tuples) -> {
        ValuesOperator valuesOperator = new ValuesOperator(tuples);
        valuesOperator.setId(idGenerator.get());
        Location location = partLocations.get(partId);
        OutputHint hint = new OutputHint();
        hint.setPartId(partId);
        hint.setLocation(location);
        valuesOperator.getSoleOutput().setHint(hint);
        job.getOrCreate(location).putOperator(valuesOperator);
        outputs.addAll(valuesOperator.getOutputs());
    });
    return outputs;
}
Use of io.dingodb.common.table.TableDefinition in the dingo project (by dingodb).
Class DingoJobVisitor, method visit.
@Override
public Collection<Output> visit(@Nonnull DingoPartModify rel) {
    // For each upstream output, append a per-partition modification operator
    // (insert/update/delete) on the same task and mark its output for sum-up.
    final Collection<Output> inputs = dingo(rel.getInput()).accept(this);
    final String tableName = getSimpleName(rel.getTable());
    final TableDefinition td = Services.META.getTableDefinition(tableName);
    final TableId tableId = new TableId(Services.META.getTableKey(tableName));
    final List<Output> outputs = new LinkedList<>();
    for (Output input : inputs) {
        final Task task = input.getTask();
        final Operator operator;
        switch (rel.getOperation()) {
            case INSERT:
                operator = new PartInsertOperator(
                    tableId, input.getHint().getPartId(), td.getTupleSchema(), td.getKeyMapping());
                break;
            case UPDATE:
                operator = new PartUpdateOperator(
                    tableId,
                    input.getHint().getPartId(),
                    td.getTupleSchema(),
                    td.getKeyMapping(),
                    TupleMapping.of(td.getColumnIndices(rel.getUpdateColumnList())),
                    rel.getSourceExpressionList().stream()
                        .map(RexConverter::toString)
                        .collect(Collectors.toList()));
                break;
            case DELETE:
                operator = new PartDeleteOperator(
                    tableId, input.getHint().getPartId(), td.getTupleSchema(), td.getKeyMapping());
                break;
            default:
                throw new IllegalStateException(
                    "Operation \"" + rel.getOperation() + "\" is not supported.");
        }
        operator.setId(idGenerator.get());
        task.putOperator(operator);
        input.setLink(operator.getInput(0));
        final OutputHint hint = new OutputHint();
        hint.setToSumUp(true);
        operator.getSoleOutput().setHint(hint);
        outputs.addAll(operator.getOutputs());
    }
    return outputs;
}
Aggregations