
Example 1 with TableDefinition

Use of io.dingodb.common.table.TableDefinition in project dingo by dingodb, in the class DingoDdlExecutor, method execute.

@SuppressWarnings({ "unused", "MethodMayBeStatic" })
public void execute(SqlCreateTable create, CalcitePrepare.Context context) {
    log.info("DDL execute: {}", create);
    final String tableName = getTableName(create.name, context);
    TableDefinition td = new TableDefinition(tableName);
    List<String> keyList = null;
    SqlNodeList columnList = create.columnList;
    if (columnList == null) {
        throw SqlUtil.newContextException(create.name.getParserPosition(), RESOURCE.createTableRequiresColumnList());
    }
    for (SqlNode sqlNode : create.columnList) {
        if (sqlNode instanceof SqlKeyConstraint) {
            SqlKeyConstraint constraint = (SqlKeyConstraint) sqlNode;
            if (constraint.getOperator().getKind() == SqlKind.PRIMARY_KEY) {
                // The 0th element is the name of the constraint
                keyList = ((SqlNodeList) constraint.getOperandList().get(1)).getList().stream()
                    .map(t -> ((SqlIdentifier) Objects.requireNonNull(t)).getSimple())
                    .collect(Collectors.toList());
                break;
            }
        }
    }
    SqlValidator validator = new ContextSqlValidator(context, true);
    for (SqlNode sqlNode : create.columnList) {
        if (sqlNode.getKind() == SqlKind.COLUMN_DECL) {
            SqlColumnDeclaration scd = (SqlColumnDeclaration) sqlNode;
            ColumnDefinition cd = fromSqlColumnDeclaration(scd, validator, keyList);
            td.addColumn(cd);
        }
    }
    if (td.getColumns().stream().noneMatch(ColumnDefinition::isPrimary)) {
        throw new RuntimeException("Not have primary key!");
    }
    final MutableSchema schema = getSchema(context);
    if (schema.getTable(tableName) != null) {
        if (!create.ifNotExists) {
            // They did not specify IF NOT EXISTS, so give error.
            throw SqlUtil.newContextException(create.name.getParserPosition(), RESOURCE.tableExists(tableName));
        }
    }
    schema.createTable(tableName, td);
}
Also used : SqlValidator(org.apache.calcite.sql.validate.SqlValidator) ContextSqlValidator(org.apache.calcite.jdbc.ContextSqlValidator) TableDefinition(io.dingodb.common.table.TableDefinition) SqlNodeList(org.apache.calcite.sql.SqlNodeList) SqlColumnDeclaration(org.apache.calcite.sql.ddl.SqlColumnDeclaration) SqlKeyConstraint(org.apache.calcite.sql.ddl.SqlKeyConstraint) SqlNode(org.apache.calcite.sql.SqlNode) ColumnDefinition(io.dingodb.common.table.ColumnDefinition)
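
Two things carry the weight in this method: the PRIMARY KEY constraint yields the list of key column names, and the resulting column definitions must contain at least one primary column. A minimal, self-contained sketch of that logic, using a hypothetical SimpleColumn record in place of the real ColumnDefinition (names and messages here are illustrative, not DingoDB's code):

import java.util.List;
import java.util.stream.Collectors;

// Hypothetical stand-in for io.dingodb.common.table.ColumnDefinition, used only in this sketch.
record SimpleColumn(String name, boolean primary) {
}

public class PrimaryKeyCheckSketch {

    // Mark each declared column as primary if its name appears in the PRIMARY KEY list,
    // roughly what happens when fromSqlColumnDeclaration is given keyList.
    static List<SimpleColumn> markPrimaryColumns(List<String> declared, List<String> keyList) {
        return declared.stream()
            .map(name -> new SimpleColumn(name, keyList.contains(name)))
            .collect(Collectors.toList());
    }

    // Mirrors the executor's invariant: at least one column must be part of the primary key.
    static void requirePrimaryKey(List<SimpleColumn> columns) {
        if (columns.stream().noneMatch(SimpleColumn::primary)) {
            throw new IllegalArgumentException("CREATE TABLE requires a primary key.");
        }
    }

    public static void main(String[] args) {
        List<SimpleColumn> columns = markPrimaryColumns(List.of("id", "name"), List.of("id"));
        requirePrimaryKey(columns);
        System.out.println("primary key check passed");
    }
}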

Example 2 with TableDefinition

Use of io.dingodb.common.table.TableDefinition in project dingo by dingodb, in the class MetaServiceClient, method getAll.

private Map<String, TableDefinition> getAll() {
    return RemoteServerCaller.call(connector::newChannel, GET_ALL.message(META_SERVICE), buffer -> {
        int size = PrimitiveCodec.readZigZagInt(buffer);
        Map<String, TableDefinition> result = new HashMap<>(size);
        for (int i = 0; i < size; i++) {
            TableEntry entry = readTableEntry(buffer);
            tableEntries.put(entry.tableName(), entry);
            result.put(entry.tableName(), entry.tableDefinition());
        }
        return result;
    });
}
Also used : TableEntry(io.dingodb.server.protocol.proto.TableEntry) HashMap(java.util.HashMap) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) TableDefinition(io.dingodb.common.table.TableDefinition) PrimitiveCodec.encodeString(io.dingodb.common.codec.PrimitiveCodec.encodeString)
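
Both the entry count and the strings inside each table entry are read with PrimitiveCodec's ZigZag varint routines. A small sketch of how such a value is commonly decoded from a ByteBuffer, assuming the standard protobuf-style ZigZag varint scheme; the actual PrimitiveCodec may differ in detail:

import java.nio.ByteBuffer;

public class ZigZagDecodeSketch {

    // Reads a protobuf-style ZigZag varint; assumed to approximate PrimitiveCodec.readZigZagInt.
    static int readZigZagInt(ByteBuffer buffer) {
        int raw = 0;
        int shift = 0;
        byte b;
        do {
            b = buffer.get();
            raw |= (b & 0x7F) << shift;
            shift += 7;
        } while ((b & 0x80) != 0);
        // Undo the ZigZag mapping 0, 1, 2, 3, ... -> 0, -1, 1, -2, ...
        return (raw >>> 1) ^ -(raw & 1);
    }

    public static void main(String[] args) {
        // -3 ZigZag-maps to 5, which fits in a single varint byte.
        ByteBuffer buffer = ByteBuffer.wrap(new byte[] {0x05});
        System.out.println(readZigZagInt(buffer)); // prints -3
    }
}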

Example 3 with TableDefinition

Use of io.dingodb.common.table.TableDefinition in project dingo by dingodb, in the class MetaServiceHandler, method onMessage.

private void onMessage(Message message, Channel channel) {
    ByteBuffer buffer = ByteBuffer.wrap(message.toBytes());
    MetaServiceCode code = MetaServiceCode.valueOf(PrimitiveCodec.readZigZagInt(buffer));
    switch(code) {
        case LISTENER_TABLE:
            // todo
            break;
        case REFRESH_TABLES:
            // todo
            break;
        case GET_TABLE:
            try {
                ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
                outputStream.write(encodeZigZagInt(ServerError.OK.getCode()));
                getAndEncodeTableEntry(readString(buffer), outputStream);
                channel.send(SimpleMessage.builder().content(outputStream.toByteArray()).build());
            } catch (IOException e) {
                log.error("Serialize/deserialize table info error.", e);
                channel.send(ServerError.IO.message());
            } catch (NullPointerException e) {
                channel.send(ServerError.TABLE_NOT_FOUND.message());
            }
            break;
        case CREATE_TABLE:
            try {
                String name = readString(buffer);
                TableDefinition definition = TableDefinition.fromJson(readString(buffer));
                metaService.createTable(name, definition);
                channel.send(ServerError.OK.message());
            } catch (IOException e) {
                log.error("Serialize/deserialize table info error.", e);
                channel.send(ServerError.IO.message());
            } catch (DingoException error) {
                channel.send(ServerError.message(error));
            }
            break;
        case DELETE_TABLE:
            try {
                // todo delete table data
                String name = readString(buffer);
                if (metaService.dropTable(name)) {
                    channel.send(ServerError.OK.message());
                } else {
                    channel.send(ServerError.UNKNOWN.message());
                }
            } catch (DingoException error) {
                channel.send(ServerError.message(error));
            }
            break;
        case GET_ALL:
            try {
                Map<String, TableDefinition> tableDefinitions = metaService.getTableDefinitions();
                ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
                byte[] size = encodeZigZagInt(tableDefinitions.size());
                outputStream.write(encodeZigZagInt(ServerError.OK.getCode()));
                outputStream.write(size);
                outputStream.flush();
                for (String name : tableDefinitions.keySet()) {
                    getAndEncodeTableEntry(name, outputStream);
                }
                channel.send(SimpleMessage.builder().content(outputStream.toByteArray()).build());
            } catch (IOException e) {
                log.error("Serialize/deserialize table info error.", e);
                channel.send(ServerError.IO.message());
            }
            break;
        default:
            channel.send(UNSUPPORTED_CODE.message());
    }
}
Also used : MetaServiceCode(io.dingodb.server.protocol.code.MetaServiceCode) DingoException(io.dingodb.common.error.DingoException) TableDefinition(io.dingodb.common.table.TableDefinition) ByteArrayOutputStream(java.io.ByteArrayOutputStream) IOException(java.io.IOException) PrimitiveCodec.encodeString(io.dingodb.common.codec.PrimitiveCodec.encodeString) PrimitiveCodec.readString(io.dingodb.common.codec.PrimitiveCodec.readString) ByteBuffer(java.nio.ByteBuffer)
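
The GET_ALL branch produces exactly the frame that Example 2's client consumes: a status code, an entry count, then the encoded entries. A hedged sketch of the encoding side, again assuming a protobuf-style ZigZag varint; the numeric value of ServerError.OK is not known here and is stubbed as 0:

import java.io.ByteArrayOutputStream;
import java.io.IOException;

public class ZigZagEncodeSketch {

    // Protobuf-style ZigZag varint; assumed to approximate PrimitiveCodec.encodeZigZagInt.
    static byte[] encodeZigZagInt(int value) {
        int raw = (value << 1) ^ (value >> 31);
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        while ((raw & ~0x7F) != 0) {
            out.write((raw & 0x7F) | 0x80);
            raw >>>= 7;
        }
        out.write(raw);
        return out.toByteArray();
    }

    public static void main(String[] args) throws IOException {
        // Frame shape used by GET_ALL: [status][entry count][entries ...]
        ByteArrayOutputStream response = new ByteArrayOutputStream();
        response.write(encodeZigZagInt(0)); // status: stand-in for ServerError.OK.getCode()
        response.write(encodeZigZagInt(2)); // two table entries would follow
        System.out.println("frame prefix is " + response.size() + " byte(s)");
    }
}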

Example 4 with TableDefinition

Use of io.dingodb.common.table.TableDefinition in project dingo by dingodb, in the class DingoJobVisitor, method visit.

@Override
public Collection<Output> visit(@Nonnull DingoDistributedValues rel) {
    List<Output> outputs = new LinkedList<>();
    String tableName = getSimpleName(rel.getTable());
    final Map<String, Location> partLocations = Services.META.getPartLocations(tableName);
    final PartitionStrategy ps = new SimpleHashStrategy(partLocations.size());
    final TableDefinition td = Services.META.getTableDefinition(tableName);
    Map<String, List<Object[]>> partMap = ps.partTuples(rel.getValues(), td.getKeyMapping());
    for (Map.Entry<String, List<Object[]>> entry : partMap.entrySet()) {
        Object partId = entry.getKey();
        ValuesOperator operator = new ValuesOperator(entry.getValue());
        operator.setId(idGenerator.get());
        OutputHint hint = new OutputHint();
        hint.setPartId(partId);
        Location location = partLocations.get(partId);
        hint.setLocation(location);
        operator.getSoleOutput().setHint(hint);
        Task task = job.getOrCreate(location);
        task.putOperator(operator);
        outputs.addAll(operator.getOutputs());
    }
    return outputs;
}
Also used : Task(io.dingodb.exec.base.Task) LinkedList(java.util.LinkedList) OutputHint(io.dingodb.exec.base.OutputHint) Output(io.dingodb.exec.base.Output) TableDefinition(io.dingodb.common.table.TableDefinition) List(java.util.List) ArrayList(java.util.ArrayList) SimpleHashStrategy(io.dingodb.exec.partition.SimpleHashStrategy) ValuesOperator(io.dingodb.exec.operator.ValuesOperator) Map(java.util.Map) Location(io.dingodb.meta.Location) PartitionStrategy(io.dingodb.exec.partition.PartitionStrategy)
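
Here partTuples splits the literal rows by partition so that each ValuesOperator can be placed on the node owning its partition. A simplified sketch of what a hash-based partTuples can look like; the hash function, the string partition ids, and the int[] key mapping are assumptions for illustration, not SimpleHashStrategy's actual code:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class HashPartitionSketch {

    // Group tuples by hashing their key columns modulo the partition count.
    static Map<String, List<Object[]>> partTuples(List<Object[]> tuples, int[] keyMapping, int partitions) {
        Map<String, List<Object[]>> result = new HashMap<>();
        for (Object[] tuple : tuples) {
            Object[] key = new Object[keyMapping.length];
            for (int i = 0; i < keyMapping.length; i++) {
                key[i] = tuple[keyMapping[i]];
            }
            String partId = Integer.toString(Math.floorMod(Arrays.hashCode(key), partitions));
            result.computeIfAbsent(partId, k -> new ArrayList<>()).add(tuple);
        }
        return result;
    }

    public static void main(String[] args) {
        List<Object[]> tuples = new ArrayList<>();
        tuples.add(new Object[] {1, "Alice"});
        tuples.add(new Object[] {2, "Bob"});
        // Key is the first column; assume two partitions.
        partTuples(tuples, new int[] {0}, 2)
            .forEach((part, rows) -> System.out.println("partition " + part + ": " + rows.size() + " tuple(s)"));
    }
}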

Example 5 with TableDefinition

Use of io.dingodb.common.table.TableDefinition in project dingo by dingodb, in the class DingoJobVisitor, method visit.

@Override
public Collection<Output> visit(@Nonnull DingoPartModify rel) {
    Collection<Output> inputs = dingo(rel.getInput()).accept(this);
    String tableName = getSimpleName(rel.getTable());
    List<Output> outputs = new LinkedList<>();
    TableDefinition td = Services.META.getTableDefinition(tableName);
    final TableId tableId = new TableId(Services.META.getTableKey(tableName));
    for (Output input : inputs) {
        Task task = input.getTask();
        Operator operator;
        switch(rel.getOperation()) {
            case INSERT:
                operator = new PartInsertOperator(tableId, input.getHint().getPartId(), td.getTupleSchema(), td.getKeyMapping());
                break;
            case UPDATE:
                operator = new PartUpdateOperator(tableId, input.getHint().getPartId(),
                    td.getTupleSchema(), td.getKeyMapping(),
                    TupleMapping.of(td.getColumnIndices(rel.getUpdateColumnList())),
                    rel.getSourceExpressionList().stream().map(RexConverter::toString).collect(Collectors.toList()));
                break;
            case DELETE:
                operator = new PartDeleteOperator(tableId, input.getHint().getPartId(), td.getTupleSchema(), td.getKeyMapping());
                break;
            default:
                throw new IllegalStateException("Operation \"" + rel.getOperation() + "\" is not supported.");
        }
        operator.setId(idGenerator.get());
        task.putOperator(operator);
        input.setLink(operator.getInput(0));
        OutputHint hint = new OutputHint();
        hint.setToSumUp(true);
        operator.getSoleOutput().setHint(hint);
        outputs.addAll(operator.getOutputs());
    }
    return outputs;
}
Also used : TableId(io.dingodb.common.table.TableId) SumUpOperator(io.dingodb.exec.operator.SumUpOperator) AggregateOperator(io.dingodb.exec.operator.AggregateOperator) SortOperator(io.dingodb.exec.operator.SortOperator) RootOperator(io.dingodb.exec.operator.RootOperator) PartScanOperator(io.dingodb.exec.operator.PartScanOperator) SendOperator(io.dingodb.exec.operator.SendOperator) ReceiveOperator(io.dingodb.exec.operator.ReceiveOperator) ProjectOperator(io.dingodb.exec.operator.ProjectOperator) ValuesOperator(io.dingodb.exec.operator.ValuesOperator) PartUpdateOperator(io.dingodb.exec.operator.PartUpdateOperator) ReduceOperator(io.dingodb.exec.operator.ReduceOperator) Operator(io.dingodb.exec.base.Operator) GetByKeysOperator(io.dingodb.exec.operator.GetByKeysOperator) PartitionOperator(io.dingodb.exec.operator.PartitionOperator) PartDeleteOperator(io.dingodb.exec.operator.PartDeleteOperator) PartInsertOperator(io.dingodb.exec.operator.PartInsertOperator) CoalesceOperator(io.dingodb.exec.operator.CoalesceOperator) Task(io.dingodb.exec.base.Task) LinkedList(java.util.LinkedList) OutputHint(io.dingodb.exec.base.OutputHint) Output(io.dingodb.exec.base.Output) TableDefinition(io.dingodb.common.table.TableDefinition)
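
Note that all three branches build the operator from the same core (table id, partition id from the input hint, tuple schema, key mapping); only UPDATE additionally carries the updated-column mapping and the source expressions serialized through RexConverter. Each operator's sole output is marked with setToSumUp(true), presumably so that the per-partition affected-row counts can later be combined by a SumUpOperator.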

Aggregations

TableDefinition (io.dingodb.common.table.TableDefinition): 13
Output (io.dingodb.exec.base.Output): 5
Task (io.dingodb.exec.base.Task): 5
Location (io.dingodb.meta.Location): 4
LinkedList (java.util.LinkedList): 4
TableId (io.dingodb.common.table.TableId): 3
PartitionStrategy (io.dingodb.exec.partition.PartitionStrategy): 3
SimpleHashStrategy (io.dingodb.exec.partition.SimpleHashStrategy): 3
ArrayList (java.util.ArrayList): 3
Map (java.util.Map): 3
PrimitiveCodec.encodeString (io.dingodb.common.codec.PrimitiveCodec.encodeString): 2
OutputHint (io.dingodb.exec.base.OutputHint): 2
GetByKeysOperator (io.dingodb.exec.operator.GetByKeysOperator): 2
PartScanOperator (io.dingodb.exec.operator.PartScanOperator): 2
PartitionOperator (io.dingodb.exec.operator.PartitionOperator): 2
ValuesOperator (io.dingodb.exec.operator.ValuesOperator): 2
IOException (java.io.IOException): 2
List (java.util.List): 2
BeforeAll (org.junit.jupiter.api.BeforeAll): 2
DingoConventions (io.dingodb.calcite.DingoConventions): 1