Use of io.mycat.calcite.table.SchemaHandler in project Mycat2 by MyCATApache.
The showColumns method of the class PrototypeHandlerImpl.
@Override
public List<Object[]> showColumns(SQLShowColumnsStatement statement) {
ArrayList<Object[]> objects = new ArrayList<>();
String schemaName = SQLUtils.normalize(statement.getDatabase().getSimpleName());
String tableName = Optional.ofNullable((SQLExpr) statement.getTable()).map(i -> SQLUtils.normalize(i.toString())).orElse(null);
if (tableName == null && statement.getLike() != null) {
String pattern = SQLUtils.normalize(statement.getLike().toString());
PatternMatcher mysqlPattern = PatternMatcher.createMysqlPattern(pattern, false);
MetadataManager metadataManager = MetaClusterCurrent.wrapper(MetadataManager.class);
SchemaHandler schemaHandler = metadataManager.getSchemaMap().get(schemaName);
if (schemaHandler == null)
return Collections.emptyList();
List<String> tables = schemaHandler.logicTables().values().stream().map(i -> i.getTableName()).collect(Collectors.toList());
tableName = tables.stream().filter(i -> mysqlPattern.match(i)).findFirst().orElse(null);
}
if (tableName == null) {
return Collections.emptyList();
}
MetadataManager metadataManager = MetaClusterCurrent.wrapper(MetadataManager.class);
TableHandler table = metadataManager.getTable(schemaName, tableName);
if (table == null)
return Collections.emptyList();
String createTableSQL = table.getCreateTableSQL();
MySqlCreateTableStatement sqlStatement = (MySqlCreateTableStatement) SQLUtils.parseSingleMysqlStatement(createTableSQL);
List<SQLColumnDefinition> columns = new ArrayList<SQLColumnDefinition>();
List<String> dataTypes = new ArrayList<String>();
List<String> defaultValues = new ArrayList<String>();
int name_len = -1, dataType_len = -1, defaultVal_len = 7, extra_len = 5;
for (SQLTableElement element : sqlStatement.getTableElementList()) {
if (element instanceof SQLColumnDefinition) {
SQLColumnDefinition column = (SQLColumnDefinition) element;
columns.add(column);
String name = SQLUtils.normalize(column.getName().getSimpleName());
if (name_len < name.length()) {
name_len = name.length();
}
String dataType = column.getDataType().getName();
if (column.getDataType().getArguments().size() > 0) {
dataType += "(";
for (int i = 0; i < column.getDataType().getArguments().size(); i++) {
if (i != 0) {
dataType += ",";
}
SQLExpr arg = column.getDataType().getArguments().get(i);
dataType += arg.toString();
}
dataType += ")";
}
if (dataType_len < dataType.length()) {
dataType_len = dataType.length();
}
dataTypes.add(dataType);
if (column.getDefaultExpr() == null) {
defaultValues.add(null);
} else {
String defaultVal = SQLUtils.toMySqlString(column.getDefaultExpr());
if (defaultVal.length() > 2 && defaultVal.charAt(0) == '\'' && defaultVal.charAt(defaultVal.length() - 1) == '\'') {
defaultVal = defaultVal.substring(1, defaultVal.length() - 1);
}
defaultValues.add(defaultVal);
if (defaultVal_len < defaultVal.length()) {
defaultVal_len = defaultVal.length();
}
}
if (column.isAutoIncrement()) {
extra_len = "auto_increment".length();
} else if (column.getOnUpdate() != null) {
extra_len = "on update CURRENT_TIMESTAMP".length();
}
}
}
for (int i = 0; i < columns.size(); i++) {
SQLColumnDefinition column = columns.get(i);
String name = SQLUtils.normalize(column.getName().getSimpleName());
String Field = name;
String Type = dataTypes.get(i);
String Collation = Optional.ofNullable(column.getCollateExpr()).map(s -> SQLUtils.normalize(s.toString())).orElse(null);
String Null = column.containsNotNullConstaint() ? "NO" : "YES";
String Key = sqlStatement.isPrimaryColumn(name) ? "PRI" : sqlStatement.isUNI(name) ? "UNI" : sqlStatement.isMUL(name) ? "MUL" : "";
String Default = Optional.ofNullable(defaultValues.get(i)).orElse("NULL");
String Extra = "";
String Privileges = "select,insert,update,references";
String Comment = Optional.ofNullable(column.getComment()).map(s -> s.toString()).orElse("");
if (statement.isFull()) {
objects.add(new Object[] { Field, Type, Collation, Null, Key, Default, Extra, Privileges, Comment });
} else {
objects.add(new Object[] { Field, Type, Null, Key, Default, Extra });
}
}
return objects;
}
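A minimal, hypothetical usage sketch (not part of the Mycat2 sources): it assumes PrototypeHandlerImpl can be instantiated directly and that MetadataManager has already been registered in MetaClusterCurrent; the SQL text and schema/table names are made up.
import com.alibaba.druid.sql.SQLUtils;
import com.alibaba.druid.sql.ast.statement.SQLShowColumnsStatement;
import java.util.Arrays;
import java.util.List;
public class ShowColumnsSketch {
    public static void main(String[] args) {
        // MySQL's "FROM <table> FROM <schema>" form; the handler reads both getTable() and getDatabase().
        SQLShowColumnsStatement stmt = (SQLShowColumnsStatement)
                SQLUtils.parseSingleMysqlStatement("SHOW FULL COLUMNS FROM travelrecord FROM db1");
        List<Object[]> rows = new PrototypeHandlerImpl().showColumns(stmt); // hypothetical direct construction
        // With isFull() == true each row is
        // { Field, Type, Collation, Null, Key, Default, Extra, Privileges, Comment };
        // otherwise { Field, Type, Null, Key, Default, Extra }.
        rows.forEach(row -> System.out.println(Arrays.toString(row)));
    }
}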
Use of io.mycat.calcite.table.SchemaHandler in project Mycat2 by MyCATApache.
The showTables method of the class PrototypeHandlerImpl.
@Override
public List<Object[]> showTables(SQLShowTablesStatement statement) {
String schemaName = SQLUtils.normalize(statement.getDatabase().getSimpleName());
MetadataManager metadataManager = MetaClusterCurrent.wrapper(MetadataManager.class);
NameMap<SchemaHandler> schemaMap = metadataManager.getSchemaMap();
SchemaHandler schemaHandler = schemaMap.get(schemaName);
if (schemaHandler == null)
return Collections.emptyList();
Collection<String> strings = schemaHandler.logicTables().values().stream().map(i -> i.getTableName()).collect(Collectors.toList());
SQLExpr like = statement.getLike();
if (like == null) {
NameMap<TableHandler> tables = schemaHandler.logicTables();
strings = tables.keySet();
} else if (like instanceof SQLTextLiteralExpr) {
PatternMatcher matcher = PatternMatcher.createMysqlPattern(((SQLTextLiteralExpr) like).getText(), false);
strings = strings.stream().filter(i -> matcher.match(i)).collect(Collectors.toList());
}
if (statement.isFull()) {
return strings.stream().sorted().map(i -> new Object[] { i, "BASE TABLE" }).collect(Collectors.toList());
} else {
return strings.stream().sorted().map(i -> new Object[] { i }).collect(Collectors.toList());
}
}
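PatternMatcher.createMysqlPattern is Mycat2-internal; as a rough illustration of the LIKE filtering above, the following standalone sketch (not the Mycat2 implementation) converts a MySQL LIKE pattern into a java.util.regex pattern and filters table names with it. Escape sequences are ignored for brevity.
import java.util.Arrays;
import java.util.List;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
public class LikePatternSketch {
    // '%' matches any character sequence, '_' matches a single character,
    // everything else is quoted literally.
    static Pattern likeToRegex(String like) {
        StringBuilder regex = new StringBuilder();
        for (char c : like.toCharArray()) {
            if (c == '%') regex.append(".*");
            else if (c == '_') regex.append('.');
            else regex.append(Pattern.quote(String.valueOf(c)));
        }
        return Pattern.compile(regex.toString(), Pattern.CASE_INSENSITIVE);
    }
    public static void main(String[] args) {
        Pattern p = likeToRegex("travel%");
        List<String> tables = Arrays.asList("travelrecord", "travelrecord2", "address");
        System.out.println(tables.stream().filter(t -> p.matcher(t).matches()).collect(Collectors.toList()));
        // prints [travelrecord, travelrecord2]
    }
}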
Use of io.mycat.calcite.table.SchemaHandler in project Mycat2 by MyCATApache.
The convertRoSchemaPlus method of the class DrdsRunnerHelper.
public static SchemaPlus convertRoSchemaPlus(NameMap<SchemaHandler> schemaHandlers) {
SchemaPlus plus = CalciteSchema.createRootSchema(false).plus();
List<MycatSchema> schemas = new ArrayList<>();
for (Map.Entry<String, SchemaHandler> entry : schemaHandlers.entrySet()) {
String schemaName = entry.getKey();
SchemaHandler schemaHandler = entry.getValue();
Map<String, Table> logicTableMap = new HashMap<>();
for (TableHandler tableHandler : schemaHandler.logicTables().values()) {
MycatLogicTable logicTable = new MycatLogicTable(tableHandler);
logicTableMap.put(logicTable.getTable().getTableName(), logicTable);
}
MycatSchema schema = MycatSchema.create(schemaName, logicTableMap);
plus.add(schemaName, schema);
schemas.add(schema);
}
return plus;
}
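For context, CalciteSchema.createRootSchema(false).plus() yields a root SchemaPlus to which convertRoSchemaPlus adds one named sub-schema per logical schema. Below is a minimal standalone sketch using only Calcite's public API (MycatSchema and MycatLogicTable are Mycat2-specific and omitted; the schema name is made up).
import org.apache.calcite.jdbc.CalciteSchema;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.schema.impl.AbstractSchema;
public class RootSchemaSketch {
    public static void main(String[] args) {
        // false: do not add the metadata schema; the same call used above.
        SchemaPlus root = CalciteSchema.createRootSchema(false).plus();
        // Each logical schema becomes a named sub-schema; a real implementation
        // would expose its tables by overriding AbstractSchema#getTableMap.
        root.add("db1", new AbstractSchema());
        System.out.println(root.getSubSchema("db1").getName()); // db1
    }
}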
Use of io.mycat.calcite.table.SchemaHandler in project Mycat2 by MyCATApache.
The showTableStatus method of the class PrototypeHandlerImpl.
@Override
public List<Object[]> showTableStatus(MySqlShowTableStatusStatement statement) {
String database = SQLUtils.normalize(statement.getDatabase().getSimpleName());
SQLExpr like = statement.getLike();
PatternMatcher matcher = PatternMatcher.createMysqlPattern(SQLUtils.normalize(like.toString()), false);
MetadataManager metadataManager = MetaClusterCurrent.wrapper(MetadataManager.class);
SchemaHandler schemaHandler = metadataManager.getSchemaMap().get(database);
if (schemaHandler == null)
return Collections.emptyList();
return schemaHandler.logicTables().values().stream().map(i -> i.getTableName()).filter(i -> matcher.match(i)).map(tableName -> {
String Name = tableName;
String Engine = "InnoDB";
String Version = "10";
String Row_format = "Dynamic";
String Rows = "0";
String Avg_row_length = "0";
String Data_length = "800000";
String Max_data_length = "0";
String Index_length = "800000";
String Data_free = "0";
String Auto_increment = null;
String Create_time = "2021-10-26 14:32:03";
String Update_time = null;
String Check_time = null;
String Collation = "utf8_general_ci";
String Checksum = null;
String Create_options = null;
String Comment = "";
return new Object[] { tableName, Name, Engine, Version, Row_format, Rows, Avg_row_length, Data_length, Max_data_length, Index_length, Data_free, Auto_increment, Create_time, Update_time, Check_time, Collation, Checksum, Create_options, Comment };
}).collect(Collectors.toList());
}
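A hypothetical invocation (not from the Mycat2 sources), again assuming the metadata services are registered and the handler can be constructed directly; the statistics columns returned are hard-coded placeholder values, as the code above shows.
import com.alibaba.druid.sql.SQLUtils;
import com.alibaba.druid.sql.dialect.mysql.ast.statement.MySqlShowTableStatusStatement;
import java.util.List;
public class ShowTableStatusSketch {
    public static void main(String[] args) {
        MySqlShowTableStatusStatement stmt = (MySqlShowTableStatusStatement)
                SQLUtils.parseSingleMysqlStatement("SHOW TABLE STATUS FROM db1 LIKE 'travelrecord'");
        List<Object[]> rows = new PrototypeHandlerImpl().showTableStatus(stmt); // hypothetical direct construction
        System.out.println(rows.size());
    }
}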
Use of io.mycat.calcite.table.SchemaHandler in project Mycat2 by MyCATApache.
The onExecute method of the class HintHandler.
@Override
protected Future<Void> onExecute(SQLRequest<MySqlHintStatement> request, MycatDataContext dataContext, Response response) {
MySqlHintStatement ast = request.getAst();
List<SQLCommentHint> hints = ast.getHints();
try {
if (hints.size() == 1) {
String s = SqlHints.unWrapperHint(hints.get(0).getText());
if (s.startsWith("mycat:") || s.startsWith("MYCAT:")) {
s = s.substring(6);
int bodyStartIndex = s.indexOf('{');
String cmd;
String body;
if (bodyStartIndex == -1) {
cmd = s;
body = "{}";
} else {
cmd = s.substring(0, bodyStartIndex);
body = s.substring(bodyStartIndex);
}
cmd = cmd.trim();
MetadataManager metadataManager = MetaClusterCurrent.wrapper(MetadataManager.class);
MycatRouterConfig routerConfig = MetaClusterCurrent.wrapper(MycatRouterConfig.class);
ReplicaSelectorManager replicaSelectorRuntime = MetaClusterCurrent.wrapper(ReplicaSelectorManager.class);
JdbcConnectionManager jdbcConnectionManager = MetaClusterCurrent.wrapper(JdbcConnectionManager.class);
MycatServer mycatServer = MetaClusterCurrent.wrapper(MycatServer.class);
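// Dispatch on the hint command name below: query-style commands build a ResultSetBuilder (or RowBaseIterator) and return a result set, while management commands update the router config or server state and reply with OK.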
if ("showErGroup".equalsIgnoreCase(cmd)) {
return showErGroup(response, metadataManager);
}
if ("loaddata".equalsIgnoreCase(cmd)) {
return loaddata(dataContext, response, body, metadataManager);
}
if ("setUserDialect".equalsIgnoreCase(cmd)) {
return setUserDialect(response, body);
}
if ("showSlowSql".equalsIgnoreCase(cmd)) {
return showSlowSql(response, body);
}
if ("showTopology".equalsIgnoreCase(cmd)) {
return showTopology(response, body, metadataManager);
}
if ("checkConfigConsistency".equalsIgnoreCase(cmd)) {
StorageManager assembleMetadataStorageManager = MetaClusterCurrent.wrapper(StorageManager.class);
boolean res = assembleMetadataStorageManager.checkConfigConsistency();
ResultSetBuilder resultSetBuilder = ResultSetBuilder.create();
resultSetBuilder.addColumnInfo("value", JDBCType.VARCHAR);
resultSetBuilder.addObjectRowPayload(Arrays.asList(res ? 1 : 0));
return response.sendResultSet(resultSetBuilder.build());
}
if ("resetConfig".equalsIgnoreCase(cmd)) {
MycatRouterConfigOps ops = ConfigUpdater.getOps();
ops.reset();
ops.commit();
return response.sendOk();
}
if ("run".equalsIgnoreCase(cmd)) {
Map<String, Object> map = JsonUtil.from(body, Map.class);
String hbt = Objects.toString(map.get("hbt"));
DrdsSqlCompiler drdsRunner = MetaClusterCurrent.wrapper(DrdsSqlCompiler.class);
Plan plan = drdsRunner.doHbt(hbt);
AsyncMycatDataContextImpl.HbtMycatDataContextImpl sqlMycatDataContext = new AsyncMycatDataContextImpl.HbtMycatDataContextImpl(dataContext, plan.getCodeExecuterContext());
ArrayBindable arrayBindable = MetaClusterCurrent.wrapper(ExecutorProvider.class).prepare(plan).getArrayBindable();
Observable<MysqlPayloadObject> mysqlPayloadObjectObservable = PrepareExecutor.getMysqlPayloadObjectObservable(arrayBindable, sqlMycatDataContext, plan.getMetaData());
return response.sendResultSet(mysqlPayloadObjectObservable);
}
if ("killThread".equalsIgnoreCase(cmd)) {
KillThreadHint killThreadHint = JsonUtil.from(body, KillThreadHint.class);
long pid = killThreadHint.getId();
dataContext.setAffectedRows(IOExecutor.kill(pid) ? 1 : 0);
return response.sendOk();
}
if ("interruptThread".equalsIgnoreCase(cmd)) {
Thread.currentThread().interrupt();
InterruptThreadHint interruptThreadHint = JsonUtil.from(body, InterruptThreadHint.class);
long pid = interruptThreadHint.getId();
dataContext.setAffectedRows(IOExecutor.interrupt(pid) ? 1 : 0);
return response.sendOk();
}
if ("showThreadInfo".equalsIgnoreCase(cmd)) {
ResultSetBuilder builder = ResultSetBuilder.create();
builder.addColumnInfo("ID", JDBCType.VARCHAR);
builder.addColumnInfo("NAME", JDBCType.VARCHAR);
builder.addColumnInfo("STATE", JDBCType.VARCHAR);
builder.addColumnInfo("STACKTRACE", JDBCType.VARCHAR);
List<Thread> threads = IOExecutor.findAllThreads();
for (Thread thread : threads) {
String name = thread.getName();
long id = thread.getId();
String state = thread.getState().name();
StackTraceElement[] stackTrace = thread.getStackTrace();
StringWriter stringWriter = new StringWriter();
for (StackTraceElement traceElement : stackTrace) {
stringWriter.write("\tat " + traceElement);
}
String stackTraceText = stringWriter.toString();
builder.addObjectRowPayload(Arrays.asList(id, name, state, stackTraceText));
}
return response.sendResultSet(builder.build());
}
if ("createSqlCache".equalsIgnoreCase(cmd)) {
MycatRouterConfigOps ops = ConfigUpdater.getOps();
SQLStatement sqlStatement = null;
if (ast.getHintStatements() != null && ast.getHintStatements().size() == 1) {
sqlStatement = ast.getHintStatements().get(0);
}
SqlCacheConfig sqlCacheConfig = JsonUtil.from(body, SqlCacheConfig.class);
if (sqlCacheConfig.getSql() == null && sqlStatement != null) {
sqlCacheConfig.setSql(sqlStatement.toString());
}
ops.putSqlCache(sqlCacheConfig);
ops.commit();
if (sqlStatement == null) {
String sql = sqlCacheConfig.getSql();
sqlStatement = SQLUtils.parseSingleMysqlStatement(sql);
}
return MycatdbCommand.execute(dataContext, response, sqlStatement);
}
if ("showSqlCaches".equalsIgnoreCase(cmd)) {
ResultSetBuilder resultSetBuilder = ResultSetBuilder.create();
resultSetBuilder.addColumnInfo("info", JDBCType.VARCHAR);
if (MetaClusterCurrent.exist(SqlResultSetService.class)) {
SqlResultSetService sqlResultSetService = MetaClusterCurrent.wrapper(SqlResultSetService.class);
sqlResultSetService.snapshot().toStringList().forEach(c -> resultSetBuilder.addObjectRowPayload(Arrays.asList(c)));
}
return response.sendResultSet(resultSetBuilder.build());
}
if ("dropSqlCache".equalsIgnoreCase(cmd)) {
MycatRouterConfigOps ops = ConfigUpdater.getOps();
SqlCacheConfig sqlCacheConfig = JsonUtil.from(body, SqlCacheConfig.class);
ops.removeSqlCache(sqlCacheConfig.getName());
ops.commit();
return response.sendOk();
}
if ("showBufferUsage".equalsIgnoreCase(cmd)) {
return response.sendResultSet(mycatServer.showBufferUsage(dataContext.getSessionId()));
}
if ("showUsers".equalsIgnoreCase(cmd)) {
ResultSetBuilder builder = ResultSetBuilder.create();
builder.addColumnInfo("username", JDBCType.VARCHAR);
builder.addColumnInfo("ip", JDBCType.VARCHAR);
builder.addColumnInfo("transactionType", JDBCType.VARCHAR);
builder.addColumnInfo("dbType", JDBCType.VARCHAR);
Authenticator authenticator = MetaClusterCurrent.wrapper(Authenticator.class);
List<UserConfig> userConfigs = authenticator.getConfigAsList();
for (UserConfig userConfig : userConfigs) {
builder.addObjectRowPayload(Arrays.asList(userConfig.getUsername(), userConfig.getIp(), userConfig.getTransactionType(), userConfig.getDialect()));
}
return response.sendResultSet(() -> builder.build());
}
if ("showSchemas".equalsIgnoreCase(cmd)) {
Map map = JsonUtil.from(body, Map.class);
String schemaName = (String) map.get("schemaName");
Collection<SchemaHandler> schemaHandlers;
if (schemaName == null) {
schemaHandlers = metadataManager.getSchemaMap().values();
} else {
schemaHandlers = Collections.singletonList(metadataManager.getSchemaMap().get(schemaName));
}
ResultSetBuilder builder = ResultSetBuilder.create();
builder.addColumnInfo("SCHEMA_NAME", JDBCType.VARCHAR).addColumnInfo("DEFAULT_TARGET_NAME", JDBCType.VARCHAR).addColumnInfo("TABLE_NAMES", JDBCType.VARCHAR);
for (SchemaHandler value : schemaHandlers) {
String SCHEMA_NAME = value.getName();
String DEFAULT_TARGET_NAME = value.defaultTargetName();
String TABLE_NAMES = String.join(",", value.logicTables().keySet());
builder.addObjectRowPayload(Arrays.asList(SCHEMA_NAME, DEFAULT_TARGET_NAME, TABLE_NAMES));
}
return response.sendResultSet(() -> builder.build());
}
if ("showTables".equalsIgnoreCase(cmd)) {
return showTables(response, body, metadataManager, routerConfig);
}
if ("setSqlTimeFilter".equalsIgnoreCase(cmd)) {
return setSqlTimeFilter(response, body, metadataManager);
}
if ("getSqlTimeFilter".equalsIgnoreCase(cmd)) {
return getSqlTimeFilter(response, body, metadataManager);
}
if ("showClusters".equalsIgnoreCase(cmd)) {
Map map = JsonUtil.from(body, Map.class);
String clusterName = (String) map.get("name");
RowBaseIterator rowBaseIterator = showClusters(clusterName);
return response.sendResultSet(rowBaseIterator);
}
if ("showNativeDataSources".equalsIgnoreCase(cmd)) {
return response.sendResultSet(mycatServer.showNativeDataSources());
}
if ("showDataSources".equalsIgnoreCase(cmd)) {
Optional<JdbcConnectionManager> connectionManager = Optional.ofNullable(jdbcConnectionManager);
Collection<JdbcDataSource> jdbcDataSources = new HashSet<>(connectionManager.map(i -> i.getDatasourceInfo()).map(i -> i.values()).orElse(Collections.emptyList()));
ResultSetBuilder resultSetBuilder = ResultSetBuilder.create();
resultSetBuilder.addColumnInfo("NAME", JDBCType.VARCHAR);
resultSetBuilder.addColumnInfo("USERNAME", JDBCType.VARCHAR);
resultSetBuilder.addColumnInfo("PASSWORD", JDBCType.VARCHAR);
resultSetBuilder.addColumnInfo("MAX_CON", JDBCType.BIGINT);
resultSetBuilder.addColumnInfo("MIN_CON", JDBCType.BIGINT);
resultSetBuilder.addColumnInfo("EXIST_CON", JDBCType.BIGINT);
resultSetBuilder.addColumnInfo("USE_CON", JDBCType.BIGINT);
resultSetBuilder.addColumnInfo("MAX_RETRY_COUNT", JDBCType.BIGINT);
resultSetBuilder.addColumnInfo("MAX_CONNECT_TIMEOUT", JDBCType.BIGINT);
resultSetBuilder.addColumnInfo("DB_TYPE", JDBCType.VARCHAR);
resultSetBuilder.addColumnInfo("URL", JDBCType.VARCHAR);
resultSetBuilder.addColumnInfo("WEIGHT", JDBCType.VARCHAR);
resultSetBuilder.addColumnInfo("INIT_SQL", JDBCType.VARCHAR);
resultSetBuilder.addColumnInfo("INIT_SQL_GET_CONNECTION", JDBCType.VARCHAR);
resultSetBuilder.addColumnInfo("INSTANCE_TYPE", JDBCType.VARCHAR);
resultSetBuilder.addColumnInfo("IDLE_TIMEOUT", JDBCType.BIGINT);
resultSetBuilder.addColumnInfo("DRIVER", JDBCType.VARCHAR);
resultSetBuilder.addColumnInfo("TYPE", JDBCType.VARCHAR);
resultSetBuilder.addColumnInfo("IS_MYSQL", JDBCType.VARCHAR);
for (JdbcDataSource jdbcDataSource : jdbcDataSources) {
DatasourceConfig config = jdbcDataSource.getConfig();
String NAME = config.getName();
String USERNAME = config.getUser();
String PASSWORD = config.getPassword();
int MAX_CON = config.getMaxCon();
int MIN_CON = config.getMinCon();
// Note the display order.
int USED_CON = jdbcDataSource.getUsedCount();
// The number of connections already present in the JDBC pool is internal state and unknown, so the used count is reported instead.
int EXIST_CON = USED_CON;
int MAX_RETRY_COUNT = config.getMaxRetryCount();
long MAX_CONNECT_TIMEOUT = config.getMaxConnectTimeout();
String DB_TYPE = config.getDbType();
String URL = config.getUrl();
int WEIGHT = config.getWeight();
String INIT_SQL = Optional.ofNullable(config.getInitSqls()).map(o -> String.join(";", o)).orElse("");
boolean INIT_SQL_GET_CONNECTION = config.isInitSqlsGetConnection();
String INSTANCE_TYPE = Optional.ofNullable(replicaSelectorRuntime.getPhysicsInstanceByName(NAME)).map(i -> i.getType().name()).orElse(config.getInstanceType());
long IDLE_TIMEOUT = config.getIdleTimeout();
// Reserved attribute.
String DRIVER = jdbcDataSource.getDataSource().toString();
String TYPE = config.getType();
boolean IS_MYSQL = jdbcDataSource.isMySQLType();
resultSetBuilder.addObjectRowPayload(Arrays.asList(NAME, USERNAME, PASSWORD, MAX_CON, MIN_CON, EXIST_CON, USED_CON, MAX_RETRY_COUNT, MAX_CONNECT_TIMEOUT, DB_TYPE, URL, WEIGHT, INIT_SQL, INIT_SQL_GET_CONNECTION, INSTANCE_TYPE, IDLE_TIMEOUT, DRIVER, TYPE, IS_MYSQL));
}
return response.sendResultSet(() -> resultSetBuilder.build());
}
if ("showHeartbeats".equalsIgnoreCase(cmd)) {
RowBaseIterator rowBaseIterator = showHeatbeatStat();
return response.sendResultSet(rowBaseIterator);
}
if ("showHeartbeatStatus".equalsIgnoreCase(cmd)) {
ResultSetBuilder builder = ResultSetBuilder.create();
builder.addColumnInfo("name", JDBCType.VARCHAR);
builder.addColumnInfo("status", JDBCType.VARCHAR);
Map<String, HeartbeatFlow> heartbeatDetectorMap = replicaSelectorRuntime.getHeartbeatDetectorMap();
for (Map.Entry<String, HeartbeatFlow> entry : heartbeatDetectorMap.entrySet()) {
String key = entry.getKey();
HeartbeatFlow value = entry.getValue();
builder.addObjectRowPayload(Arrays.asList(Objects.toString(key), Objects.toString(value.getDsStatus())));
}
return response.sendResultSet(() -> builder.build());
}
if ("showInstances".equalsIgnoreCase(cmd)) {
RowBaseIterator rowBaseIterator = showInstances();
return response.sendResultSet(rowBaseIterator);
}
if ("showReactors".equalsIgnoreCase(cmd)) {
MycatServer server = MetaClusterCurrent.wrapper(MycatServer.class);
return response.sendResultSet(server.showReactors());
}
if ("showThreadPools".equalsIgnoreCase(cmd)) {
ResultSetBuilder builder = ResultSetBuilder.create();
builder.addColumnInfo("NAME", JDBCType.VARCHAR).addColumnInfo("POOL_SIZE", JDBCType.BIGINT).addColumnInfo("ACTIVE_COUNT", JDBCType.BIGINT).addColumnInfo("TASK_QUEUE_SIZE", JDBCType.BIGINT).addColumnInfo("COMPLETED_TASK", JDBCType.BIGINT).addColumnInfo("TOTAL_TASK", JDBCType.BIGINT);
return response.sendResultSet(() -> builder.build());
}
if ("showNativeBackends".equalsIgnoreCase(cmd)) {
MycatServer server = MetaClusterCurrent.wrapper(MycatServer.class);
return response.sendResultSet(server.showNativeBackends());
}
if ("showConnections".equalsIgnoreCase(cmd)) {
MycatServer server = MetaClusterCurrent.wrapper(MycatServer.class);
return response.sendResultSet(server.showConnections());
}
if ("showSchedules".equalsIgnoreCase(cmd)) {
ResultSetBuilder builder = ResultSetBuilder.create();
ScheduledExecutorService timer = ScheduleUtil.getTimer();
String NAME = timer.toString();
boolean IS_TERMINATED = timer.isTerminated();
boolean IS_SHUTDOWN = timer.isShutdown();
int SCHEDULE_COUNT = ScheduleUtil.getScheduleCount();
builder.addColumnInfo("NAME", JDBCType.VARCHAR).addColumnInfo("IS_TERMINATED", JDBCType.VARCHAR).addColumnInfo("IS_SHUTDOWN", JDBCType.VARCHAR).addColumnInfo("SCHEDULE_COUNT", JDBCType.BIGINT);
builder.addObjectRowPayload(Arrays.asList(NAME, IS_TERMINATED, IS_SHUTDOWN, SCHEDULE_COUNT));
return response.sendResultSet(() -> builder.build());
}
if ("showBaselines".equalsIgnoreCase(cmd)) {
ResultSetBuilder builder = ResultSetBuilder.create();
QueryPlanCache queryPlanCache = MetaClusterCurrent.wrapper(QueryPlanCache.class);
builder.addColumnInfo("BASELINE_ID", JDBCType.VARCHAR).addColumnInfo("PARAMETERIZED_SQL", JDBCType.VARCHAR).addColumnInfo("PLAN_ID", JDBCType.VARCHAR).addColumnInfo("EXTERNALIZED_PLAN", JDBCType.VARCHAR).addColumnInfo("FIXED", JDBCType.VARCHAR).addColumnInfo("ACCEPTED", JDBCType.VARCHAR);
for (Baseline baseline : queryPlanCache.list()) {
for (BaselinePlan baselinePlan : baseline.getPlanList()) {
String BASELINE_ID = String.valueOf(baselinePlan.getBaselineId());
String PARAMETERIZED_SQL = String.valueOf(baselinePlan.getSql());
String PLAN_ID = String.valueOf(baselinePlan.getId());
CodeExecuterContext attach = (CodeExecuterContext) baselinePlan.attach();
String EXTERNALIZED_PLAN = new PlanImpl(attach.getMycatRel(), attach, Collections.emptyList()).dumpPlan();
String FIXED = Optional.ofNullable(baseline.getFixPlan()).filter(i -> i.getId() == baselinePlan.getId()).map(u -> "true").orElse("false");
String ACCEPTED = "true";
builder.addObjectRowPayload(Arrays.asList(BASELINE_ID, PARAMETERIZED_SQL, PLAN_ID, EXTERNALIZED_PLAN, FIXED, ACCEPTED));
}
}
return response.sendResultSet(() -> builder.build());
}
if ("showConfigText".equalsIgnoreCase(cmd)) {
MycatRouterConfig mycatRouterConfig = MetaClusterCurrent.wrapper(MycatRouterConfig.class);
String text = JsonUtil.toJson(mycatRouterConfig);
ResultSetBuilder builder = ResultSetBuilder.create();
builder.addColumnInfo("CONFIG_TEXT", JDBCType.VARCHAR);
builder.addObjectRowPayload(Arrays.asList(text));
return response.sendResultSet(builder.build());
}
if ("setBkaJoin".equalsIgnoreCase(cmd)) {
DrdsSqlCompiler.RBO_BKA_JOIN = body.contains("1");
return response.sendOk();
}
if ("setSortMergeJoin".equalsIgnoreCase(cmd)) {
DrdsSqlCompiler.RBO_MERGE_JOIN = body.contains("1");
return response.sendOk();
}
if ("setAcceptConnect".equalsIgnoreCase(cmd)) {
boolean contains = body.contains("1");
MycatServer server = MetaClusterCurrent.wrapper(MycatServer.class);
if (!contains) {
server.stopAcceptConnect();
} else {
server.resumeAcceptConnect();
}
dataContext.setAffectedRows(1);
return response.sendOk();
}
if ("setReadyToCloseSQL".equalsIgnoreCase(cmd)) {
ReadyToCloseSQLHint readyToCloseSQLHint = JsonUtil.from(body, ReadyToCloseSQLHint.class);
String sql = readyToCloseSQLHint.getSql().trim();
MycatServer server = MetaClusterCurrent.wrapper(MycatServer.class);
server.setReadyToCloseSQL(sql);
dataContext.setAffectedRows(1);
return response.sendOk();
}
if ("setDebug".equalsIgnoreCase(cmd)) {
boolean contains = body.contains("1");
dataContext.setDebug(contains);
return response.sendOk();
}
if ("setVector".equalsIgnoreCase(cmd)) {
boolean contains = body.contains("1");
dataContext.setVector(contains);
return response.sendOk();
}
if ("is".equalsIgnoreCase(cmd)) {
ResultSetBuilder builder = ResultSetBuilder.create();
builder.addColumnInfo("value", JDBCType.VARCHAR);
if (body.contains("debug")) {
boolean debug = dataContext.isDebug();
DrdsSqlCompiler.DEBUG = debug;
builder.addObjectRowPayload(Arrays.asList(debug ? "1" : "0"));
}
return response.sendResultSet(builder.build());
}
if ("setBkaJoinLeftRowCountLimit".equalsIgnoreCase(cmd)) {
DrdsSqlCompiler.BKA_JOIN_LEFT_ROW_COUNT_LIMIT = Long.parseLong(body.substring(1, body.length() - 1));
return response.sendOk();
}
if ("baseline".equalsIgnoreCase(cmd)) {
Map<String, Object> map = JsonUtil.from(body, Map.class);
String command = Objects.requireNonNull(map.get("command")).toString();
long value = Long.parseLong((map.getOrDefault("value", "0")).toString());
QueryPlanCache queryPlanCache = MetaClusterCurrent.wrapper(QueryPlanCache.class);
switch(command) {
case "showAllPlans":
{
ResultSetBuilder builder = ResultSetBuilder.create();
builder.addColumnInfo("BASELINE_ID", JDBCType.VARCHAR).addColumnInfo("PARAMETERIZED_SQL", JDBCType.VARCHAR).addColumnInfo("PLAN_ID", JDBCType.VARCHAR).addColumnInfo("EXTERNALIZED_PLAN", JDBCType.VARCHAR).addColumnInfo("FIXED", JDBCType.VARCHAR).addColumnInfo("ACCEPTED", JDBCType.VARCHAR);
for (Baseline baseline : queryPlanCache.list()) {
for (BaselinePlan baselinePlan : baseline.getPlanList()) {
String BASELINE_ID = String.valueOf(baselinePlan.getBaselineId());
String PARAMETERIZED_SQL = String.valueOf(baselinePlan.getSql());
String PLAN_ID = String.valueOf(baselinePlan.getId());
CodeExecuterContext attach = (CodeExecuterContext) baselinePlan.attach();
String EXTERNALIZED_PLAN = new PlanImpl(attach.getMycatRel(), attach, Collections.emptyList()).dumpPlan();
String FIXED = Optional.ofNullable(baseline.getFixPlan()).filter(i -> i.getId() == baselinePlan.getId()).map(u -> "true").orElse("false");
String ACCEPTED = "true";
builder.addObjectRowPayload(Arrays.asList(BASELINE_ID, PARAMETERIZED_SQL, PLAN_ID, EXTERNALIZED_PLAN, FIXED, ACCEPTED));
}
}
return response.sendResultSet(() -> builder.build());
}
case "persistAllBaselines":
{
queryPlanCache.saveBaselines();
return response.sendOk();
}
case "loadBaseline":
{
queryPlanCache.loadBaseline(value);
return response.sendOk();
}
case "loadPlan":
{
queryPlanCache.loadPlan(value);
return response.sendOk();
}
case "persistPlan":
{
queryPlanCache.persistPlan(value);
return response.sendOk();
}
case "clearBaseline":
{
queryPlanCache.clearBaseline(value);
return response.sendOk();
}
case "clearPlan":
{
queryPlanCache.clearPlan(value);
return response.sendOk();
}
case "deleteBaseline":
{
queryPlanCache.deleteBaseline(value);
return response.sendOk();
}
case "deletePlan":
{
queryPlanCache.deletePlan(value);
return response.sendOk();
}
case "add":
case "fix":
{
SQLStatement sqlStatement = null;
if (ast.getHintStatements() != null && ast.getHintStatements().size() == 1) {
sqlStatement = ast.getHintStatements().get(0);
DrdsSqlWithParams drdsSqlWithParams = DrdsRunnerHelper.preParse(sqlStatement, dataContext.getDefaultSchema());
queryPlanCache.add("fix".equalsIgnoreCase(command), drdsSqlWithParams);
}
return response.sendOk();
}
default:
throw new UnsupportedOperationException();
}
}
if ("MIGRATE_LIST".equalsIgnoreCase(cmd)) {
return response.sendResultSet(MigrateUtil.list());
}
if ("MIGRATE_STOP".equalsIgnoreCase(cmd)) {
MigrateStopHint hint = JsonUtil.from(body, MigrateStopHint.class);
if (MigrateUtil.stop(hint.getId())) {
dataContext.setAffectedRows(1);
}
return response.sendOk();
}
if ("MIGRATE".equalsIgnoreCase(cmd)) {
MigrateHint migrateHint = JsonUtil.from(body, MigrateHint.class);
String name = migrateHint.getName();
MigrateHint.Input input = migrateHint.getInput();
MigrateHint.Output output = migrateHint.getOutput();
int parallelism = output.getParallelism();
MigrateUtil.MigrateJdbcOutput migrateJdbcOutput = new MigrateUtil.MigrateJdbcOutput();
migrateJdbcOutput.setParallelism(parallelism);
migrateJdbcOutput.setBatch(output.getBatch());
List<MigrateUtil.MigrateJdbcInput> migrateJdbcInputs = new ArrayList<>();
List<Flowable<Object[]>> observables = new ArrayList<>();
MetadataManager manager = MetaClusterCurrent.wrapper(MetadataManager.class);
TableHandler outputTable = manager.getTable(output.getSchemaName(), output.getTableName());
String username = Optional.ofNullable(output.getUsername()).orElseGet(new Supplier<String>() {
@Override
public String get() {
UserConfig userConfig = routerConfig.getUsers().get(0);
String username = userConfig.getUsername();
String password = userConfig.getPassword();
return username;
}
});
String password = Optional.ofNullable(output.getPassword()).orElseGet(new Supplier<String>() {
@Override
public String get() {
UserConfig userConfig = routerConfig.getUsers().get(0);
String username = userConfig.getUsername();
String password = userConfig.getPassword();
return password;
}
});
String url = Optional.ofNullable(output.getUrl()).orElseGet(() -> {
ServerConfig serverConfig = MetaClusterCurrent.wrapper(ServerConfig.class);
String ip = serverConfig.getIp();
int port = serverConfig.getPort();
return "jdbc:mysql://" + ip + ":" + port + "/mysql?useUnicode=true&characterEncoding=utf8&useSSL=false&serverTimezone=Asia/Shanghai&allowPublicKeyRetrieval=true";
});
if (input.getUrl() != null) {
String sql = input.getSql();
long count = input.getCount();
Map<String, String> properties = input.getProperties();
MigrateUtil.MigrateJdbcInput migrateJdbcInput = new MigrateUtil.MigrateJdbcInput();
migrateJdbcInput.setCount(count);
observables.add(MigrateUtil.read(migrateJdbcInput, input.getUrl(), input.getUsername(), input.getPassword(), sql));
} else if (input.getType() == null || "mycat".equalsIgnoreCase(input.getType())) {
TableHandler inputTable = manager.getTable(input.getSchemaName(), input.getTableName());
switch(inputTable.getType()) {
case SHARDING:
{
ShardingTable shardingTable = (ShardingTable) inputTable;
for (Partition backend : shardingTable.getBackends()) {
MigrateUtil.MigrateJdbcInput migrateJdbcInput = new MigrateUtil.MigrateJdbcInput();
migrateJdbcInputs.add(migrateJdbcInput);
observables.add(MigrateUtil.read(migrateJdbcInput, backend));
}
break;
}
case GLOBAL:
{
GlobalTable globalTable = (GlobalTable) inputTable;
Partition partition = globalTable.getGlobalDataNode().get(0);
MigrateUtil.MigrateJdbcInput migrateJdbcInput = new MigrateUtil.MigrateJdbcInput();
migrateJdbcInputs.add(migrateJdbcInput);
observables.add(MigrateUtil.read(migrateJdbcInput, partition));
break;
}
case NORMAL:
{
NormalTable normalTable = (NormalTable) inputTable;
Partition partition = normalTable.getDataNode();
MigrateUtil.MigrateJdbcInput migrateJdbcInput = new MigrateUtil.MigrateJdbcInput();
migrateJdbcInputs.add(migrateJdbcInput);
observables.add(MigrateUtil.read(migrateJdbcInput, partition));
break;
}
case VISUAL:
case VIEW:
case CUSTOM:
MigrateUtil.MigrateJdbcInput migrateJdbcInput = new MigrateUtil.MigrateJdbcInput();
migrateJdbcInputs.add(migrateJdbcInput);
observables.add(MigrateUtil.read(migrateJdbcInput, input.getTableName(), input.getSchemaName(), url, username, password));
break;
default:
throw new IllegalStateException("Unexpected value: " + inputTable.getType());
}
} else {
throw new UnsupportedOperationException();
}
String outputSchemaName = outputTable.getSchemaName();
String outputTableName = outputTable.getTableName();
String insertTemplate = getMySQLInsertTemplate(outputTable);
migrateJdbcOutput.setUsername(username);
migrateJdbcOutput.setPassword(password);
migrateJdbcOutput.setUrl(url);
migrateJdbcOutput.setInsertTemplate(insertTemplate);
MigrateUtil.MigrateController migrateController = MigrateUtil.write(migrateJdbcOutput, Flowable.merge(observables.stream().map(i -> i.buffer(output.getBatch()).subscribeOn(Schedulers.io())).collect(Collectors.toList())));
MigrateUtil.MigrateScheduler scheduler = MigrateUtil.register(name, migrateJdbcInputs, migrateJdbcOutput, migrateController);
return response.sendResultSet(MigrateUtil.show(scheduler));
}
if ("BINLOG_LIST".equalsIgnoreCase(cmd)) {
return response.sendResultSet(BinlogUtil.list());
}
if ("BINLOG_STOP".equalsIgnoreCase(cmd)) {
BinlogStopHint hint = JsonUtil.from(body, BinlogStopHint.class);
if (BinlogUtil.stop(hint.getId())) {
dataContext.setAffectedRows(1);
}
return response.sendOk();
}
if ("BINLOG".equalsIgnoreCase(cmd)) {
BinlogHint binlogHint = JsonUtil.from(body, BinlogHint.class);
Objects.requireNonNull(binlogHint.getInputTableNames());
List<String> outputTableNames = binlogHint.getOutputTableNames();
if (outputTableNames == null) {
// Default the output tables to the input tables when none are specified (assumes BinlogHint exposes a matching setter).
binlogHint.setOutputTableNames(binlogHint.getInputTableNames());
}
IdentityHashMap<TableHandler, TableHandler> map = new IdentityHashMap<>();
List<TableHandler> inputs = new ArrayList<>();
List<TableHandler> outputs = new ArrayList<>();
for (String inputTableName : binlogHint.getInputTableNames()) {
// String.split takes a regex, so the dot must be escaped.
String[] split = inputTableName.split("\\.");
String schemaName = SQLUtils.normalize(split[0]);
String tableName = SQLUtils.normalize(split[1]);
TableHandler inputTable = metadataManager.getTable(schemaName, tableName);
inputs.add(inputTable);
}
for (String outputTableName : binlogHint.getOutputTableNames()) {
String[] split = outputTableName.split("\\.");
String schemaName = SQLUtils.normalize(split[0]);
String tableName = SQLUtils.normalize(split[1]);
TableHandler outputTable = metadataManager.getTable(schemaName, tableName);
outputs.add(outputTable);
}
for (int i = 0; i < inputs.size(); i++) {
map.put(inputs.get(i), outputs.get(i));
}
Map<String, Map<String, List<Partition>>> infoCollector = new HashMap<>();
List<MigrateUtil.MigrateController> migrateControllers = new ArrayList<>();
Set<Map.Entry<TableHandler, TableHandler>> entries = map.entrySet();
for (Map.Entry<TableHandler, TableHandler> entry : entries) {
ServerConfig serverConfig = MetaClusterCurrent.wrapper(ServerConfig.class);
TableHandler inputTable = entry.getKey();
TableHandler outputTable = entry.getValue();
UserConfig userConfig = routerConfig.getUsers().get(0);
String username = userConfig.getUsername();
String password = userConfig.getPassword();
String ip = serverConfig.getIp();
int port = serverConfig.getPort();
String url = "jdbc:mysql://" + ip + ":" + port + "/mysql?useUnicode=true&characterEncoding=utf8&useSSL=false&serverTimezone=Asia/Shanghai&allowPublicKeyRetrieval=true";
// String insertTemplate = getMySQLInsertTemplate(outputTable);
MigrateUtil.MigrateJdbcAnyOutput output = new MigrateUtil.MigrateJdbcAnyOutput();
output.setUrl(url);
output.setUsername(username);
output.setPassword(password);
List<Partition> partitions = new ArrayList<>();
switch(inputTable.getType()) {
case SHARDING:
{
ShardingTable shardingTable = (ShardingTable) inputTable;
partitions = shardingTable.getShardingFuntion().calculate(Collections.emptyMap());
break;
}
case GLOBAL:
{
GlobalTable globalTable = (GlobalTable) inputTable;
partitions = ImmutableList.of(globalTable.getGlobalDataNode().get(0));
break;
}
case NORMAL:
{
NormalTable normalTable = (NormalTable) inputTable;
partitions = ImmutableList.of(normalTable.getDataNode());
break;
}
case CUSTOM:
case VISUAL:
case VIEW:
throw new UnsupportedOperationException();
}
ReplicaSelectorManager replicaSelectorManager = MetaClusterCurrent.wrapper(ReplicaSelectorManager.class);
Map<String, List<Partition>> listMap = partitions.stream().collect(Collectors.groupingBy(partition -> replicaSelectorManager.getDatasourceNameByReplicaName(partition.getTargetName(), true, null)));
infoCollector.put(inputTable.getUniqueName(), listMap);
List<Flowable<BinlogUtil.ParamSQL>> flowables = new ArrayList<>();
for (Map.Entry<String, List<Partition>> e : listMap.entrySet()) {
flowables.add(BinlogUtil.observe(e.getKey(), e.getValue()).subscribeOn(Schedulers.io()));
}
Flowable<BinlogUtil.ParamSQL> merge = flowables.size() == 1 ? flowables.get(0) : Flowable.merge(flowables, flowables.size());
merge = merge.map(paramSQL -> {
SQLStatement sqlStatement = SQLUtils.parseSingleMysqlStatement(paramSQL.getSql());
SQLExprTableSource sqlExprTableSource = VertxUpdateExecuter.getTableSource(sqlStatement);
MycatSQLExprTableSourceUtil.setSqlExprTableSource(outputTable.getSchemaName(), outputTable.getTableName(), sqlExprTableSource);
paramSQL.setSql(sqlStatement.toString());
return paramSQL;
});
MigrateUtil.MigrateController migrateController = MigrateUtil.writeSql(output, merge);
migrateControllers.add(migrateController);
}
BinlogUtil.BinlogScheduler scheduler = BinlogUtil.BinlogScheduler.of(UUID.randomUUID().toString(), binlogHint.getName(), infoCollector, migrateControllers);
BinlogUtil.register(scheduler);
return response.sendResultSet(BinlogUtil.list(Collections.singletonList(scheduler)));
}
mycatDmlHandler(cmd, body);
return response.sendOk();
}
}
return response.sendOk();
} catch (Throwable throwable) {
return response.sendError(throwable);
}
}
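To show how a hint reaches this handler, the following standalone snippet (illustration only, not Mycat2 code) mimics the substring logic at the top of onExecute that splits the unwrapped hint text into a command name and a JSON body. The showSchemas command and its schemaName field appear in the handler above; the schema name in the body is made up.
public class HintSplitSketch {
    public static void main(String[] args) {
        // Roughly what SqlHints.unWrapperHint(...) would return for a hint such as
        // /*+ mycat:showSchemas{"schemaName":"db1"} */
        String s = "mycat:showSchemas{\"schemaName\":\"db1\"}";
        if (s.startsWith("mycat:") || s.startsWith("MYCAT:")) {
            s = s.substring(6);
            int bodyStartIndex = s.indexOf('{');
            String cmd = bodyStartIndex == -1 ? s : s.substring(0, bodyStartIndex);
            String body = bodyStartIndex == -1 ? "{}" : s.substring(bodyStartIndex);
            System.out.println(cmd.trim() + " -> " + body);
            // prints: showSchemas -> {"schemaName":"db1"}
        }
    }
}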