Use of org.apache.commons.dbutils.QueryRunner in project metacat by Netflix.
The class HiveConnectorFastPartitionService, method getPartitionNames.
/**
 * getPartitionNames.
 *
 * @param context      connector context
 * @param uris         locations to look up
 * @param prefixSearch if true, match each uri as a prefix; otherwise match exactly
 * @return map of uri to the partition names stored at that uri
 */
@Override
public Map<String, List<QualifiedName>> getPartitionNames(@Nonnull final ConnectorContext context,
                                                          @Nonnull final List<String> uris,
                                                          final boolean prefixSearch) {
    final long start = registry.clock().monotonicTime();
    final Map<String, String> tags = new HashMap<String, String>();
    tags.put("request", HiveMetrics.getPartitionNames.name());
    final Map<String, List<QualifiedName>> result = Maps.newHashMap();
    // Get data source
    final DataSource dataSource = DataSourceManager.get().get(catalogName);
    // Create the sql
    final StringBuilder queryBuilder = new StringBuilder(SQL_GET_PARTITION_NAMES_BY_URI);
    final List<String> params = Lists.newArrayList();
    if (prefixSearch) {
        queryBuilder.append(" 1=2");
        uris.forEach(uri -> {
            queryBuilder.append(" or location like ?");
            params.add(uri + "%");
        });
    } else {
        queryBuilder.append(" location in (");
        Joiner.on(',').appendTo(queryBuilder, uris.stream().map(uri -> "?").collect(Collectors.toList()));
        queryBuilder.append(")");
        params.addAll(uris);
    }
    // Handler for reading the result set
    final ResultSetHandler<Map<String, List<QualifiedName>>> handler = rs -> {
        while (rs.next()) {
            final String schemaName = rs.getString("schema_name");
            final String tableName = rs.getString("table_name");
            final String partitionName = rs.getString("partition_name");
            final String uri = rs.getString("location");
            final List<QualifiedName> partitionNames = result.get(uri);
            final QualifiedName qualifiedName =
                QualifiedName.ofPartition(catalogName, schemaName, tableName, partitionName);
            if (partitionNames == null) {
                result.put(uri, Lists.newArrayList(qualifiedName));
            } else {
                partitionNames.add(qualifiedName);
            }
        }
        return result;
    };
    try (Connection conn = dataSource.getConnection()) {
        new QueryRunner().query(conn, queryBuilder.toString(), handler, params.toArray());
    } catch (SQLException e) {
        throw Throwables.propagate(e);
    } finally {
        final long duration = registry.clock().monotonicTime() - start;
        log.debug("### Time taken to complete getPartitionNames is {} ms", duration);
        this.registry.timer(requestTimerId.withTags(tags)).record(duration, TimeUnit.MILLISECONDS);
    }
    return result;
}
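The method shows the core DBUtils idiom: grow the SQL string and the bind-parameter list in lockstep, then hand both to QueryRunner.query along with a ResultSetHandler that folds rows into the result map. A minimal self-contained sketch of the same idiom follows; the locations table, the jdbc:h2:mem URL, and the H2 driver are assumptions for the demo, not part of metacat.

import java.sql.Connection;
import java.sql.DriverManager;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.dbutils.QueryRunner;
import org.apache.commons.dbutils.ResultSetHandler;

public final class QueryRunnerUriLookup {
    public static void main(final String[] args) throws Exception {
        final QueryRunner runner = new QueryRunner();
        // H2 in-memory database standing in for the metastore (assumption for the demo).
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo")) {
            runner.update(conn, "create table locations (name varchar(64), location varchar(256))");
            runner.update(conn, "insert into locations values (?, ?)", "part1", "s3://bucket/a");
            runner.update(conn, "insert into locations values (?, ?)", "part2", "s3://bucket/a");

            // Fold rows into a uri -> names map, as the handler above does.
            final Map<String, List<String>> byUri = new HashMap<>();
            final ResultSetHandler<Map<String, List<String>>> handler = rs -> {
                while (rs.next()) {
                    byUri.computeIfAbsent(rs.getString("location"), k -> new ArrayList<>())
                        .add(rs.getString("name"));
                }
                return byUri;
            };
            // Prefix search: the bind value carries the trailing '%', not the SQL text.
            final Map<String, List<String>> result = runner.query(conn,
                "select name, location from locations where location like ?",
                handler, "s3://bucket/%");
            System.out.println(result); // {s3://bucket/a=[part1, part2]}
        }
    }
}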
Use of org.apache.commons.dbutils.QueryRunner in project metacat by Netflix.
The class HiveConnectorFastPartitionService, method getPartitionKeys.
private List<FieldSchema> getPartitionKeys(final String databaseName, final String tableName) {
    // Get data source
    final DataSource dataSource = DataSourceManager.get().get(catalogName);
    // Handler that maps each row to a Hive FieldSchema (name, type, no comment)
    final ResultSetHandler<List<FieldSchema>> handler = rs -> {
        final List<FieldSchema> result = Lists.newArrayList();
        while (rs.next()) {
            final String name = rs.getString("pkey_name");
            final String type = rs.getString("pkey_type");
            result.add(new FieldSchema(name, type, null));
        }
        return result;
    };
    try (Connection conn = dataSource.getConnection()) {
        return new QueryRunner().query(conn, SQL_GET_PARTITION_KEYS, handler, databaseName, tableName);
    } catch (SQLException e) {
        throw Throwables.propagate(e);
    }
}
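Here the bind values go straight into the query varargs, and the handler builds the list by hand. When only a single column is needed, DBUtils' stock handlers can replace the loop entirely. A sketch using ColumnListHandler; the class name PartitionKeyNames and the decision to return only pkey_name are ours, not metacat's:

import java.sql.Connection;
import java.sql.SQLException;
import java.util.List;
import org.apache.commons.dbutils.QueryRunner;
import org.apache.commons.dbutils.handlers.ColumnListHandler;

final class PartitionKeyNames {
    private PartitionKeyNames() {
    }

    // Collects the pkey_name column of every row into a List<String>.
    static List<String> keyNames(final Connection conn, final String sql,
                                 final String databaseName, final String tableName) throws SQLException {
        return new QueryRunner().query(conn, sql,
            new ColumnListHandler<String>("pkey_name"), databaseName, tableName);
    }
}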
Use of org.apache.commons.dbutils.QueryRunner in project metacat by Netflix.
The class HiveConnectorFastTableService, method getTableNames.
@Override
public Map<String, List<QualifiedName>> getTableNames(@Nonnull final ConnectorContext context,
                                                      @Nonnull final List<String> uris,
                                                      final boolean prefixSearch) {
    final long start = registry.clock().monotonicTime();
    final Map<String, String> tags = new HashMap<String, String>();
    tags.put("request", HiveMetrics.getTableNames.name());
    final Map<String, List<QualifiedName>> result = Maps.newHashMap();
    // Get data source
    final DataSource dataSource = DataSourceManager.get().get(catalogName);
    // Create the sql
    final StringBuilder queryBuilder = new StringBuilder(SQL_GET_TABLE_NAMES_BY_URI);
    final List<String> params = Lists.newArrayList();
    if (prefixSearch) {
        queryBuilder.append(" and (1=0");
        uris.forEach(uri -> {
            queryBuilder.append(" or location like ?");
            params.add(uri + "%");
        });
        queryBuilder.append(" )");
    } else {
        queryBuilder.append(" and location in (");
        uris.forEach(uri -> {
            queryBuilder.append("?,");
            params.add(uri);
        });
        queryBuilder.deleteCharAt(queryBuilder.length() - 1).append(")");
    }
    // Handler for reading the result set
    final ResultSetHandler<Map<String, List<QualifiedName>>> handler = rs -> {
        while (rs.next()) {
            final String schemaName = rs.getString("schema_name");
            final String tableName = rs.getString("table_name");
            final String uri = rs.getString("location");
            List<QualifiedName> names = result.get(uri);
            if (names == null) {
                names = Lists.newArrayList();
                result.put(uri, names);
            }
            names.add(QualifiedName.ofTable(catalogName, schemaName, tableName));
        }
        return result;
    };
    try (Connection conn = dataSource.getConnection()) {
        new QueryRunner().query(conn, queryBuilder.toString(), handler, params.toArray());
    } catch (SQLException e) {
        throw Throwables.propagate(e);
    } finally {
        final long duration = registry.clock().monotonicTime() - start;
        log.debug("### Time taken to complete getTableNames is {} ms", duration);
        this.registry.timer(requestTimerId.withTags(tags)).record(duration, TimeUnit.MILLISECONDS);
    }
    return result;
}
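Note that getPartitionNames builds its IN clause with Joiner while getTableNames appends "?," per uri and deletes the trailing comma; both yield "?,?,...,?". A one-line helper would keep the two call sites identical. The helper below is a sketch, and the name inPlaceholders is ours, not metacat's:

import java.util.Collections;
import com.google.common.base.Joiner;

final class SqlFragments {
    private SqlFragments() {
    }

    // inPlaceholders(3) returns "?,?,?"; pair it with params.addAll(uris).
    static String inPlaceholders(final int count) {
        return Joiner.on(',').join(Collections.nCopies(count, "?"));
    }
}

A call site would then read: queryBuilder.append(" and location in (").append(SqlFragments.inPlaceholders(uris.size())).append(")").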
Use of org.apache.commons.dbutils.QueryRunner in project metacat by Netflix.
The class MysqlUserMetadataService, method executeUpdateForKey.
private int executeUpdateForKey(final String query, final String... keyValues) {
    int result;
    try {
        final Connection conn = poolingDataSource.getConnection();
        try {
            result = new QueryRunner().update(conn, query, (Object[]) keyValues);
            conn.commit();
        } catch (SQLException e) {
            conn.rollback();
            throw e;
        } finally {
            conn.close();
        }
    } catch (SQLException e) {
        log.error("Sql exception", e);
        throw new UserMetadataServiceException(
            String.format("Failed to save data for %s", Arrays.toString(keyValues)), e);
    }
    return result;
}
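The manual close above can be compressed with try-with-resources while keeping the same commit-on-success, rollback-on-failure contract. A sketch, assuming a DataSource whose connections have auto-commit disabled (as the pooled connections here evidently do, or the explicit commit would be redundant):

import java.sql.Connection;
import java.sql.SQLException;
import javax.sql.DataSource;
import org.apache.commons.dbutils.QueryRunner;

final class TransactionalUpdate {
    private TransactionalUpdate() {
    }

    // Runs one update in an explicit transaction; the connection closes either way.
    static int updateForKey(final DataSource dataSource, final String sql,
                            final Object... params) throws SQLException {
        try (Connection conn = dataSource.getConnection()) {
            try {
                final int rows = new QueryRunner().update(conn, sql, params);
                conn.commit();
                return rows;
            } catch (SQLException e) {
                conn.rollback();
                throw e;
            }
        }
    }
}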
Use of org.apache.commons.dbutils.QueryRunner in project metacat by Netflix.
The class MysqlUserMetadataService, method searchDefinitionMetadatas.
@Override
public List<DefinitionMetadataDto> searchDefinitionMetadatas(final Set<String> propertyNames,
                                                             final String type,
                                                             final String name,
                                                             final String sortBy,
                                                             final String sortOrder,
                                                             final Integer offset,
                                                             final Integer limit) {
    final List<DefinitionMetadataDto> result = Lists.newArrayList();
    final StringBuilder query = new StringBuilder(SQL.SEARCH_DEFINITION_METADATAS);
    final List<Object> paramList = Lists.newArrayList();
    if (type != null) {
        String typeRegex = null;
        switch (type) {
            case "database":
                typeRegex = "^[^/]*/[^/]*$";
                break;
            case "table":
                typeRegex = "^[^/]*/[^/]*/[^/]*$";
                break;
            case "partition":
                typeRegex = "^[^/]*/[^/]*/[^/]*/.*$";
                break;
            default:
        }
        if (typeRegex != null) {
            query.append(" and name rlike ?");
            paramList.add(typeRegex);
        }
    }
    if (propertyNames != null && !propertyNames.isEmpty()) {
        propertyNames.forEach(propertyName -> {
            query.append(" and data like ?");
            paramList.add("%\"" + propertyName + "\":%");
        });
    }
    if (!Strings.isNullOrEmpty(name)) {
        query.append(" and name like ?");
        paramList.add(name);
    }
    if (!Strings.isNullOrEmpty(sortBy)) {
        query.append(" order by ").append(sortBy);
        if (!Strings.isNullOrEmpty(sortOrder)) {
            query.append(" ").append(sortOrder);
        }
    }
    if (limit != null) {
        query.append(" limit ");
        if (offset != null) {
            query.append(offset).append(",");
        }
        query.append(limit);
    }
    final Object[] params = new Object[paramList.size()];
    final Connection connection = DBUtil.getReadConnection(poolingDataSource);
    try {
        // Handler for reading the result set
        final ResultSetHandler<Void> handler = rs -> {
            while (rs.next()) {
                final String definitionName = rs.getString("name");
                final String data = rs.getString("data");
                final DefinitionMetadataDto definitionMetadataDto = new DefinitionMetadataDto();
                definitionMetadataDto.setName(QualifiedName.fromString(definitionName));
                definitionMetadataDto.setDefinitionMetadata(metacatJson.parseJsonObject(data));
                result.add(definitionMetadataDto);
            }
            return null;
        };
        new QueryRunner().query(connection, query.toString(), handler, paramList.toArray(params));
    } catch (SQLException e) {
        log.error("Sql exception", e);
        throw new UserMetadataServiceException("Failed to get definition data", e);
    } finally {
        DBUtil.closeReadConnection(connection);
    }
    return result;
}
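The method's discipline is worth copying: every user-supplied value is appended to paramList and reaches the SQL only as a ?, so QueryRunner binds it safely; sortBy and sortOrder are interpolated into the string and therefore must come from trusted input. A condensed sketch of the same pattern follows; the table name definition_metadata and the MapListHandler result shape are our choices for the demo:

import java.sql.Connection;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.commons.dbutils.QueryRunner;
import org.apache.commons.dbutils.handlers.MapListHandler;

final class DynamicSearch {
    private DynamicSearch() {
    }

    // Grows the WHERE clause and the bind list in lockstep, then runs the query once.
    static List<Map<String, Object>> search(final Connection conn, final String nameLike,
                                            final Integer offset, final Integer limit) throws SQLException {
        final StringBuilder sql = new StringBuilder("select name, data from definition_metadata where 1=1");
        final List<Object> params = new ArrayList<>();
        if (nameLike != null) {
            sql.append(" and name like ?"); // value travels as a bind parameter
            params.add(nameLike);
        }
        if (limit != null) {
            sql.append(" limit ");
            if (offset != null) {
                sql.append(offset).append(','); // MySQL "limit offset,count", as above
            }
            sql.append(limit);
        }
        return new QueryRunner().query(conn, sql.toString(), new MapListHandler(), params.toArray());
    }
}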