Use of org.apache.gobblin.rest.JobExecutionInfo in project incubator-gobblin by apache.
From the class DatabaseJobHistoryStoreV100, method processQueryByJobName:
/**
 * Finds all job executions for the given job name, optionally restricted to the
 * query's time range, ordered by creation timestamp descending (newest first).
 *
 * @param connection open JDBC connection (owned by the caller; not closed here)
 * @param jobName non-null, non-empty job name to look up
 * @param query the original query, consulted for time range and row limit
 * @param tableFilter filter forwarded to {@code processQueryById} for task filtering
 * @return one {@code JobExecutionInfo} per matching job id
 * @throws SQLException on any database error, or if the time range fails to parse
 */
private List<JobExecutionInfo> processQueryByJobName(Connection connection, String jobName, JobExecutionQuery query, Filter tableFilter) throws SQLException {
  Preconditions.checkArgument(!Strings.isNullOrEmpty(jobName));

  // Assemble the job-ID lookup query, appending the time-range predicate if one applies.
  Filter timeRangeFilter = Filter.MISSING;
  StringBuilder jobIdByNameQuery = new StringBuilder(JOB_ID_QUERY_BY_JOB_NAME_STATEMENT_TEMPLATE);
  if (query.hasTimeRange()) {
    try {
      timeRangeFilter = constructTimeRangeFilter(query.getTimeRange());
    } catch (ParseException pe) {
      // Surface a malformed time range as a SQLException so callers see one failure type.
      LOGGER.error("Failed to parse the query time range", pe);
      throw new SQLException(pe);
    }
    if (timeRangeFilter.isPresent()) {
      jobIdByNameQuery.append(" AND ").append(timeRangeFilter);
    }
  }
  jobIdByNameQuery.append(" ORDER BY created_ts DESC");

  List<JobExecutionInfo> results = Lists.newArrayList();
  try (PreparedStatement queryStatement = connection.prepareStatement(jobIdByNameQuery.toString())) {
    // A positive limit caps the number of job ids (and therefore executions) returned.
    int limit = query.getLimit();
    if (limit > 0) {
      queryStatement.setMaxRows(limit);
    }
    queryStatement.setString(1, jobName);
    if (timeRangeFilter.isPresent()) {
      // Time-range placeholders start at index 2; index 1 is the job name.
      timeRangeFilter.addParameters(queryStatement, 2);
    }
    try (ResultSet rs = queryStatement.executeQuery()) {
      while (rs.next()) {
        // Column 1 is assumed to be the job id — TODO confirm against the query template.
        results.add(processQueryById(connection, rs.getString(1), query, tableFilter));
      }
    }
  }
  return results;
}
Use of org.apache.gobblin.rest.JobExecutionInfo in project incubator-gobblin by apache.
From the class DatabaseJobHistoryStoreV100, method get:
/**
 * Executes a job-history query, dispatching on the query's id type: a single job
 * id, a job name, a table, or a list-type query. The JDBC connection is obtained
 * per call and always closed before returning; close failures are logged, not thrown.
 *
 * @param query query with both an id and an id type set
 * @return matching executions; an empty list (never null) when nothing matches
 * @throws IOException wrapping any SQLException, or for an unsupported id type
 */
@Override
public synchronized List<JobExecutionInfo> get(JobExecutionQuery query) throws IOException {
  Preconditions.checkArgument(query.hasId() && query.hasIdType());
  Optional<Connection> connectionOptional = Optional.absent();
  try {
    connectionOptional = Optional.of(getConnection());
    Connection connection = connectionOptional.get();
    switch (query.getIdType()) {
      case JOB_ID:
        // A job id identifies at most one execution; wrap it in a list for the common return type.
        List<JobExecutionInfo> singleResult = Lists.newArrayList();
        JobExecutionInfo info = processQueryById(connection, query.getId().getString(), query, Filter.MISSING);
        if (info != null) {
          singleResult.add(info);
        }
        return singleResult;
      case JOB_NAME:
        return processQueryByJobName(connection, query.getId().getString(), query, Filter.MISSING);
      case TABLE:
        return processQueryByTable(connection, query);
      case LIST_TYPE:
        return processListQuery(connection, query);
      default:
        throw new IOException("Unsupported query ID type: " + query.getIdType().name());
    }
  } catch (SQLException se) {
    LOGGER.error("Failed to execute query: " + query, se);
    throw new IOException(se);
  } finally {
    // Close quietly: a close failure must not mask the query result or original error.
    if (connectionOptional.isPresent()) {
      try {
        connectionOptional.get().close();
      } catch (SQLException se) {
        LOGGER.error("Failed to close connection", se);
      }
    }
  }
}
Use of org.apache.gobblin.rest.JobExecutionInfo in project incubator-gobblin by apache.
From the class DatabaseJobHistoryStoreV100, method processQueryById:
/**
 * Loads the full execution record for one job id: the job row itself, then —
 * depending on the query's flags — job metrics, job properties, task executions,
 * task metrics, and task properties.
 *
 * @param connection open JDBC connection (owned by the caller; not closed here)
 * @param jobId non-null, non-empty job id
 * @param query controls which optional sections (metrics, tasks, properties) are loaded
 * @param tableFilter when present, restricts which task rows are returned
 * @return the populated execution info, or null if no job row matches {@code jobId}
 * @throws SQLException on any database error
 */
private JobExecutionInfo processQueryById(Connection connection, String jobId, JobExecutionQuery query, Filter tableFilter) throws SQLException {
Preconditions.checkArgument(!Strings.isNullOrEmpty(jobId));
// Query job execution information
try (PreparedStatement jobExecutionQueryStatement = connection.prepareStatement(JOB_EXECUTION_QUERY_BY_JOB_ID_STATEMENT_TEMPLATE)) {
jobExecutionQueryStatement.setString(1, jobId);
try (ResultSet jobRs = jobExecutionQueryStatement.executeQuery()) {
if (!jobRs.next()) {
// No such job id — callers treat null as "not found".
return null;
}
JobExecutionInfo jobExecutionInfo = resultSetToJobExecutionInfo(jobRs);
// Query job metrics, only when the caller asked for them
if (query.isIncludeJobMetrics()) {
try (PreparedStatement jobMetricQueryStatement = connection.prepareStatement(JOB_METRIC_QUERY_STATEMENT_TEMPLATE)) {
// NOTE(review): keys the metric lookup on column 2 of the job row — presumably the
// job id column of that result set; confirm against the query template.
jobMetricQueryStatement.setString(1, jobRs.getString(2));
try (ResultSet jobMetricRs = jobMetricQueryStatement.executeQuery()) {
MetricArray jobMetrics = new MetricArray();
while (jobMetricRs.next()) {
jobMetrics.add(resultSetToMetric(jobMetricRs));
}
// Add job metrics
jobExecutionInfo.setMetrics(jobMetrics);
}
}
}
// Query job properties; a null key set means "return all properties"
Set<String> requestedJobPropertyKeys = null;
if (query.hasJobProperties()) {
requestedJobPropertyKeys = new HashSet<>(Arrays.asList(query.getJobProperties().split(",")));
}
try (PreparedStatement jobPropertiesQueryStatement = connection.prepareStatement(JOB_PROPERTY_QUERY_STATEMENT_TEMPLATE)) {
jobPropertiesQueryStatement.setString(1, jobExecutionInfo.getJobId());
try (ResultSet jobPropertiesRs = jobPropertiesQueryStatement.executeQuery()) {
Map<String, String> jobProperties = Maps.newHashMap();
while (jobPropertiesRs.next()) {
Map.Entry<String, String> property = resultSetToProperty(jobPropertiesRs);
// Keep only requested keys (or everything when no key filter was given)
if (requestedJobPropertyKeys == null || requestedJobPropertyKeys.contains(property.getKey())) {
jobProperties.put(property.getKey(), property.getValue());
}
}
// Add job properties
jobExecutionInfo.setJobProperties(new StringMap(jobProperties));
}
}
// Query task execution information, only when the caller asked for it
if (query.isIncludeTaskExecutions()) {
TaskExecutionInfoArray taskExecutionInfos = new TaskExecutionInfoArray();
String taskExecutionQuery = TASK_EXECUTION_QUERY_STATEMENT_TEMPLATE;
// Add table filter if applicable
if (tableFilter.isPresent()) {
taskExecutionQuery += " AND " + tableFilter;
}
try (PreparedStatement taskExecutionQueryStatement = connection.prepareStatement(taskExecutionQuery)) {
taskExecutionQueryStatement.setString(1, jobId);
if (tableFilter.isPresent()) {
// Table-filter placeholders start at index 2; index 1 is the job id.
tableFilter.addParameters(taskExecutionQueryStatement, 2);
}
try (ResultSet taskRs = taskExecutionQueryStatement.executeQuery()) {
while (taskRs.next()) {
TaskExecutionInfo taskExecutionInfo = resultSetToTaskExecutionInfo(taskRs);
// Query task metrics for each task execution record (one extra query per task)
if (query.isIncludeTaskMetrics()) {
try (PreparedStatement taskMetricQueryStatement = connection.prepareStatement(TASK_METRIC_QUERY_STATEMENT_TEMPLATE)) {
taskMetricQueryStatement.setString(1, taskExecutionInfo.getTaskId());
try (ResultSet taskMetricRs = taskMetricQueryStatement.executeQuery()) {
MetricArray taskMetrics = new MetricArray();
while (taskMetricRs.next()) {
taskMetrics.add(resultSetToMetric(taskMetricRs));
}
// Add task metrics
taskExecutionInfo.setMetrics(taskMetrics);
}
}
}
taskExecutionInfos.add(taskExecutionInfo);
// Query task properties; a null key set means "return all properties"
Set<String> queryTaskPropertyKeys = null;
if (query.hasTaskProperties()) {
queryTaskPropertyKeys = new HashSet<>(Arrays.asList(query.getTaskProperties().split(",")));
}
try (PreparedStatement taskPropertiesQueryStatement = connection.prepareStatement(TASK_PROPERTY_QUERY_STATEMENT_TEMPLATE)) {
taskPropertiesQueryStatement.setString(1, taskExecutionInfo.getTaskId());
try (ResultSet taskPropertiesRs = taskPropertiesQueryStatement.executeQuery()) {
Map<String, String> taskProperties = Maps.newHashMap();
while (taskPropertiesRs.next()) {
Map.Entry<String, String> property = resultSetToProperty(taskPropertiesRs);
if (queryTaskPropertyKeys == null || queryTaskPropertyKeys.contains(property.getKey())) {
taskProperties.put(property.getKey(), property.getValue());
}
}
// Add task properties (not job properties — this is the per-task map)
taskExecutionInfo.setTaskProperties(new StringMap(taskProperties));
}
}
}
// Add task execution information
jobExecutionInfo.setTaskExecutions(taskExecutionInfos);
}
}
}
return jobExecutionInfo;
}
}
}
Use of org.apache.gobblin.rest.JobExecutionInfo in project incubator-gobblin by apache.
From the class DatabaseJobHistoryStoreV101, method addTasksToJobExecutions:
/**
 * Loads all tasks for the given job executions in bulk, decorates them with
 * metrics and properties, then attaches each job's tasks to its execution info.
 *
 * @param connection open JDBC connection (owned by the caller; not closed here)
 * @param query controls which optional task sections are loaded
 * @param tableFilter when present, restricts which task rows are returned
 * @param jobExecutionInfos map from job id to its execution info; mutated in place
 * @throws SQLException on any database error
 */
private void addTasksToJobExecutions(Connection connection, JobExecutionQuery query, Filter tableFilter, Map<String, JobExecutionInfo> jobExecutionInfos) throws SQLException {
  // Outer key: job id; inner map: task id -> task info.
  Map<String, Map<String, TaskExecutionInfo>> tasksByJob =
      getTasksForJobExecutions(connection, query, tableFilter, jobExecutionInfos);
  addMetricsToTasks(connection, query, tableFilter, tasksByJob);
  addPropertiesToTasks(connection, query, tableFilter, tasksByJob);
  for (Map.Entry<String, Map<String, TaskExecutionInfo>> entry : tasksByJob.entrySet()) {
    TaskExecutionInfoArray taskArray = new TaskExecutionInfoArray();
    taskArray.addAll(entry.getValue().values());
    // Keys of tasksByJob are assumed to exist in jobExecutionInfos — TODO confirm
    // getTasksForJobExecutions only returns tasks for the jobs it was given.
    jobExecutionInfos.get(entry.getKey()).setTaskExecutions(taskArray);
  }
}
Use of org.apache.gobblin.rest.JobExecutionInfo in project incubator-gobblin by apache.
From the class DatabaseJobHistoryStoreV101, method processQueryByTable:
/**
 * Finds job executions by table definition: first resolves the distinct job names
 * matching the table filter, then fetches executions for each name.
 *
 * @param connection open JDBC connection (owned by the caller; not closed here)
 * @param query query whose id holds a table definition
 * @return all executions of all jobs that write the given table
 * @throws SQLException on any database error
 */
private List<JobExecutionInfo> processQueryByTable(Connection connection, JobExecutionQuery query) throws SQLException {
  Preconditions.checkArgument(query.getId().isTable());
  Filter tableFilter = constructTableFilter(query.getId().getTable());

  // Unless explicitly requested, exclude jobs that produced no tasks.
  String jobsWithoutTaskFilter =
      query.isIncludeJobsWithoutTasks() ? "" : " AND " + FILTER_JOBS_WITH_TASKS;
  String jobNameByTableQuery =
      String.format(JOB_NAME_QUERY_BY_TABLE_STATEMENT_TEMPLATE, tableFilter.getFilter(), jobsWithoutTaskFilter);

  List<JobExecutionInfo> executions = Lists.newArrayList();
  try (PreparedStatement queryStatement = connection.prepareStatement(jobNameByTableQuery)) {
    if (tableFilter.isPresent()) {
      // Table-filter placeholders start at index 1; the template has no other parameters.
      tableFilter.addParameters(queryStatement, 1);
    }
    try (ResultSet rs = queryStatement.executeQuery()) {
      while (rs.next()) {
        // Column 1 is assumed to be the job name — TODO confirm against the query template.
        executions.addAll(processQueryByJobName(connection, rs.getString(1), query, tableFilter));
      }
    }
  }
  return executions;
}
Aggregations