Use of org.apache.gobblin.rest.TaskExecutionInfo in project incubator-gobblin by apache.
The class DatabaseJobHistoryStoreV101, method upsertTaskProperties:
private void upsertTaskProperties(Connection connection, Optional<StringMap> jobProperties,
    TaskExecutionInfoArray taskExecutions) throws SQLException {
  Optional<PreparedStatement> upsertStatement = Optional.absent();
  int batchSize = 0;
  for (TaskExecutionInfo taskExecution : taskExecutions) {
    if (taskExecution.hasTaskProperties()) {
      for (Map.Entry<String, String> property : taskExecution.getTaskProperties().entrySet()) {
        if (!jobProperties.isPresent() || !jobProperties.get().containsKey(property.getKey())
            || !jobProperties.get().get(property.getKey()).equals(property.getValue())) {
          if (!upsertStatement.isPresent()) {
            upsertStatement = Optional.of(connection.prepareStatement(TASK_PROPERTY_UPSERT_STATEMENT_TEMPLATE));
          }
          addPropertyToBatch(upsertStatement.get(), property.getKey(), property.getValue(), taskExecution.getTaskId());
          if (batchSize++ > 1000) {
            executeBatches(upsertStatement);
            upsertStatement = Optional.absent();
            batchSize = 0;
          }
        }
      }
    }
  }
  executeBatches(upsertStatement);
}
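The executeBatches helper used above is not part of this excerpt. As a minimal sketch of the batching pattern it supports, assuming it simply flushes and closes any statement that was created (the actual helper in DatabaseJobHistoryStoreV101 may differ in detail):

// Hedged sketch, not the Gobblin implementation: execute and close the batched
// statement if one is present; a missing statement means there was nothing to flush.
private void executeBatches(Optional<PreparedStatement> upsertStatement) throws SQLException {
  if (upsertStatement.isPresent()) {
    try {
      upsertStatement.get().executeBatch();
    } finally {
      upsertStatement.get().close();
    }
  }
}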
Use of org.apache.gobblin.rest.TaskExecutionInfo in project incubator-gobblin by apache.
The class DatabaseJobHistoryStoreV101, method addTasksToJobExecutions:
private void addTasksToJobExecutions(Connection connection, JobExecutionQuery query, Filter tableFilter,
    Map<String, JobExecutionInfo> jobExecutionInfos) throws SQLException {
  Map<String, Map<String, TaskExecutionInfo>> tasksExecutions =
      getTasksForJobExecutions(connection, query, tableFilter, jobExecutionInfos);
  addMetricsToTasks(connection, query, tableFilter, tasksExecutions);
  addPropertiesToTasks(connection, query, tableFilter, tasksExecutions);
  for (Map.Entry<String, Map<String, TaskExecutionInfo>> taskExecution : tasksExecutions.entrySet()) {
    JobExecutionInfo jobExecutionInfo = jobExecutionInfos.get(taskExecution.getKey());
    TaskExecutionInfoArray taskExecutionInfos = new TaskExecutionInfoArray();
    for (TaskExecutionInfo taskExecutionInfo : taskExecution.getValue().values()) {
      taskExecutionInfos.add(taskExecutionInfo);
    }
    jobExecutionInfo.setTaskExecutions(taskExecutionInfos);
  }
}
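The intermediate map returned by getTasksForJobExecutions is keyed by job ID at the first level and by task ID at the second level. A small hedged sketch of building that shape for a single task, using Guava's Maps and hypothetical identifier values (the real population logic is in getTasksForJobExecutions, which is not shown on this page):

// Hedged sketch of the nested structure: outer key = job id, inner key = task id.
Map<String, Map<String, TaskExecutionInfo>> tasksExecutions = Maps.newHashMap();
String jobId = "job_Example_1";       // hypothetical identifiers for illustration
String taskId = "task_Example_1_0";
TaskExecutionInfo taskExecutionInfo = new TaskExecutionInfo();
taskExecutionInfo.setTaskId(taskId);  // setter assumed to mirror the getTaskId() call used above
if (!tasksExecutions.containsKey(jobId)) {
  tasksExecutions.put(jobId, Maps.<String, TaskExecutionInfo>newHashMap());
}
tasksExecutions.get(jobId).put(taskId, taskExecutionInfo);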
Use of org.apache.gobblin.rest.TaskExecutionInfo in project incubator-gobblin by apache.
The class DatabaseJobHistoryStoreV101, method addMetricsToTasks:
private void addMetricsToTasks(Connection connection, JobExecutionQuery query, Filter tableFilter,
    Map<String, Map<String, TaskExecutionInfo>> taskExecutionInfos) throws SQLException {
  if (query.isIncludeTaskMetrics() && taskExecutionInfos.size() > 0) {
    int index = 1;
    String template = String.format(TASK_METRIC_QUERY_STATEMENT_TEMPLATE, getInPredicate(taskExecutionInfos.size()));
    if (tableFilter.isPresent()) {
      template += " AND t." + tableFilter;
    }
    try (PreparedStatement taskMetricQueryStatement = connection.prepareStatement(template)) {
      for (String jobId : taskExecutionInfos.keySet()) {
        taskMetricQueryStatement.setString(index++, jobId);
      }
      if (tableFilter.isPresent()) {
        tableFilter.addParameters(taskMetricQueryStatement, index);
      }
      try (ResultSet taskMetricRs = taskMetricQueryStatement.executeQuery()) {
        while (taskMetricRs.next()) {
          String jobId = taskMetricRs.getString("job_id");
          String taskId = taskMetricRs.getString("task_id");
          TaskExecutionInfo taskExecutionInfo = taskExecutionInfos.get(jobId).get(taskId);
          MetricArray metricsArray = taskExecutionInfo.getMetrics(GetMode.NULL);
          if (metricsArray == null) {
            metricsArray = new MetricArray();
            taskExecutionInfo.setMetrics(metricsArray);
          }
          metricsArray.add(resultSetToMetric(taskMetricRs));
        }
      }
    }
  }
}
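Both this method and addPropertiesToTasks below rely on getInPredicate(n) to expand the IN (...) clause with one JDBC placeholder per expected parameter. That helper is not shown on this page; a minimal sketch of the idea (the actual Gobblin helper may be written differently):

// Hedged sketch: build a comma-separated list of placeholders,
// e.g. getInPredicate(3) yields "?,?,?".
private static String getInPredicate(int count) {
  return Joiner.on(",").join(Collections.nCopies(count, "?"));
}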
Use of org.apache.gobblin.rest.TaskExecutionInfo in project incubator-gobblin by apache.
The class DatabaseJobHistoryStoreV101, method addPropertiesToTasks:
private void addPropertiesToTasks(Connection connection, JobExecutionQuery query, Filter tableFilter,
    Map<String, Map<String, TaskExecutionInfo>> taskExecutionInfos) throws SQLException {
  if (taskExecutionInfos.size() > 0) {
    Set<String> propertyKeys = null;
    if (query.hasTaskProperties()) {
      propertyKeys = Sets.newHashSet(
          Iterables.filter(Arrays.asList(query.getTaskProperties().split(",")), new Predicate<String>() {

            @Override
            public boolean apply(String input) {
              return !Strings.isNullOrEmpty(input);
            }
          }));
    }
    if (propertyKeys == null || propertyKeys.size() > 0) {
      String template = String.format(TASK_PROPERTY_QUERY_STATEMENT_TEMPLATE, getInPredicate(taskExecutionInfos.size()));
      if (propertyKeys != null && propertyKeys.size() > 0) {
        template += String.format("AND property_key IN (%s)", getInPredicate(propertyKeys.size()));
      }
      if (tableFilter.isPresent()) {
        template += " AND t." + tableFilter;
      }
      int index = 1;
      try (PreparedStatement taskPropertiesQueryStatement = connection.prepareStatement(template)) {
        for (String jobId : taskExecutionInfos.keySet()) {
          taskPropertiesQueryStatement.setString(index++, jobId);
        }
        if (propertyKeys != null && propertyKeys.size() > 0) {
          for (String propertyKey : propertyKeys) {
            taskPropertiesQueryStatement.setString(index++, propertyKey);
          }
        }
        if (tableFilter.isPresent()) {
          tableFilter.addParameters(taskPropertiesQueryStatement, index);
        }
        try (ResultSet taskPropertiesRs = taskPropertiesQueryStatement.executeQuery()) {
          while (taskPropertiesRs.next()) {
            String jobId = taskPropertiesRs.getString("job_id");
            String taskId = taskPropertiesRs.getString("task_id");
            TaskExecutionInfo taskExecutionInfo = taskExecutionInfos.get(jobId).get(taskId);
            StringMap taskProperties = taskExecutionInfo.getTaskProperties(GetMode.NULL);
            if (taskProperties == null) {
              taskProperties = new StringMap();
              taskExecutionInfo.setTaskProperties(taskProperties);
            }
            Map.Entry<String, String> property = resultSetToProperty(taskPropertiesRs);
            if (propertyKeys == null || propertyKeys.contains(property.getKey())) {
              taskProperties.put(property.getKey(), property.getValue());
            }
          }
        }
      }
    }
  }
}
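resultSetToProperty is another helper that is not included in this excerpt. A hedged sketch of what it could look like, assuming the task-properties table exposes a property_key column (referenced in the query above) and a property_value column (an assumption, not confirmed from the Gobblin source):

// Hedged sketch: map the current row to a key/value pair; the property_value
// column name is an assumption based on the query template above.
private Map.Entry<String, String> resultSetToProperty(ResultSet rs) throws SQLException {
  return new AbstractMap.SimpleEntry<>(rs.getString("property_key"), rs.getString("property_value"));
}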
Use of org.apache.gobblin.rest.TaskExecutionInfo in project incubator-gobblin by apache.
The class DatabaseJobHistoryStoreV101, method upsertTaskExecutionInfos:
private void upsertTaskExecutionInfos(Connection connection, TaskExecutionInfoArray taskExecutions) throws SQLException {
  Optional<PreparedStatement> upsertStatement = Optional.absent();
  int batchSize = 0;
  for (TaskExecutionInfo taskExecution : taskExecutions) {
    if (!upsertStatement.isPresent()) {
      upsertStatement = Optional.of(connection.prepareStatement(TASK_EXECUTION_UPSERT_STATEMENT_TEMPLATE));
    }
    addTaskExecutionInfoToBatch(upsertStatement.get(), taskExecution);
    if (batchSize++ > 1000) {
      executeBatches(upsertStatement);
      upsertStatement = Optional.absent();
      batchSize = 0;
    }
  }
  executeBatches(upsertStatement);
}
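A hypothetical caller-side sketch of how the two upsert methods above might be driven for one job execution; the identifiers and the transaction handling are illustrative assumptions, not code from DatabaseJobHistoryStoreV101:

// Hedged usage sketch: build a small TaskExecutionInfoArray and batch-upsert it.
TaskExecutionInfoArray taskExecutions = new TaskExecutionInfoArray();
TaskExecutionInfo task = new TaskExecutionInfo();
task.setTaskId("task_Example_1_0");  // hypothetical task identifier
taskExecutions.add(task);

connection.setAutoCommit(false);     // illustrative transaction handling
try {
  upsertTaskExecutionInfos(connection, taskExecutions);
  upsertTaskProperties(connection, Optional.<StringMap>absent(), taskExecutions);
  connection.commit();
} catch (SQLException e) {
  connection.rollback();
  throw e;
}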