Search in sources :

Example 1 with TaskExecutionInfo

use of org.apache.gobblin.rest.TaskExecutionInfo in project incubator-gobblin by apache.

From the class DatabaseJobHistoryStoreV100, the method put:

/**
 * Inserts a new job execution record, or updates the existing one, together with its
 * job-level metrics, job-level properties and per-task execution information.
 * All writes happen in a single transaction: on any {@link SQLException} the
 * transaction is rolled back and the error is rethrown wrapped in an {@link IOException}.
 *
 * @param jobExecutionInfo the job execution information to persist
 * @throws IOException if any database operation fails
 */
@Override
public synchronized void put(JobExecutionInfo jobExecutionInfo) throws IOException {
    Optional<Connection> connectionOptional = Optional.absent();
    try {
        connectionOptional = Optional.of(getConnection());
        Connection connection = connectionOptional.get();
        // Everything below must commit or roll back atomically.
        connection.setAutoCommit(false);
        // Insert or update job execution information
        if (existsJobExecutionInfo(connection, jobExecutionInfo)) {
            updateJobExecutionInfo(connection, jobExecutionInfo);
        } else {
            insertJobExecutionInfo(connection, jobExecutionInfo);
        }
        upsertJobMetrics(connection, jobExecutionInfo);
        upsertJobProperties(connection, jobExecutionInfo);
        upsertTaskExecutions(connection, jobExecutionInfo);
        connection.commit();
    } catch (SQLException se) {
        LOGGER.error("Failed to put a new job execution information record", se);
        if (connectionOptional.isPresent()) {
            try {
                connectionOptional.get().rollback();
            } catch (SQLException se1) {
                // Rollback failure is logged but not rethrown; the original error is more useful.
                LOGGER.error("Failed to rollback", se1);
            }
        }
        throw new IOException(se);
    } finally {
        if (connectionOptional.isPresent()) {
            try {
                connectionOptional.get().close();
            } catch (SQLException se) {
                LOGGER.error("Failed to close connection", se);
            }
        }
    }
}

/** Inserts or updates each job-level metric of the given job execution, if any. */
private void upsertJobMetrics(Connection connection, JobExecutionInfo jobExecutionInfo) throws SQLException {
    if (!jobExecutionInfo.hasMetrics()) {
        return;
    }
    for (Metric metric : jobExecutionInfo.getMetrics()) {
        boolean insert = !existsMetric(connection, JOB_METRIC_EXIST_QUERY_STATEMENT_TEMPLATE, jobExecutionInfo.getJobId(), metric);
        updateMetric(connection, insert ? JOB_METRIC_INSERT_STATEMENT_TEMPLATE : JOB_METRIC_UPDATE_STATEMENT_TEMPLATE, jobExecutionInfo.getJobId(), metric, insert);
    }
}

/** Inserts or updates each job-level property of the given job execution, if any. */
private void upsertJobProperties(Connection connection, JobExecutionInfo jobExecutionInfo) throws SQLException {
    if (!jobExecutionInfo.hasJobProperties()) {
        return;
    }
    for (Map.Entry<String, String> entry : jobExecutionInfo.getJobProperties().entrySet()) {
        boolean insert = !existsProperty(connection, JOB_PROPERTY_EXIST_QUERY_STATEMENT_TEMPLATE, jobExecutionInfo.getJobId(), entry.getKey());
        updateProperty(connection, insert ? JOB_PROPERTY_INSERT_STATEMENT_TEMPLATE : JOB_PROPERTY_UPDATE_STATEMENT_TEMPLATE, jobExecutionInfo.getJobId(), entry.getKey(), entry.getValue(), insert);
    }
}

/**
 * Inserts or updates every task execution of the given job, including each task's
 * metrics and properties.
 */
private void upsertTaskExecutions(Connection connection, JobExecutionInfo jobExecutionInfo) throws SQLException {
    if (!jobExecutionInfo.hasTaskExecutions()) {
        return;
    }
    for (TaskExecutionInfo info : jobExecutionInfo.getTaskExecutions()) {
        // Insert or update task execution information
        if (existsTaskExecutionInfo(connection, info)) {
            updateTaskExecutionInfo(connection, info);
        } else {
            insertTaskExecutionInfo(connection, info);
        }
        // Insert or update task metrics
        if (info.hasMetrics()) {
            for (Metric metric : info.getMetrics()) {
                boolean insert = !existsMetric(connection, TASK_METRIC_EXIST_QUERY_STATEMENT_TEMPLATE, info.getTaskId(), metric);
                updateMetric(connection, insert ? TASK_METRIC_INSERT_STATEMENT_TEMPLATE : TASK_METRIC_UPDATE_STATEMENT_TEMPLATE, info.getTaskId(), metric, insert);
            }
        }
        // Insert or update task properties
        if (info.hasTaskProperties()) {
            for (Map.Entry<String, String> entry : info.getTaskProperties().entrySet()) {
                boolean insert = !existsProperty(connection, TASK_PROPERTY_EXIST_QUERY_STATEMENT_TEMPLATE, info.getTaskId(), entry.getKey());
                updateProperty(connection, insert ? TASK_PROPERTY_INSERT_STATEMENT_TEMPLATE : TASK_PROPERTY_UPDATE_STATEMENT_TEMPLATE, info.getTaskId(), entry.getKey(), entry.getValue(), insert);
            }
        }
    }
}
Also used : TaskExecutionInfo(org.apache.gobblin.rest.TaskExecutionInfo) SQLException(java.sql.SQLException) Connection(java.sql.Connection) Metric(org.apache.gobblin.rest.Metric) IOException(java.io.IOException) Map(java.util.Map) StringMap(com.linkedin.data.template.StringMap) AbstractMap(java.util.AbstractMap)

Example 2 with TaskExecutionInfo

use of org.apache.gobblin.rest.TaskExecutionInfo in project incubator-gobblin by apache.

From the class DatabaseJobHistoryStoreV101, the method upsertTaskMetrics:

/**
 * Upserts all task-level metrics of the given task executions in JDBC batches.
 * A fresh prepared statement is created lazily, flushed once the batch grows past
 * the threshold, and the remainder is flushed once at the end.
 */
private void upsertTaskMetrics(Connection connection, TaskExecutionInfoArray taskExecutions) throws SQLException {
    Optional<PreparedStatement> statement = Optional.absent();
    int pending = 0;
    for (TaskExecutionInfo task : taskExecutions) {
        if (!task.hasMetrics()) {
            continue;
        }
        for (Metric taskMetric : task.getMetrics()) {
            // Prepare the upsert statement on first use after each flush.
            if (!statement.isPresent()) {
                statement = Optional.of(connection.prepareStatement(TASK_METRIC_UPSERT_STATEMENT_TEMPLATE));
            }
            addMetricToBatch(statement.get(), taskMetric, task.getTaskId());
            if (pending++ > 1000) {
                executeBatches(statement);
                statement = Optional.absent();
                pending = 0;
            }
        }
    }
    // Flush whatever remains (no-op when no statement was created).
    executeBatches(statement);
}
Also used : TaskExecutionInfo(org.apache.gobblin.rest.TaskExecutionInfo) PreparedStatement(java.sql.PreparedStatement) Metric(org.apache.gobblin.rest.Metric)

Example 3 with TaskExecutionInfo

use of org.apache.gobblin.rest.TaskExecutionInfo in project incubator-gobblin by apache.

From the class DatabaseJobHistoryStoreV101, the method getTasksForJobExecutions:

/**
 * Loads the task executions belonging to the given job executions, keyed first by
 * job id and then by task id (both maps preserve insertion order). Returns an empty
 * map when task executions were not requested or there are no job executions.
 */
private Map<String, Map<String, TaskExecutionInfo>> getTasksForJobExecutions(Connection connection, JobExecutionQuery query, Filter tableFilter, Map<String, JobExecutionInfo> jobExecutionInfos) throws SQLException {
    Map<String, Map<String, TaskExecutionInfo>> tasksByJobId = Maps.newLinkedHashMap();
    if (!query.isIncludeTaskExecutions() || jobExecutionInfos.size() == 0) {
        return tasksByJobId;
    }
    // Build the query with one IN-predicate placeholder per job id, plus the optional table filter.
    String sql = String.format(TASK_EXECUTION_QUERY_STATEMENT_TEMPLATE, getInPredicate(jobExecutionInfos.size()));
    if (tableFilter.isPresent()) {
        sql += " AND " + tableFilter;
    }
    try (PreparedStatement statement = connection.prepareStatement(sql)) {
        int parameterIndex = 1;
        for (String jobId : jobExecutionInfos.keySet()) {
            statement.setString(parameterIndex++, jobId);
        }
        if (tableFilter.isPresent()) {
            tableFilter.addParameters(statement, parameterIndex);
        }
        try (ResultSet resultSet = statement.executeQuery()) {
            while (resultSet.next()) {
                TaskExecutionInfo task = resultSetToTaskExecutionInfo(resultSet);
                Map<String, TaskExecutionInfo> tasksById = tasksByJobId.get(task.getJobId());
                if (tasksById == null) {
                    tasksById = Maps.newLinkedHashMap();
                    tasksByJobId.put(task.getJobId(), tasksById);
                }
                tasksById.put(task.getTaskId(), task);
            }
        }
    }
    return tasksByJobId;
}
Also used : TaskExecutionInfo(org.apache.gobblin.rest.TaskExecutionInfo) ResultSet(java.sql.ResultSet) PreparedStatement(java.sql.PreparedStatement) Map(java.util.Map) StringMap(com.linkedin.data.template.StringMap) AbstractMap(java.util.AbstractMap)

Example 4 with TaskExecutionInfo

use of org.apache.gobblin.rest.TaskExecutionInfo in project incubator-gobblin by apache.

From the class DatabaseJobHistoryStoreV101, the method resultSetToTaskExecutionInfo:

/**
 * Maps the current row of a task execution query result into a {@link TaskExecutionInfo}.
 *
 * Reads the columns: task_id, job_id, start_time, end_time, duration, state,
 * failure_exception, low_watermark, high_watermark, table_namespace, table_name,
 * table_type. Optional string-valued fields are only set when non-empty, and the
 * state/table-type strings must match their enum constant names.
 *
 * @param rs the result set positioned at the row to convert
 * @return the populated task execution information
 * @throws SQLException if reading a required column fails
 */
private TaskExecutionInfo resultSetToTaskExecutionInfo(ResultSet rs) throws SQLException {
    TaskExecutionInfo taskExecutionInfo = new TaskExecutionInfo();
    taskExecutionInfo.setTaskId(rs.getString("task_id"));
    taskExecutionInfo.setJobId(rs.getString("job_id"));
    try {
        Timestamp startTime = rs.getTimestamp("start_time");
        if (startTime != null) {
            taskExecutionInfo.setStartTime(startTime.getTime());
        }
    } catch (SQLException se) {
        // Deliberately swallowed: an unreadable timestamp value (NOTE(review): presumably
        // e.g. a MySQL zero date — confirm) falls back to a start time of 0.
        taskExecutionInfo.setStartTime(0);
    }
    try {
        Timestamp endTime = rs.getTimestamp("end_time");
        if (endTime != null) {
            taskExecutionInfo.setEndTime(endTime.getTime());
        }
    } catch (SQLException se) {
        // Same fallback as start_time: unreadable end timestamps become 0.
        taskExecutionInfo.setEndTime(0);
    }
    taskExecutionInfo.setDuration(rs.getLong("duration"));
    String state = rs.getString("state");
    if (!Strings.isNullOrEmpty(state)) {
        // Throws IllegalArgumentException if the stored state is not a TaskStateEnum name.
        taskExecutionInfo.setState(TaskStateEnum.valueOf(state));
    }
    String failureException = rs.getString("failure_exception");
    if (!Strings.isNullOrEmpty(failureException)) {
        taskExecutionInfo.setFailureException(failureException);
    }
    taskExecutionInfo.setLowWatermark(rs.getLong("low_watermark"));
    taskExecutionInfo.setHighWatermark(rs.getLong("high_watermark"));
    // The table record is always attached, even when all of its fields are unset.
    Table table = new Table();
    String namespace = rs.getString("table_namespace");
    if (!Strings.isNullOrEmpty(namespace)) {
        table.setNamespace(namespace);
    }
    String name = rs.getString("table_name");
    if (!Strings.isNullOrEmpty(name)) {
        table.setName(name);
    }
    String type = rs.getString("table_type");
    if (!Strings.isNullOrEmpty(type)) {
        table.setType(TableTypeEnum.valueOf(type));
    }
    taskExecutionInfo.setTable(table);
    return taskExecutionInfo;
}
Also used : Table(org.apache.gobblin.rest.Table) TaskExecutionInfo(org.apache.gobblin.rest.TaskExecutionInfo) SQLException(java.sql.SQLException) Timestamp(java.sql.Timestamp)

Example 5 with TaskExecutionInfo

use of org.apache.gobblin.rest.TaskExecutionInfo in project incubator-gobblin by apache.

From the class DatabaseJobHistoryStoreTest, the method create:

/**
 * Builds a job execution fixture with two task executions, registers it in
 * {@code expectedJobExecutionInfos}, and returns it.
 *
 * @param index disambiguates job names and property keys across fixtures
 * @param differentTableType when true, the second task uses SNAPSHOT_APPEND instead of
 *        SNAPSHOT_ONLY so the two tasks target different table types
 * @return the newly created job execution fixture
 */
private JobExecutionInfo create(int index, boolean differentTableType) {
    JobExecutionInfo jobExecutionInfo = new JobExecutionInfo();
    jobExecutionInfo.setJobName("TestJob" + index);
    jobExecutionInfo.setJobId(jobExecutionInfo.getJobName() + "_" + System.currentTimeMillis());
    jobExecutionInfo.setStartTime(System.currentTimeMillis());
    jobExecutionInfo.setState(JobStateEnum.PENDING);
    jobExecutionInfo.setLaunchedTasks(2);
    jobExecutionInfo.setCompletedTasks(0);
    jobExecutionInfo.setLauncherType(LauncherTypeEnum.LOCAL);
    jobExecutionInfo.setTrackingUrl("localhost");
    jobExecutionInfo.setMetrics(createCounterMetrics("JOB", "jm1"));
    Map<String, String> jobProperties = Maps.newHashMap();
    jobProperties.put("k" + index, "v" + index);
    jobExecutionInfo.setJobProperties(new StringMap(jobProperties));
    // Two tasks: same namespace, different tables/watermarks/metrics/properties.
    TaskExecutionInfoArray taskExecutionInfos = new TaskExecutionInfoArray();
    taskExecutionInfos.add(createTaskExecution(jobExecutionInfo.getJobId(), 0, 1000L, "Test1",
        TableTypeEnum.SNAPSHOT_ONLY, "tm1", "k1" + index, "v1" + index));
    taskExecutionInfos.add(createTaskExecution(jobExecutionInfo.getJobId(), 1, 2000L, "Test2",
        differentTableType ? TableTypeEnum.SNAPSHOT_APPEND : TableTypeEnum.SNAPSHOT_ONLY,
        "tm2", "k2" + index, "v2" + index));
    jobExecutionInfo.setTaskExecutions(taskExecutionInfos);
    this.expectedJobExecutionInfos.add(jobExecutionInfo);
    return jobExecutionInfo;
}

/**
 * Builds one PENDING task execution fixture with a low watermark of 0, a single
 * counter metric and a single task property.
 */
private TaskExecutionInfo createTaskExecution(String jobId, int taskIndex, long highWatermark,
    String tableName, TableTypeEnum tableType, String metricName, String propertyKey, String propertyValue) {
    TaskExecutionInfo taskExecutionInfo = new TaskExecutionInfo();
    taskExecutionInfo.setJobId(jobId);
    taskExecutionInfo.setTaskId(jobId + "_" + taskIndex);
    taskExecutionInfo.setStartTime(System.currentTimeMillis());
    taskExecutionInfo.setState(TaskStateEnum.PENDING);
    taskExecutionInfo.setLowWatermark(0L);
    taskExecutionInfo.setHighWatermark(highWatermark);
    Table table = new Table();
    table.setNamespace("Test");
    table.setName(tableName);
    table.setType(tableType);
    taskExecutionInfo.setTable(table);
    taskExecutionInfo.setMetrics(createCounterMetrics("TASK", metricName));
    Map<String, String> taskProperties = Maps.newHashMap();
    taskProperties.put(propertyKey, propertyValue);
    taskExecutionInfo.setTaskProperties(new StringMap(taskProperties));
    return taskExecutionInfo;
}

/** Builds a MetricArray holding one COUNTER metric with value "100". */
private MetricArray createCounterMetrics(String group, String name) {
    MetricArray metrics = new MetricArray();
    Metric metric = new Metric();
    metric.setGroup(group);
    metric.setName(name);
    metric.setType(MetricTypeEnum.COUNTER);
    metric.setValue("100");
    metrics.add(metric);
    return metrics;
}
Also used : StringMap(com.linkedin.data.template.StringMap) Table(org.apache.gobblin.rest.Table) TaskExecutionInfo(org.apache.gobblin.rest.TaskExecutionInfo) TaskExecutionInfoArray(org.apache.gobblin.rest.TaskExecutionInfoArray) MetricArray(org.apache.gobblin.rest.MetricArray) Metric(org.apache.gobblin.rest.Metric) JobExecutionInfo(org.apache.gobblin.rest.JobExecutionInfo)

Aggregations

TaskExecutionInfo (org.apache.gobblin.rest.TaskExecutionInfo)16 StringMap (com.linkedin.data.template.StringMap)8 PreparedStatement (java.sql.PreparedStatement)7 Map (java.util.Map)7 AbstractMap (java.util.AbstractMap)6 JobExecutionInfo (org.apache.gobblin.rest.JobExecutionInfo)5 ResultSet (java.sql.ResultSet)4 Metric (org.apache.gobblin.rest.Metric)4 MetricArray (org.apache.gobblin.rest.MetricArray)4 Table (org.apache.gobblin.rest.Table)4 SQLException (java.sql.SQLException)3 TaskExecutionInfoArray (org.apache.gobblin.rest.TaskExecutionInfoArray)3 Test (org.testng.annotations.Test)3 Counter (com.codahale.metrics.Counter)1 Gauge (com.codahale.metrics.Gauge)1 Meter (com.codahale.metrics.Meter)1 IOException (java.io.IOException)1 Connection (java.sql.Connection)1 Timestamp (java.sql.Timestamp)1 TaskMetrics (org.apache.gobblin.runtime.util.TaskMetrics)1