Use of org.apache.gobblin.rest.Metric in project incubator-gobblin by apache.
The class DatabaseJobHistoryStoreV100, method put.
@Override
public synchronized void put(JobExecutionInfo jobExecutionInfo) throws IOException {
  Optional<Connection> connectionOptional = Optional.absent();
  try {
    connectionOptional = Optional.of(getConnection());
    Connection connection = connectionOptional.get();
    connection.setAutoCommit(false);

    // Insert or update job execution information
    if (existsJobExecutionInfo(connection, jobExecutionInfo)) {
      updateJobExecutionInfo(connection, jobExecutionInfo);
    } else {
      insertJobExecutionInfo(connection, jobExecutionInfo);
    }

    // Insert or update job metrics
    if (jobExecutionInfo.hasMetrics()) {
      for (Metric metric : jobExecutionInfo.getMetrics()) {
        boolean insert = !existsMetric(connection, JOB_METRIC_EXIST_QUERY_STATEMENT_TEMPLATE,
            jobExecutionInfo.getJobId(), metric);
        updateMetric(connection,
            insert ? JOB_METRIC_INSERT_STATEMENT_TEMPLATE : JOB_METRIC_UPDATE_STATEMENT_TEMPLATE,
            jobExecutionInfo.getJobId(), metric, insert);
      }
    }

    // Insert or update job properties
    if (jobExecutionInfo.hasJobProperties()) {
      for (Map.Entry<String, String> entry : jobExecutionInfo.getJobProperties().entrySet()) {
        boolean insert = !existsProperty(connection, JOB_PROPERTY_EXIST_QUERY_STATEMENT_TEMPLATE,
            jobExecutionInfo.getJobId(), entry.getKey());
        updateProperty(connection,
            insert ? JOB_PROPERTY_INSERT_STATEMENT_TEMPLATE : JOB_PROPERTY_UPDATE_STATEMENT_TEMPLATE,
            jobExecutionInfo.getJobId(), entry.getKey(), entry.getValue(), insert);
      }
    }

    // Insert or update task execution information
    if (jobExecutionInfo.hasTaskExecutions()) {
      for (TaskExecutionInfo info : jobExecutionInfo.getTaskExecutions()) {
        // Insert or update task execution information
        if (existsTaskExecutionInfo(connection, info)) {
          updateTaskExecutionInfo(connection, info);
        } else {
          insertTaskExecutionInfo(connection, info);
        }

        // Insert or update task metrics
        if (info.hasMetrics()) {
          for (Metric metric : info.getMetrics()) {
            boolean insert = !existsMetric(connection, TASK_METRIC_EXIST_QUERY_STATEMENT_TEMPLATE,
                info.getTaskId(), metric);
            updateMetric(connection,
                insert ? TASK_METRIC_INSERT_STATEMENT_TEMPLATE : TASK_METRIC_UPDATE_STATEMENT_TEMPLATE,
                info.getTaskId(), metric, insert);
          }
        }

        // Insert or update task properties
        if (info.hasTaskProperties()) {
          for (Map.Entry<String, String> entry : info.getTaskProperties().entrySet()) {
            boolean insert = !existsProperty(connection, TASK_PROPERTY_EXIST_QUERY_STATEMENT_TEMPLATE,
                info.getTaskId(), entry.getKey());
            updateProperty(connection,
                insert ? TASK_PROPERTY_INSERT_STATEMENT_TEMPLATE : TASK_PROPERTY_UPDATE_STATEMENT_TEMPLATE,
                info.getTaskId(), entry.getKey(), entry.getValue(), insert);
          }
        }
      }
    }

    connection.commit();
  } catch (SQLException se) {
    LOGGER.error("Failed to put a new job execution information record", se);
    if (connectionOptional.isPresent()) {
      try {
        connectionOptional.get().rollback();
      } catch (SQLException se1) {
        LOGGER.error("Failed to rollback", se1);
      }
    }
    throw new IOException(se);
  } finally {
    if (connectionOptional.isPresent()) {
      try {
        connectionOptional.get().close();
      } catch (SQLException se) {
        LOGGER.error("Failed to close connection", se);
      }
    }
  }
}
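For orientation, a minimal caller-side sketch of handing this method a record that carries one job-level metric. The store construction is elided, and the job name and metric values are illustrative only; the same setters appear in the test at the end of this listing.

JobExecutionInfo jobExecutionInfo = new JobExecutionInfo();
jobExecutionInfo.setJobName("ExampleJob");
jobExecutionInfo.setJobId(jobExecutionInfo.getJobName() + "_" + System.currentTimeMillis());
jobExecutionInfo.setState(JobStateEnum.PENDING);

MetricArray metrics = new MetricArray();
Metric metric = new Metric();
metric.setGroup("JOB");
metric.setName("records_written"); // illustrative metric name
metric.setType(MetricTypeEnum.COUNTER);
metric.setValue("0");
metrics.add(metric);
jobExecutionInfo.setMetrics(metrics);

// jobHistoryStore is assumed to be an already-initialized DatabaseJobHistoryStoreV100
jobHistoryStore.put(jobExecutionInfo);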
Use of org.apache.gobblin.rest.Metric in project incubator-gobblin by apache.
The class DatabaseJobHistoryStoreV101, method resultSetToMetric.
private Metric resultSetToMetric(ResultSet rs) throws SQLException {
  Metric metric = new Metric();
  metric.setGroup(rs.getString("metric_group"));
  metric.setName(rs.getString("metric_name"));
  metric.setType(MetricTypeEnum.valueOf(rs.getString("metric_type")));
  metric.setValue(rs.getString("metric_value"));
  return metric;
}
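The upsert methods below write metrics in the opposite direction through an addMetricToBatch helper whose body is not part of this listing. A sketch of its likely shape, assuming the upsert template binds the owning id first and then the four metric columns in the same order they are read above:

// Sketch only: bind a Metric onto a prepared upsert statement and queue it for batch execution.
// The parameter positions are an assumption about the (unshown) upsert statement template.
private void addMetricToBatch(PreparedStatement upsertStatement, Metric metric, String id) throws SQLException {
  upsertStatement.setString(1, id);
  upsertStatement.setString(2, metric.getGroup());
  upsertStatement.setString(3, metric.getName());
  upsertStatement.setString(4, metric.getType().name());
  upsertStatement.setString(5, metric.getValue());
  upsertStatement.addBatch();
}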
Use of org.apache.gobblin.rest.Metric in project incubator-gobblin by apache.
The class DatabaseJobHistoryStoreV101, method upsertJobMetrics.
private void upsertJobMetrics(Connection connection, JobExecutionInfo jobExecutionInfo) throws SQLException {
  if (jobExecutionInfo.hasMetrics()) {
    Optional<PreparedStatement> upsertStatement = Optional.absent();
    int batchSize = 0;
    for (Metric metric : jobExecutionInfo.getMetrics()) {
      if (!upsertStatement.isPresent()) {
        upsertStatement = Optional.of(connection.prepareStatement(JOB_METRIC_UPSERT_STATEMENT_TEMPLATE));
      }
      addMetricToBatch(upsertStatement.get(), metric, jobExecutionInfo.getJobId());
      if (batchSize++ > 1000) {
        executeBatches(upsertStatement);
        upsertStatement = Optional.absent();
        batchSize = 0;
      }
    }
    executeBatches(upsertStatement);
  }
}
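executeBatches is another helper that is not shown in this listing. A plausible sketch, assuming it simply flushes and releases the prepared statement when one was created:

// Assumed shape of the executeBatches helper used above; a no-op when no statement was prepared.
private void executeBatches(Optional<PreparedStatement> upsertStatement) throws SQLException {
  if (upsertStatement.isPresent()) {
    upsertStatement.get().executeBatch();
    upsertStatement.get().close();
  }
}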
Use of org.apache.gobblin.rest.Metric in project incubator-gobblin by apache.
The class DatabaseJobHistoryStoreV101, method upsertTaskMetrics.
private void upsertTaskMetrics(Connection connection, TaskExecutionInfoArray taskExecutions) throws SQLException {
  Optional<PreparedStatement> upsertStatement = Optional.absent();
  int batchSize = 0;
  for (TaskExecutionInfo taskExecution : taskExecutions) {
    if (taskExecution.hasMetrics()) {
      for (Metric metric : taskExecution.getMetrics()) {
        if (!upsertStatement.isPresent()) {
          upsertStatement = Optional.of(connection.prepareStatement(TASK_METRIC_UPSERT_STATEMENT_TEMPLATE));
        }
        addMetricToBatch(upsertStatement.get(), metric, taskExecution.getTaskId());
        if (batchSize++ > 1000) {
          executeBatches(upsertStatement);
          upsertStatement = Optional.absent();
          batchSize = 0;
        }
      }
    }
  }
  executeBatches(upsertStatement);
}
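Both upsert methods accumulate roughly a thousand rows per batch before flushing and reuse one prepared statement across all metrics (here, across all tasks), which keeps the number of database round trips bounded no matter how many metrics a job reports. The upsert statement template itself is not shown; a hypothetical MySQL-style version, using the column names that resultSetToMetric reads above and an assumed table name, might look like:

// Assumption: the table name and key columns are illustrative, not taken from the source.
private static final String TASK_METRIC_UPSERT_STATEMENT_TEMPLATE =
    "INSERT INTO gobblin_task_metrics (task_id, metric_group, metric_name, metric_type, metric_value) "
        + "VALUES (?, ?, ?, ?, ?) ON DUPLICATE KEY UPDATE metric_value = VALUES(metric_value)";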
Use of org.apache.gobblin.rest.Metric in project incubator-gobblin by apache.
The class DatabaseJobHistoryStoreTest, method create.
private JobExecutionInfo create(int index, boolean differentTableType) {
  JobExecutionInfo jobExecutionInfo = new JobExecutionInfo();
  jobExecutionInfo.setJobName("TestJob" + index);
  jobExecutionInfo.setJobId(jobExecutionInfo.getJobName() + "_" + System.currentTimeMillis());
  jobExecutionInfo.setStartTime(System.currentTimeMillis());
  jobExecutionInfo.setState(JobStateEnum.PENDING);
  jobExecutionInfo.setLaunchedTasks(2);
  jobExecutionInfo.setCompletedTasks(0);
  jobExecutionInfo.setLauncherType(LauncherTypeEnum.LOCAL);
  jobExecutionInfo.setTrackingUrl("localhost");

  MetricArray jobMetrics = new MetricArray();
  Metric jobMetric1 = new Metric();
  jobMetric1.setGroup("JOB");
  jobMetric1.setName("jm1");
  jobMetric1.setType(MetricTypeEnum.COUNTER);
  jobMetric1.setValue("100");
  jobMetrics.add(jobMetric1);
  jobExecutionInfo.setMetrics(jobMetrics);

  Map<String, String> jobProperties = Maps.newHashMap();
  jobProperties.put("k" + index, "v" + index);
  jobExecutionInfo.setJobProperties(new StringMap(jobProperties));

  TaskExecutionInfoArray taskExecutionInfos = new TaskExecutionInfoArray();

  TaskExecutionInfo taskExecutionInfo1 = new TaskExecutionInfo();
  taskExecutionInfo1.setJobId(jobExecutionInfo.getJobId());
  taskExecutionInfo1.setTaskId(jobExecutionInfo.getJobId() + "_0");
  taskExecutionInfo1.setStartTime(System.currentTimeMillis());
  taskExecutionInfo1.setState(TaskStateEnum.PENDING);
  taskExecutionInfo1.setLowWatermark(0L);
  taskExecutionInfo1.setHighWatermark(1000L);
  Table table1 = new Table();
  table1.setNamespace("Test");
  table1.setName("Test1");
  table1.setType(TableTypeEnum.SNAPSHOT_ONLY);
  taskExecutionInfo1.setTable(table1);
  MetricArray taskMetrics1 = new MetricArray();
  Metric taskMetric1 = new Metric();
  taskMetric1.setGroup("TASK");
  taskMetric1.setName("tm1");
  taskMetric1.setType(MetricTypeEnum.COUNTER);
  taskMetric1.setValue("100");
  taskMetrics1.add(taskMetric1);
  taskExecutionInfo1.setMetrics(taskMetrics1);
  Map<String, String> taskProperties1 = Maps.newHashMap();
  taskProperties1.put("k1" + index, "v1" + index);
  taskExecutionInfo1.setTaskProperties(new StringMap(taskProperties1));
  taskExecutionInfos.add(taskExecutionInfo1);

  TaskExecutionInfo taskExecutionInfo2 = new TaskExecutionInfo();
  taskExecutionInfo2.setJobId(jobExecutionInfo.getJobId());
  taskExecutionInfo2.setTaskId(jobExecutionInfo.getJobId() + "_1");
  taskExecutionInfo2.setStartTime(System.currentTimeMillis());
  taskExecutionInfo2.setState(TaskStateEnum.PENDING);
  taskExecutionInfo2.setLowWatermark(0L);
  taskExecutionInfo2.setHighWatermark(2000L);
  Table table2 = new Table();
  table2.setNamespace("Test");
  table2.setName("Test2");
  table2.setType(differentTableType ? TableTypeEnum.SNAPSHOT_APPEND : TableTypeEnum.SNAPSHOT_ONLY);
  taskExecutionInfo2.setTable(table2);
  MetricArray taskMetrics2 = new MetricArray();
  Metric taskMetric2 = new Metric();
  taskMetric2.setGroup("TASK");
  taskMetric2.setName("tm2");
  taskMetric2.setType(MetricTypeEnum.COUNTER);
  taskMetric2.setValue("100");
  taskMetrics2.add(taskMetric2);
  taskExecutionInfo2.setMetrics(taskMetrics2);
  Map<String, String> taskProperties2 = Maps.newHashMap();
  taskProperties2.put("k2" + index, "v2" + index);
  taskExecutionInfo2.setTaskProperties(new StringMap(taskProperties2));
  taskExecutionInfos.add(taskExecutionInfo2);

  jobExecutionInfo.setTaskExecutions(taskExecutionInfos);
  this.expectedJobExecutionInfos.add(jobExecutionInfo);
  return jobExecutionInfo;
}
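Note that the second task deliberately switches to a different table type when differentTableType is true, so that queries filtering by table can be exercised against otherwise similar records. A sketch of how a test might feed the created record to the store; the jobHistoryStore field and the surrounding @Test method are assumptions here:

// Sketch only: persist the synthetic record built by create() and keep it for later verification
// through expectedJobExecutionInfos.
JobExecutionInfo jobExecutionInfo = create(0, true);
this.jobHistoryStore.put(jobExecutionInfo);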