Use of org.apache.gobblin.rest.JobExecutionInfo in project incubator-gobblin by apache.
In class DatabaseJobHistoryStoreTest, method testQueryByJobId:
@Test(dependsOnMethods = { "testUpdate" })
public void testQueryByJobId() throws IOException {
  JobExecutionQuery queryByJobId = new JobExecutionQuery();
  queryByJobId.setIdType(QueryIdTypeEnum.JOB_ID);
  queryByJobId.setId(JobExecutionQuery.Id.create(this.expectedJobExecutionInfos.get(0).getJobId()));

  List<JobExecutionInfo> result = this.jobHistoryStore.get(queryByJobId);
  Assert.assertEquals(result.size(), 1);

  JobExecutionInfo actual = result.get(0);
  JobExecutionInfo expected = this.expectedJobExecutionInfos.get(0);
  assertJobExecution(actual, expected);
}
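For comparison, the same store can also be queried by job name. The snippet below is an illustrative sketch rather than part of the test above; it assumes the QueryIdTypeEnum.JOB_NAME id type and the Pegasus-generated setLimit setter on JobExecutionQuery, and it reuses the test's jobHistoryStore and expectedJobExecutionInfos fixtures.

// Sketch: query by job name instead of job id (assumed QueryIdTypeEnum.JOB_NAME value).
JobExecutionQuery queryByJobName = new JobExecutionQuery();
queryByJobName.setIdType(QueryIdTypeEnum.JOB_NAME);
queryByJobName.setId(JobExecutionQuery.Id.create(this.expectedJobExecutionInfos.get(0).getJobName()));
queryByJobName.setLimit(10); // cap the number of executions returned (assumed setter)
List<JobExecutionInfo> byName = this.jobHistoryStore.get(queryByJobName);
Assert.assertFalse(byName.isEmpty()); // at least one previously stored execution should match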
Use of org.apache.gobblin.rest.JobExecutionInfo in project incubator-gobblin by apache.
In class DatabaseJobHistoryStoreTest, method create:
private JobExecutionInfo create(int index, boolean differentTableType) {
  JobExecutionInfo jobExecutionInfo = new JobExecutionInfo();
  jobExecutionInfo.setJobName("TestJob" + index);
  jobExecutionInfo.setJobId(jobExecutionInfo.getJobName() + "_" + System.currentTimeMillis());
  jobExecutionInfo.setStartTime(System.currentTimeMillis());
  jobExecutionInfo.setState(JobStateEnum.PENDING);
  jobExecutionInfo.setLaunchedTasks(2);
  jobExecutionInfo.setCompletedTasks(0);
  jobExecutionInfo.setLauncherType(LauncherTypeEnum.LOCAL);
  jobExecutionInfo.setTrackingUrl("localhost");

  MetricArray jobMetrics = new MetricArray();
  Metric jobMetric1 = new Metric();
  jobMetric1.setGroup("JOB");
  jobMetric1.setName("jm1");
  jobMetric1.setType(MetricTypeEnum.COUNTER);
  jobMetric1.setValue("100");
  jobMetrics.add(jobMetric1);
  jobExecutionInfo.setMetrics(jobMetrics);

  Map<String, String> jobProperties = Maps.newHashMap();
  jobProperties.put("k" + index, "v" + index);
  jobExecutionInfo.setJobProperties(new StringMap(jobProperties));

  TaskExecutionInfoArray taskExecutionInfos = new TaskExecutionInfoArray();
  TaskExecutionInfo taskExecutionInfo1 = new TaskExecutionInfo();
  taskExecutionInfo1.setJobId(jobExecutionInfo.getJobId());
  taskExecutionInfo1.setTaskId(jobExecutionInfo.getJobId() + "_0");
  taskExecutionInfo1.setStartTime(System.currentTimeMillis());
  taskExecutionInfo1.setState(TaskStateEnum.PENDING);
  taskExecutionInfo1.setLowWatermark(0L);
  taskExecutionInfo1.setHighWatermark(1000L);
  Table table1 = new Table();
  table1.setNamespace("Test");
  table1.setName("Test1");
  table1.setType(TableTypeEnum.SNAPSHOT_ONLY);
  taskExecutionInfo1.setTable(table1);
  MetricArray taskMetrics1 = new MetricArray();
  Metric taskMetric1 = new Metric();
  taskMetric1.setGroup("TASK");
  taskMetric1.setName("tm1");
  taskMetric1.setType(MetricTypeEnum.COUNTER);
  taskMetric1.setValue("100");
  taskMetrics1.add(taskMetric1);
  taskExecutionInfo1.setMetrics(taskMetrics1);
  Map<String, String> taskProperties1 = Maps.newHashMap();
  taskProperties1.put("k1" + index, "v1" + index);
  taskExecutionInfo1.setTaskProperties(new StringMap(taskProperties1));
  taskExecutionInfos.add(taskExecutionInfo1);

  TaskExecutionInfo taskExecutionInfo2 = new TaskExecutionInfo();
  taskExecutionInfo2.setJobId(jobExecutionInfo.getJobId());
  taskExecutionInfo2.setTaskId(jobExecutionInfo.getJobId() + "_1");
  taskExecutionInfo2.setStartTime(System.currentTimeMillis());
  taskExecutionInfo2.setState(TaskStateEnum.PENDING);
  taskExecutionInfo2.setLowWatermark(0L);
  taskExecutionInfo2.setHighWatermark(2000L);
  Table table2 = new Table();
  table2.setNamespace("Test");
  table2.setName("Test2");
  table2.setType(differentTableType ? TableTypeEnum.SNAPSHOT_APPEND : TableTypeEnum.SNAPSHOT_ONLY);
  taskExecutionInfo2.setTable(table2);
  MetricArray taskMetrics2 = new MetricArray();
  Metric taskMetric2 = new Metric();
  taskMetric2.setGroup("TASK");
  taskMetric2.setName("tm2");
  taskMetric2.setType(MetricTypeEnum.COUNTER);
  taskMetric2.setValue("100");
  taskMetrics2.add(taskMetric2);
  taskExecutionInfo2.setMetrics(taskMetrics2);
  Map<String, String> taskProperties2 = Maps.newHashMap();
  taskProperties2.put("k2" + index, "v2" + index);
  taskExecutionInfo2.setTaskProperties(new StringMap(taskProperties2));
  taskExecutionInfos.add(taskExecutionInfo2);

  jobExecutionInfo.setTaskExecutions(taskExecutionInfos);
  this.expectedJobExecutionInfos.add(jobExecutionInfo);
  return jobExecutionInfo;
}
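A typical way to seed the store with these fixtures is an insert test that runs before the query tests. The sketch below is illustrative only, assuming a put(JobExecutionInfo) method on the job history store like the one in Gobblin's JobHistoryStore interface.

@Test
public void testInsert() throws IOException {
  // Persist one job with uniform table types and one whose second table differs,
  // so later queries can exercise both shapes (illustrative seeding, not the original test body).
  this.jobHistoryStore.put(create(0, false));
  this.jobHistoryStore.put(create(1, true));
}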
Use of org.apache.gobblin.rest.JobExecutionInfo in project incubator-gobblin by apache.
In class JobState, method toJobExecutionInfo:
/**
 * Convert this {@link JobState} instance to a {@link JobExecutionInfo} instance.
 *
 * @return a {@link JobExecutionInfo} instance
 */
public JobExecutionInfo toJobExecutionInfo() {
  JobExecutionInfo jobExecutionInfo = new JobExecutionInfo();
  jobExecutionInfo.setJobName(this.jobName);
  jobExecutionInfo.setJobId(this.jobId);
  if (this.startTime > 0) {
    jobExecutionInfo.setStartTime(this.startTime);
  }
  if (this.endTime > 0) {
    jobExecutionInfo.setEndTime(this.endTime);
  }
  jobExecutionInfo.setDuration(this.duration);
  jobExecutionInfo.setState(JobStateEnum.valueOf(this.state.name()));
  jobExecutionInfo.setLaunchedTasks(this.taskCount);
  jobExecutionInfo.setCompletedTasks(this.getCompletedTasks());
  jobExecutionInfo.setLauncherType(getLauncherType());
  if (getTrackingURL().isPresent()) {
    jobExecutionInfo.setTrackingUrl(getTrackingURL().get());
  }

  // Add task execution information
  TaskExecutionInfoArray taskExecutionInfos = new TaskExecutionInfoArray();
  for (TaskState taskState : this.getTaskStates()) {
    taskExecutionInfos.add(taskState.toTaskExecutionInfo());
  }
  jobExecutionInfo.setTaskExecutions(taskExecutionInfos);

  // Add job metrics
  JobMetrics jobMetrics = JobMetrics.get(this);
  MetricArray metricArray = new MetricArray();
  for (Map.Entry<String, ? extends com.codahale.metrics.Metric> entry : jobMetrics.getMetricContext().getCounters().entrySet()) {
    Metric counter = new Metric();
    counter.setGroup(MetricGroup.JOB.name());
    counter.setName(entry.getKey());
    counter.setType(MetricTypeEnum.valueOf(GobblinMetrics.MetricType.COUNTER.name()));
    counter.setValue(Long.toString(((Counter) entry.getValue()).getCount()));
    metricArray.add(counter);
  }
  for (Map.Entry<String, ? extends com.codahale.metrics.Metric> entry : jobMetrics.getMetricContext().getMeters().entrySet()) {
    Metric meter = new Metric();
    meter.setGroup(MetricGroup.JOB.name());
    meter.setName(entry.getKey());
    meter.setType(MetricTypeEnum.valueOf(GobblinMetrics.MetricType.METER.name()));
    meter.setValue(Double.toString(((Meter) entry.getValue()).getMeanRate()));
    metricArray.add(meter);
  }
  for (Map.Entry<String, ? extends com.codahale.metrics.Metric> entry : jobMetrics.getMetricContext().getGauges().entrySet()) {
    Metric gauge = new Metric();
    gauge.setGroup(MetricGroup.JOB.name());
    gauge.setName(entry.getKey());
    gauge.setType(MetricTypeEnum.valueOf(GobblinMetrics.MetricType.GAUGE.name()));
    gauge.setValue(((Gauge<?>) entry.getValue()).getValue().toString());
    metricArray.add(gauge);
  }
  jobExecutionInfo.setMetrics(metricArray);

  // Add job properties
  Map<String, String> jobProperties = Maps.newHashMap();
  for (String name : this.getPropertyNames()) {
    String value = this.getProp(name);
    if (!Strings.isNullOrEmpty(value)) {
      jobProperties.put(name, value);
    }
  }
  jobExecutionInfo.setJobProperties(new StringMap(jobProperties));

  return jobExecutionInfo;
}
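A minimal caller-side sketch of this conversion, assuming the JobState(String jobName, String jobId) constructor and the plain setters that JobStateTest below exercises:

// Illustrative only: build a small JobState and convert it to its REST representation.
long startTime = System.currentTimeMillis();
JobState jobState = new JobState("TestJob", "TestJob-1");
jobState.setStartTime(startTime);
jobState.setEndTime(startTime + 1000);
jobState.setDuration(1000);
jobState.setState(JobState.RunningState.COMMITTED);
// Task states and job metrics are copied into the result as shown in the method above.
JobExecutionInfo info = jobState.toJobExecutionInfo();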
Use of org.apache.gobblin.rest.JobExecutionInfo in project incubator-gobblin by apache.
In class JobStateTest, method testToJobExecutionInfo:
@Test(dependsOnMethods = { "testSetAndGet" })
public void testToJobExecutionInfo() {
  JobExecutionInfo jobExecutionInfo = this.jobState.toJobExecutionInfo();
  Assert.assertEquals(jobExecutionInfo.getJobName(), "TestJob");
  Assert.assertEquals(jobExecutionInfo.getJobId(), "TestJob-1");
  Assert.assertEquals(jobExecutionInfo.getStartTime().longValue(), this.startTime);
  Assert.assertEquals(jobExecutionInfo.getEndTime().longValue(), this.startTime + 1000);
  Assert.assertEquals(jobExecutionInfo.getDuration().longValue(), 1000L);
  Assert.assertEquals(jobExecutionInfo.getState().name(), JobState.RunningState.COMMITTED.name());
  Assert.assertEquals(jobExecutionInfo.getLaunchedTasks().intValue(), 3);
  Assert.assertEquals(jobExecutionInfo.getCompletedTasks().intValue(), 3);
  Assert.assertEquals(jobExecutionInfo.getJobProperties().get("foo"), "bar");

  List<String> taskStateIds = Lists.newArrayList();
  for (TaskExecutionInfo taskExecutionInfo : jobExecutionInfo.getTaskExecutions()) {
    Assert.assertEquals(taskExecutionInfo.getJobId(), "TestJob-1");
    Assert.assertEquals(taskExecutionInfo.getStartTime().longValue(), this.startTime);
    Assert.assertEquals(taskExecutionInfo.getEndTime().longValue(), this.startTime + 1000);
    Assert.assertEquals(taskExecutionInfo.getDuration().longValue(), 1000);
    Assert.assertEquals(taskExecutionInfo.getState().name(), WorkUnitState.WorkingState.COMMITTED.name());
    Assert.assertEquals(taskExecutionInfo.getTaskProperties().get("foo"), "bar");
    taskStateIds.add(taskExecutionInfo.getTaskId());
  }

  Collections.sort(taskStateIds);
  Assert.assertEquals(taskStateIds, Lists.newArrayList("TestTask-0", "TestTask-1", "TestTask-2"));
}
Use of org.apache.gobblin.rest.JobExecutionInfo in project incubator-gobblin by apache.
In class DatabaseJobHistoryStoreV100, method processListQuery:
private List<JobExecutionInfo> processListQuery(Connection connection, JobExecutionQuery query) throws SQLException {
  Preconditions.checkArgument(query.getId().isQueryListType());
  Filter timeRangeFilter = Filter.MISSING;
  QueryListType queryType = query.getId().getQueryListType();
  String listJobExecutionsQuery = "";
  if (queryType == QueryListType.DISTINCT) {
    listJobExecutionsQuery = LIST_DISTINCT_JOB_EXECUTION_QUERY_TEMPLATE;
    if (query.hasTimeRange()) {
      try {
        timeRangeFilter = constructTimeRangeFilter(query.getTimeRange());
        if (timeRangeFilter.isPresent()) {
          listJobExecutionsQuery += " AND " + timeRangeFilter;
        }
      } catch (ParseException pe) {
        LOGGER.error("Failed to parse the query time range", pe);
        throw new SQLException(pe);
      }
    }
  } else {
    listJobExecutionsQuery = LIST_RECENT_JOB_EXECUTION_QUERY_TEMPLATE;
  }
  listJobExecutionsQuery += " ORDER BY last_modified_ts DESC";

  try (PreparedStatement queryStatement = connection.prepareStatement(listJobExecutionsQuery)) {
    int limit = query.getLimit();
    if (limit > 0) {
      queryStatement.setMaxRows(limit);
    }
    if (timeRangeFilter.isPresent()) {
      timeRangeFilter.addParameters(queryStatement, 1);
    }
    try (ResultSet rs = queryStatement.executeQuery()) {
      List<JobExecutionInfo> jobExecutionInfos = Lists.newArrayList();
      while (rs.next()) {
        jobExecutionInfos.add(processQueryById(connection, rs.getString(1), query, Filter.MISSING));
      }
      return jobExecutionInfos;
    }
  }
}
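A client-side query that would route into this list path might look like the sketch below. It is an assumption-laden illustration: it presumes the QueryIdTypeEnum.LIST_TYPE id type, the QueryListType.RECENT enum value, and a jobHistoryStore variable whose get(...) dispatches list-type ids to processListQuery.

// Sketch: ask the store for the most recently modified job executions.
JobExecutionQuery listQuery = new JobExecutionQuery();
listQuery.setIdType(QueryIdTypeEnum.LIST_TYPE); // assumed id type for list queries
listQuery.setId(JobExecutionQuery.Id.create(QueryListType.RECENT)); // recent rather than distinct jobs
listQuery.setLimit(20); // translates to queryStatement.setMaxRows(limit) above
List<JobExecutionInfo> recent = jobHistoryStore.get(listQuery);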