
Example 1 with StringMap

Use of com.linkedin.data.template.StringMap in project incubator-gobblin by apache.

From the class DatabaseJobHistoryStoreV101, the method addPropertiesToJobExecutions:

private void addPropertiesToJobExecutions(Connection connection, JobExecutionQuery query, Map<String, JobExecutionInfo> jobExecutionInfos) throws SQLException {
    if (jobExecutionInfos.size() > 0) {
        Set<String> propertyKeys = null;
        if (query.hasJobProperties()) {
            propertyKeys = Sets.newHashSet(Iterables.filter(Arrays.asList(query.getJobProperties().split(",")), new Predicate<String>() {

                @Override
                public boolean apply(String input) {
                    return !Strings.isNullOrEmpty(input);
                }
            }));
        }
        if (propertyKeys == null || propertyKeys.size() > 0) {
            // Build the lookup query with an IN clause over the job ids, plus an optional IN clause over the requested property keys
            String template = String.format(JOB_PROPERTY_QUERY_STATEMENT_TEMPLATE, getInPredicate(jobExecutionInfos.size()));
            if (propertyKeys != null && propertyKeys.size() > 0) {
                template += String.format(" AND property_key IN (%s)", getInPredicate(propertyKeys.size()));
            }
            int index = 1;
            try (PreparedStatement jobPropertiesQueryStatement = connection.prepareStatement(template)) {
                for (String jobId : jobExecutionInfos.keySet()) {
                    jobPropertiesQueryStatement.setString(index++, jobId);
                }
                if (propertyKeys != null && propertyKeys.size() > 0) {
                    for (String propertyKey : propertyKeys) {
                        jobPropertiesQueryStatement.setString(index++, propertyKey);
                    }
                }
                try (ResultSet jobPropertiesRs = jobPropertiesQueryStatement.executeQuery()) {
                    while (jobPropertiesRs.next()) {
                        // Attach each returned property to its JobExecutionInfo, creating the StringMap lazily
                        String jobId = jobPropertiesRs.getString("job_id");
                        JobExecutionInfo jobExecutionInfo = jobExecutionInfos.get(jobId);
                        StringMap jobProperties = jobExecutionInfo.getJobProperties(GetMode.NULL);
                        if (jobProperties == null) {
                            jobProperties = new StringMap(Maps.<String, String>newHashMap());
                            jobExecutionInfo.setJobProperties(jobProperties);
                        }
                        Map.Entry<String, String> property = resultSetToProperty(jobPropertiesRs);
                        if (propertyKeys == null || propertyKeys.contains(property.getKey())) {
                            jobProperties.put(property.getKey(), property.getValue());
                        }
                    }
                }
            }
        }
    }
}
Also used: StringMap (com.linkedin.data.template.StringMap), ResultSet (java.sql.ResultSet), PreparedStatement (java.sql.PreparedStatement), JobExecutionInfo (org.apache.gobblin.rest.JobExecutionInfo), Map (java.util.Map), AbstractMap (java.util.AbstractMap)
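
The snippet relies on two private helpers that are not shown on this page: getInPredicate, which produces the placeholder list for a SQL IN clause, and resultSetToProperty, which turns a result-set row into a key/value pair. A minimal sketch of what such helpers might look like, assuming Guava's Joiner and the column names property_key and property_value (the real implementation and schema in incubator-gobblin may differ):

// Assumed imports: com.google.common.base.Joiner, java.util.AbstractMap, java.util.Collections,
// java.util.Map, java.sql.ResultSet, java.sql.SQLException
private static String getInPredicate(int count) {
    // "?,?,...,?" with one placeholder per bound value, for use inside an IN (...) clause
    return Joiner.on(",").join(Collections.nCopies(count, "?"));
}

private static Map.Entry<String, String> resultSetToProperty(ResultSet rs) throws SQLException {
    // Column names are an assumption about the job property table
    return new AbstractMap.SimpleEntry<>(rs.getString("property_key"), rs.getString("property_value"));
}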

Example 2 with StringMap

Use of com.linkedin.data.template.StringMap in project incubator-gobblin by apache.

From the class DatabaseJobHistoryStoreV101, the method put:

@Override
public synchronized void put(JobExecutionInfo jobExecutionInfo) throws IOException {
    Optional<Connection> connectionOptional = Optional.absent();
    try {
        connectionOptional = Optional.of(getConnection());
        Connection connection = connectionOptional.get();
        connection.setAutoCommit(false);
        // Insert or update job execution information
        upsertJobExecutionInfo(connection, jobExecutionInfo);
        upsertJobMetrics(connection, jobExecutionInfo);
        upsertJobProperties(connection, jobExecutionInfo);
        // Insert or update task execution information
        if (jobExecutionInfo.hasTaskExecutions()) {
            upsertTaskExecutionInfos(connection, jobExecutionInfo.getTaskExecutions());
            upsertTaskMetrics(connection, jobExecutionInfo.getTaskExecutions());
            Optional<StringMap> jobProperties = Optional.absent();
            if (jobExecutionInfo.hasJobProperties()) {
                jobProperties = Optional.of(jobExecutionInfo.getJobProperties());
            }
            upsertTaskProperties(connection, jobProperties, jobExecutionInfo.getTaskExecutions());
        }
        connection.commit();
    } catch (SQLException se) {
        LOGGER.error("Failed to put a new job execution information record", se);
        if (connectionOptional.isPresent()) {
            try {
                connectionOptional.get().rollback();
            } catch (SQLException se1) {
                LOGGER.error("Failed to rollback", se1);
            }
        }
        throw new IOException(se);
    } finally {
        if (connectionOptional.isPresent()) {
            try {
                connectionOptional.get().close();
            } catch (SQLException se) {
                LOGGER.error("Failed to close connection", se);
            }
        }
    }
}
Also used: StringMap (com.linkedin.data.template.StringMap), SQLException (java.sql.SQLException), Connection (java.sql.Connection), IOException (java.io.IOException)
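
StringMap itself is a thin Pegasus template: in effect a java.util.Map<String, String> backed by a DataMap, which is why the examples on this page can construct one from a plain HashMap and call put on it directly. A standalone sketch of that basic usage (not taken from the project; imports as in the examples above plus java.util.HashMap):

// Build a StringMap from an existing map, or mutate it like any Map<String, String>
Map<String, String> raw = new HashMap<>();
raw.put("k1", "v1");
StringMap jobProperties = new StringMap(raw);
jobProperties.put("job.name", "TestJob");

// JobExecutionInfo carries the map through its generated getter/setter pair
JobExecutionInfo jobExecutionInfo = new JobExecutionInfo();
jobExecutionInfo.setJobProperties(jobProperties);
String jobName = jobExecutionInfo.getJobProperties().get("job.name");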

Example 3 with StringMap

Use of com.linkedin.data.template.StringMap in project incubator-gobblin by apache.

From the class DatabaseJobHistoryStoreTest, the method create:

private JobExecutionInfo create(int index, boolean differentTableType) {
    JobExecutionInfo jobExecutionInfo = new JobExecutionInfo();
    jobExecutionInfo.setJobName("TestJob" + index);
    jobExecutionInfo.setJobId(jobExecutionInfo.getJobName() + "_" + System.currentTimeMillis());
    jobExecutionInfo.setStartTime(System.currentTimeMillis());
    jobExecutionInfo.setState(JobStateEnum.PENDING);
    jobExecutionInfo.setLaunchedTasks(2);
    jobExecutionInfo.setCompletedTasks(0);
    jobExecutionInfo.setLauncherType(LauncherTypeEnum.LOCAL);
    jobExecutionInfo.setTrackingUrl("localhost");
    MetricArray jobMetrics = new MetricArray();
    Metric jobMetric1 = new Metric();
    jobMetric1.setGroup("JOB");
    jobMetric1.setName("jm1");
    jobMetric1.setType(MetricTypeEnum.COUNTER);
    jobMetric1.setValue("100");
    jobMetrics.add(jobMetric1);
    jobExecutionInfo.setMetrics(jobMetrics);
    Map<String, String> jobProperties = Maps.newHashMap();
    jobProperties.put("k" + index, "v" + index);
    jobExecutionInfo.setJobProperties(new StringMap(jobProperties));
    TaskExecutionInfoArray taskExecutionInfos = new TaskExecutionInfoArray();
    TaskExecutionInfo taskExecutionInfo1 = new TaskExecutionInfo();
    taskExecutionInfo1.setJobId(jobExecutionInfo.getJobId());
    taskExecutionInfo1.setTaskId(jobExecutionInfo.getJobId() + "_0");
    taskExecutionInfo1.setStartTime(System.currentTimeMillis());
    taskExecutionInfo1.setState(TaskStateEnum.PENDING);
    taskExecutionInfo1.setLowWatermark(0L);
    taskExecutionInfo1.setHighWatermark(1000L);
    Table table1 = new Table();
    table1.setNamespace("Test");
    table1.setName("Test1");
    table1.setType(TableTypeEnum.SNAPSHOT_ONLY);
    taskExecutionInfo1.setTable(table1);
    MetricArray taskMetrics1 = new MetricArray();
    Metric taskMetric1 = new Metric();
    taskMetric1.setGroup("TASK");
    taskMetric1.setName("tm1");
    taskMetric1.setType(MetricTypeEnum.COUNTER);
    taskMetric1.setValue("100");
    taskMetrics1.add(taskMetric1);
    taskExecutionInfo1.setMetrics(taskMetrics1);
    Map<String, String> taskProperties1 = Maps.newHashMap();
    taskProperties1.put("k1" + index, "v1" + index);
    taskExecutionInfo1.setTaskProperties(new StringMap(taskProperties1));
    taskExecutionInfos.add(taskExecutionInfo1);
    TaskExecutionInfo taskExecutionInfo2 = new TaskExecutionInfo();
    taskExecutionInfo2.setJobId(jobExecutionInfo.getJobId());
    taskExecutionInfo2.setTaskId(jobExecutionInfo.getJobId() + "_1");
    taskExecutionInfo2.setStartTime(System.currentTimeMillis());
    taskExecutionInfo2.setState(TaskStateEnum.PENDING);
    taskExecutionInfo2.setLowWatermark(0L);
    taskExecutionInfo2.setHighWatermark(2000L);
    Table table2 = new Table();
    table2.setNamespace("Test");
    table2.setName("Test2");
    table2.setType(differentTableType ? TableTypeEnum.SNAPSHOT_APPEND : TableTypeEnum.SNAPSHOT_ONLY);
    taskExecutionInfo2.setTable(table2);
    MetricArray taskMetrics2 = new MetricArray();
    Metric taskMetric2 = new Metric();
    taskMetric2.setGroup("TASK");
    taskMetric2.setName("tm2");
    taskMetric2.setType(MetricTypeEnum.COUNTER);
    taskMetric2.setValue("100");
    taskMetrics2.add(taskMetric2);
    taskExecutionInfo2.setMetrics(taskMetrics2);
    Map<String, String> taskProperties2 = Maps.newHashMap();
    taskProperties2.put("k2" + index, "v2" + index);
    taskExecutionInfo2.setTaskProperties(new StringMap(taskProperties2));
    taskExecutionInfos.add(taskExecutionInfo2);
    jobExecutionInfo.setTaskExecutions(taskExecutionInfos);
    this.expectedJobExecutionInfos.add(jobExecutionInfo);
    return jobExecutionInfo;
}
Also used: StringMap (com.linkedin.data.template.StringMap), Table (org.apache.gobblin.rest.Table), TaskExecutionInfo (org.apache.gobblin.rest.TaskExecutionInfo), TaskExecutionInfoArray (org.apache.gobblin.rest.TaskExecutionInfoArray), MetricArray (org.apache.gobblin.rest.MetricArray), Metric (org.apache.gobblin.rest.Metric), JobExecutionInfo (org.apache.gobblin.rest.JobExecutionInfo)
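
The create helper only builds in-memory records and registers them in expectedJobExecutionInfos for later comparison; a test would then hand them to the store whose put method is shown in Example 2. A hedged sketch of that round trip (the jobHistoryStore field and the surrounding TestNG setup are assumptions, not part of the snippet above):

@Test
public void testPut() throws Exception {
    // Persist two records built by the helper above; create() has already added both
    // to this.expectedJobExecutionInfos for later verification against query results
    JobExecutionInfo first = create(0, false);
    JobExecutionInfo second = create(1, true);
    // jobHistoryStore is assumed to be a store of the kind shown in Example 2
    this.jobHistoryStore.put(first);
    this.jobHistoryStore.put(second);
}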

Example 4 with StringMap

Use of com.linkedin.data.template.StringMap in project incubator-gobblin by apache.

From the class JobExecutionInfoServerTest, the method createJobExecutionInfo:

private static JobExecutionInfo createJobExecutionInfo(int index) {
    JobExecutionInfo jobExecutionInfo = new JobExecutionInfo();
    jobExecutionInfo.setJobName("TestJob" + index);
    jobExecutionInfo.setJobId(jobExecutionInfo.getJobName() + "_" + System.currentTimeMillis());
    jobExecutionInfo.setStartTime(System.currentTimeMillis());
    jobExecutionInfo.setState(JobStateEnum.PENDING);
    jobExecutionInfo.setLaunchedTasks(2);
    jobExecutionInfo.setCompletedTasks(0);
    MetricArray jobMetrics = new MetricArray();
    Metric jobMetric1 = new Metric();
    jobMetric1.setGroup("JOB");
    jobMetric1.setName("jm1");
    jobMetric1.setType(MetricTypeEnum.COUNTER);
    jobMetric1.setValue("100");
    jobMetrics.add(jobMetric1);
    jobExecutionInfo.setMetrics(jobMetrics);
    Map<String, String> jobProperties = Maps.newHashMap();
    jobProperties.put("k", "v");
    jobExecutionInfo.setJobProperties(new StringMap(jobProperties));
    TaskExecutionInfoArray taskExecutionInfos = new TaskExecutionInfoArray();
    TaskExecutionInfo taskExecutionInfo1 = new TaskExecutionInfo();
    taskExecutionInfo1.setJobId(jobExecutionInfo.getJobId());
    taskExecutionInfo1.setTaskId(jobExecutionInfo.getJobId() + "_0");
    taskExecutionInfo1.setStartTime(System.currentTimeMillis());
    taskExecutionInfo1.setState(TaskStateEnum.PENDING);
    taskExecutionInfo1.setLowWatermark(0L);
    taskExecutionInfo1.setHighWatermark(1000L);
    Table table1 = new Table();
    table1.setNamespace("Test");
    table1.setName("Test1");
    table1.setType(TableTypeEnum.SNAPSHOT_ONLY);
    taskExecutionInfo1.setTable(table1);
    MetricArray taskMetrics1 = new MetricArray();
    Metric taskMetric1 = new Metric();
    taskMetric1.setGroup("TASK");
    taskMetric1.setName("tm1");
    taskMetric1.setType(MetricTypeEnum.COUNTER);
    taskMetric1.setValue("100");
    taskMetrics1.add(taskMetric1);
    taskExecutionInfo1.setMetrics(taskMetrics1);
    Map<String, String> taskProperties1 = Maps.newHashMap();
    taskProperties1.put("k1", "v1");
    taskExecutionInfo1.setTaskProperties(new StringMap(taskProperties1));
    taskExecutionInfos.add(taskExecutionInfo1);
    TaskExecutionInfo taskExecutionInfo2 = new TaskExecutionInfo();
    taskExecutionInfo2.setJobId(jobExecutionInfo.getJobId());
    taskExecutionInfo2.setTaskId(jobExecutionInfo.getJobId() + "_1");
    taskExecutionInfo2.setStartTime(System.currentTimeMillis());
    taskExecutionInfo2.setState(TaskStateEnum.PENDING);
    taskExecutionInfo2.setLowWatermark(0L);
    taskExecutionInfo2.setHighWatermark(2000L);
    Table table2 = new Table();
    table2.setNamespace("Test");
    table2.setName("Test2");
    table2.setType(TableTypeEnum.SNAPSHOT_ONLY);
    taskExecutionInfo2.setTable(table2);
    MetricArray taskMetrics2 = new MetricArray();
    Metric taskMetric2 = new Metric();
    taskMetric2.setGroup("TASK");
    taskMetric2.setName("tm2");
    taskMetric2.setType(MetricTypeEnum.COUNTER);
    taskMetric2.setValue("100");
    taskMetrics2.add(taskMetric2);
    taskExecutionInfo2.setMetrics(taskMetrics2);
    Map<String, String> taskProperties2 = Maps.newHashMap();
    taskProperties2.put("k2", "v2");
    taskExecutionInfo2.setTaskProperties(new StringMap(taskProperties2));
    taskExecutionInfos.add(taskExecutionInfo2);
    jobExecutionInfo.setTaskExecutions(taskExecutionInfos);
    return jobExecutionInfo;
}
Also used: StringMap (com.linkedin.data.template.StringMap)

Example 5 with StringMap

Use of com.linkedin.data.template.StringMap in project incubator-gobblin by apache.

From the class FlowConfigTest, the method testCreateAgain:

@Test(dependsOnMethods = "testCreate")
public void testCreateAgain() throws Exception {
    Map<String, String> flowProperties = Maps.newHashMap();
    flowProperties.put("param1", "value1");
    FlowConfig flowConfig = new FlowConfig().setId(new FlowId().setFlowGroup(TEST_GROUP_NAME).setFlowName(TEST_FLOW_NAME)).setTemplateUris(TEST_TEMPLATE_URI).setSchedule(new Schedule().setCronSchedule(TEST_SCHEDULE)).setProperties(new StringMap(flowProperties));
    try {
        _client.createFlowConfig(flowConfig);
    } catch (RestLiResponseException e) {
        Assert.fail("Create Again should pass without complaining that the spec already exists.");
    }
}
Also used: StringMap (com.linkedin.data.template.StringMap), RestLiResponseException (com.linkedin.restli.client.RestLiResponseException), Test (org.testng.annotations.Test)
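
The flow parameters ride along in the same StringMap type. A minimal standalone sketch of building a FlowConfig and reading a property back (the literal group and flow names stand in for the test's TEST_GROUP_NAME and TEST_FLOW_NAME constants, and getProperties is assumed to follow the usual Pegasus getter naming rather than being taken from the test):

Map<String, String> flowProperties = Maps.newHashMap();
flowProperties.put("param1", "value1");
FlowConfig flowConfig = new FlowConfig()
    .setId(new FlowId().setFlowGroup("myGroup").setFlowName("myFlow"))
    .setProperties(new StringMap(flowProperties));
// StringMap behaves like a Map<String, String>, so the value can be read back directly
String value = flowConfig.getProperties().get("param1");   // "value1"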

Aggregations

StringMap (com.linkedin.data.template.StringMap): 47
Test (org.testng.annotations.Test): 26
RestLiResponseException (com.linkedin.restli.client.RestLiResponseException): 9
FlowConfig (org.apache.gobblin.service.FlowConfig): 9
FlowId (org.apache.gobblin.service.FlowId): 9
Schedule (org.apache.gobblin.service.Schedule): 9
Map (java.util.Map): 8
IndividualRequest (com.linkedin.restli.common.multiplexer.IndividualRequest): 6
IndividualRequestMap (com.linkedin.restli.common.multiplexer.IndividualRequestMap): 5
TaskExecutionInfo (org.apache.gobblin.rest.TaskExecutionInfo): 5
ByteString (com.linkedin.data.ByteString): 4
DataMap (com.linkedin.data.DataMap): 4
Meter (com.codahale.metrics.Meter): 3
RestRequest (com.linkedin.r2.message.rest.RestRequest): 3
RoutingResult (com.linkedin.restli.internal.server.RoutingResult): 3
PreparedStatement (java.sql.PreparedStatement): 3
AbstractMap (java.util.AbstractMap): 3
JobExecutionInfo (org.apache.gobblin.rest.JobExecutionInfo): 3
Metric (org.apache.gobblin.rest.Metric): 3
MetricArray (org.apache.gobblin.rest.MetricArray): 3