use of org.springframework.jdbc.core.PreparedStatementCreator in project irida by phac-nml.
the class AutomatedSISTRUpdate method generateStatements.
@Override
public SqlStatement[] generateStatements(Database database) throws CustomChangeException {
logger.info("Reading existing automated SISTR results files to database. This could take a while...");
JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
Map<String, Long> metadataHeaderIds = new HashMap<>();
int errorCount = 0;
// create the metadata headers
SISTR_FIELDS.entrySet().forEach(e -> {
GeneratedKeyHolder holder = new GeneratedKeyHolder();
jdbcTemplate.update(new PreparedStatementCreator() {
@Override
public PreparedStatement createPreparedStatement(Connection con) throws SQLException {
PreparedStatement statement = con.prepareStatement("INSERT INTO metadata_field (label, type) VALUES (?, 'text')", Statement.RETURN_GENERATED_KEYS);
statement.setString(1, e.getValue());
return statement;
}
}, holder);
// save the metadata header ids
metadataHeaderIds.put(e.getKey(), holder.getKey().longValue());
});
// get all the automated sistr results
List<SISTRFileResult> sistrFileResults = jdbcTemplate.query(
"select a.id, o.sample_id, of.file_path from sequencing_object s"
+ " INNER JOIN analysis_submission a ON s.sistr_typing=a.id"
+ " INNER JOIN sample_sequencingobject o ON o.sequencingobject_id=s.id"
+ " INNER JOIN analysis_output_file_map f ON f.analysis_id=a.analysis_id"
+ " INNER JOIN analysis_output_file of ON f.analysisOutputFilesMap_id=of.id"
+ " WHERE f.analysis_output_file_key='sistr-predictions'",
new RowMapper<SISTRFileResult>() {
@Override
public SISTRFileResult mapRow(ResultSet rs, int rowNum) throws SQLException {
SISTRFileResult sistrFileResult = new SISTRFileResult();
sistrFileResult.submissionId = rs.getLong(1);
sistrFileResult.sampleId = rs.getLong(2);
sistrFileResult.filePath = Paths.get(rs.getString(3));
return sistrFileResult;
}
});
// for each sistr result get the metadata
for (SISTRFileResult sistrFileResult : sistrFileResults) {
Path filePath = outputFileDirectory.resolve(sistrFileResult.filePath);
if (!filePath.toFile().exists()) {
logger.error("SISTR file " + filePath + " does not exist!");
errorCount++;
} else {
try {
// Read the JSON file from SISTR output
@SuppressWarnings("resource")
String jsonFile = new Scanner(new BufferedReader(new FileReader(filePath.toFile()))).useDelimiter("\\Z").next();
// map the results into a Map
ObjectMapper mapper = new ObjectMapper();
List<Map<String, Object>> sistrResults = mapper.readValue(jsonFile, new TypeReference<List<Map<String, Object>>>() {
});
if (sistrResults.size() > 0) {
Map<String, Object> result = sistrResults.get(0);
// loop through each of the requested fields and save the entries
SISTR_FIELDS.entrySet().forEach(e -> {
if (result.containsKey(e.getKey()) && result.get(e.getKey()) != null) {
String value = result.get(e.getKey()).toString();
// insert to metadata_entry
GeneratedKeyHolder holder = new GeneratedKeyHolder();
jdbcTemplate.update(new PreparedStatementCreator() {
@Override
public PreparedStatement createPreparedStatement(Connection con) throws SQLException {
PreparedStatement statement = con.prepareStatement("INSERT INTO metadata_entry (type, value) VALUES ('text', ?)", Statement.RETURN_GENERATED_KEYS);
statement.setString(1, value);
return statement;
}
}, holder);
// save the new entry id
long entryId = holder.getKey().longValue();
// insert the pipeline_metadata_entry
jdbcTemplate.update("INSERT INTO pipeline_metadata_entry (id, submission_id) VALUES (?,?)", entryId, sistrFileResult.submissionId);
// remove existing entries for this metadata key and sample
jdbcTemplate.update("DELETE FROM sample_metadata_entry WHERE sample_id=? AND metadata_KEY=?", sistrFileResult.sampleId, metadataHeaderIds.get(e.getKey()));
// associate with the sample
jdbcTemplate.update("INSERT INTO sample_metadata_entry (sample_id, metadata_id, metadata_KEY) VALUES (?, ?,?)", sistrFileResult.sampleId, entryId, metadataHeaderIds.get(e.getKey()));
}
});
} else {
logger.error("SISTR results for file are not correctly formatted: " + filePath);
}
} catch (IOException e) {
logger.error("Error parsing JSON from SISTR results", e);
}
}
}
if (errorCount > 0) {
logger.error("IRIDA could not read " + errorCount + " automated SISTR result files to update sample metadata. If these results are essential, check your file paths, restore a database backup, and retry the upgrade.");
}
return new SqlStatement[0];
}
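The migration repeats one core pattern: build the INSERT through a PreparedStatementCreator so the driver is told to return generated keys, then read the new id back from a GeneratedKeyHolder. A minimal sketch of just that step, assuming only a configured JdbcTemplate and the same metadata_field table; on Java 8+ a lambda can stand in for the anonymous PreparedStatementCreator (the helper method name is hypothetical, not from the project):
import java.sql.PreparedStatement;
import java.sql.Statement;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.support.GeneratedKeyHolder;

// Sketch only, not taken from the project: insert one row and return its generated key.
private long insertMetadataField(JdbcTemplate jdbcTemplate, String label) {
    GeneratedKeyHolder holder = new GeneratedKeyHolder();
    jdbcTemplate.update(con -> {
        PreparedStatement ps = con.prepareStatement(
                "INSERT INTO metadata_field (label, type) VALUES (?, 'text')",
                Statement.RETURN_GENERATED_KEYS);
        ps.setString(1, label);
        return ps;
    }, holder);
    return holder.getKey().longValue(); // generated primary key of the new row
}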
use of org.springframework.jdbc.core.PreparedStatementCreator in project disconf by knightliao.
the class GenericDao method createAndFetchUpdateRow.
// FIXME
@SuppressWarnings("unchecked")
private int createAndFetchUpdateRow(ENTITY entity, final String sql, final List<Object> params) {
KeyHolder keyHolder = new GeneratedKeyHolder();
recordLog(sql);
// execute the insert
int rowCount = this.jdbcTemplate.update(new PreparedStatementCreator() {
public PreparedStatement createPreparedStatement(Connection connection) throws SQLException {
PreparedStatement ps = connection.prepareStatement(sql);
int index = 1;
for (Object param : params) {
ps.setObject(index++, param);
}
return ps;
}
}, keyHolder);
// if the insert succeeded, fetch the generated key from the keyHolder
if (rowCount != 0 && keyHolder.getKey() != null) {
Class<KEY> keyClass = orMapping.getKeyClass();
if (keyClass.equals(Integer.class)) {
entity.setId((KEY) Integer.valueOf(keyHolder.getKey().intValue()));
} else if (keyClass.equals(Long.class)) {
entity.setId((KEY) Long.valueOf(keyHolder.getKey().longValue()));
}
}
return rowCount;
}
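A point worth flagging in createAndFetchUpdateRow: the statement is created with connection.prepareStatement(sql) and no generated-key hint, so whether keyHolder ends up holding a key depends on the JDBC driver. A sketch of the same update, reusing the sql, params, and keyHolder variables from the method above, with the keys requested explicitly (Statement here is java.sql.Statement):
// Sketch only: ask the driver for generated keys so keyHolder.getKey() is reliable.
int rowCount = this.jdbcTemplate.update(connection -> {
    PreparedStatement ps = connection.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS);
    int index = 1;
    for (Object param : params) {
        ps.setObject(index++, param);
    }
    return ps;
}, keyHolder);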
use of org.springframework.jdbc.core.PreparedStatementCreator in project disconf by knightliao.
the class GenericDao method createAndFetchKey.
/**
 * Insert the entity and populate its id from the generated key.
 *
 * @param entity the entity to persist
 * @param sql    the INSERT statement to execute
 * @param params the ordered parameter values for the statement
 * @return the entity with its id set, or null if the insert did not affect exactly one row
 */
@SuppressWarnings("unchecked")
private ENTITY createAndFetchKey(ENTITY entity, final String sql, final List<Object> params) {
KeyHolder keyHolder = new GeneratedKeyHolder();
recordLog(sql);
// execute the insert
int rowCount = this.jdbcTemplate.update(new PreparedStatementCreator() {
public PreparedStatement createPreparedStatement(Connection connection) throws SQLException {
PreparedStatement ps = connection.prepareStatement(sql);
int index = 1;
for (Object param : params) {
ps.setObject(index++, param);
}
return ps;
}
}, keyHolder);
// if the insert succeeded, fetch the generated key from the keyHolder
if (rowCount != 0 && keyHolder.getKey() != null) {
Class<KEY> keyClass = orMapping.getKeyClass();
if (keyClass.equals(Integer.class)) {
entity.setId((KEY) Integer.valueOf(keyHolder.getKey().intValue()));
} else if (keyClass.equals(Long.class)) {
entity.setId((KEY) Long.valueOf(keyHolder.getKey().longValue()));
}
}
return rowCount == 1 ? entity : null;
}
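Both disconf helpers hand-build the PreparedStatement only to read the generated key back afterwards. For a plain single-table insert, Spring's SimpleJdbcInsert covers the same ground with less code; a minimal sketch under assumed names (table app, key column id, column name — not disconf's real schema, which it derives from its OR mapping):
import java.util.HashMap;
import java.util.Map;
import javax.sql.DataSource;
import org.springframework.jdbc.core.simple.SimpleJdbcInsert;

// Sketch only, hypothetical table and column names.
private long insertAndReturnKey(DataSource dataSource, String name) {
    SimpleJdbcInsert insert = new SimpleJdbcInsert(dataSource)
            .withTableName("app")
            .usingGeneratedKeyColumns("id");
    Map<String, Object> values = new HashMap<>();
    values.put("name", name);
    return insert.executeAndReturnKey(values).longValue();
}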
use of org.springframework.jdbc.core.PreparedStatementCreator in project nixmash-blog by mintster.
the class GithubJobUI method getCurrentGithubId.
public long getCurrentGithubId() {
KeyHolder keyHolder = new GeneratedKeyHolder();
String sql = "SELECT stat_id FROM github_stats WHERE stat_date = current_date()";
long statId = -1;
try {
statId = jdbcTemplate.queryForObject(sql, Long.class);
} catch (EmptyResultDataAccessException e) {
jdbcTemplate.update(new PreparedStatementCreator() {
String INSERT_SQL = "INSERT INTO github_stats (stat_date) VALUES (current_date())";
public PreparedStatement createPreparedStatement(Connection cn) throws SQLException {
PreparedStatement ps = cn.prepareStatement(INSERT_SQL, new String[] { "stat_id" });
return ps;
}
}, keyHolder);
statId = keyHolder.getKey().longValue();
}
logger.info("Current GitHub Stats ID: " + statId);
return statId;
}
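Here the generated key is requested by naming the key column, new String[] { "stat_id" }, rather than by passing Statement.RETURN_GENERATED_KEYS; either form lets the KeyHolder be populated. The same insert step can be written more compactly with a lambda; a sketch reusing the jdbcTemplate, keyHolder, and statId variables from the method above:
// Sketch only: identical insert expressed as a lambda PreparedStatementCreator.
jdbcTemplate.update(cn -> cn.prepareStatement(
        "INSERT INTO github_stats (stat_date) VALUES (current_date())",
        new String[] { "stat_id" }), keyHolder);
statId = keyHolder.getKey().longValue();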