Usage of org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity in the Apache incubator-skywalking project.
Example: the prepareBatchInsert method of class GlobalTraceH2PersistenceDAO.
/**
 * Builds the H2 batch-insert statement and parameters for one {@code GlobalTrace} row.
 * The column map's {@code keySet()} and {@code values()} iterate in the same order,
 * so the generated placeholders line up with the parameter array.
 *
 * @param data the global-trace record to persist
 * @return an {@code H2SqlEntity} carrying the INSERT SQL and its bind values
 */
@Override
public H2SqlEntity prepareBatchInsert(GlobalTrace data) {
    // Column name -> column value for this row.
    Map<String, Object> columnValues = new HashMap<>();
    columnValues.put(GlobalTraceTable.COLUMN_ID, data.getId());
    columnValues.put(GlobalTraceTable.COLUMN_SEGMENT_ID, data.getSegmentId());
    columnValues.put(GlobalTraceTable.COLUMN_GLOBAL_TRACE_ID, data.getGlobalTraceId());
    columnValues.put(GlobalTraceTable.COLUMN_TIME_BUCKET, data.getTimeBucket());
    logger.debug("global trace source: {}", columnValues.toString());

    H2SqlEntity insertEntity = new H2SqlEntity();
    insertEntity.setSql(SqlBuilder.buildBatchInsertSql(GlobalTraceTable.TABLE, columnValues.keySet()));
    insertEntity.setParams(columnValues.values().toArray(new Object[0]));
    return insertEntity;
}
Usage of org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity in the Apache incubator-skywalking project.
Example: the prepareBatchUpdate method of class InstanceHeartBeatH2PersistenceDAO.
/**
 * Builds the H2 batch-update statement that refreshes an instance's heartbeat time.
 *
 * @param data the instance whose heartbeat column should be updated
 * @return an {@code H2SqlEntity} carrying the UPDATE SQL and its bind values
 */
@Override
public H2SqlEntity prepareBatchUpdate(Instance data) {
    // Only the heartbeat column is updated.
    Map<String, Object> updatedColumns = new HashMap<>();
    updatedColumns.put(InstanceTable.COLUMN_HEARTBEAT_TIME, data.getHeartBeatTime());

    H2SqlEntity updateEntity = new H2SqlEntity();
    updateEntity.setSql(SqlBuilder.buildBatchUpdateSql(InstanceTable.TABLE, updatedColumns.keySet(), InstanceTable.COLUMN_INSTANCE_ID));

    // SET-clause values first, then the WHERE-clause id as the final parameter.
    List<Object> sqlParams = new ArrayList<>(updatedColumns.values());
    sqlParams.add(data.getId());
    updateEntity.setParams(sqlParams.toArray(new Object[0]));
    return updateEntity;
}
Usage of org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity in the Apache incubator-skywalking project.
Example: the prepareBatchInsert method of class SegmentH2PersistenceDAO.
/**
 * Builds the H2 batch-insert statement and parameters for one {@code Segment} row.
 * keySet() and values() of the same map iterate in the same order, keeping the
 * SQL placeholders aligned with the parameter array.
 *
 * @param data the segment record to persist
 * @return an {@code H2SqlEntity} carrying the INSERT SQL and its bind values
 */
@Override
public H2SqlEntity prepareBatchInsert(Segment data) {
    // Column name -> column value for this row.
    Map<String, Object> columnValues = new HashMap<>();
    columnValues.put(SegmentTable.COLUMN_ID, data.getId());
    columnValues.put(SegmentTable.COLUMN_DATA_BINARY, data.getDataBinary());
    columnValues.put(SegmentTable.COLUMN_TIME_BUCKET, data.getTimeBucket());
    logger.debug("segment source: {}", columnValues.toString());

    H2SqlEntity insertEntity = new H2SqlEntity();
    insertEntity.setSql(SqlBuilder.buildBatchInsertSql(SegmentTable.TABLE, columnValues.keySet()));
    insertEntity.setParams(columnValues.values().toArray(new Object[0]));
    return insertEntity;
}
Usage of org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity in the Apache incubator-skywalking project.
Example: the batchPersistence method of class BatchH2DAO.
/**
 * Persists a heterogeneous batch of entities to H2.
 *
 * <p>Entities are converted to {@code H2SqlEntity} via {@code getH2SqlEntity};
 * entities that share the same SQL string are grouped onto one
 * {@link PreparedStatement} and flushed with a single {@code executeBatch()}.
 *
 * <p>Failures are logged and swallowed (best-effort persistence), matching the
 * original contract. Bug fix: the prepared statements were previously never
 * closed, leaking JDBC resources on every call — they are now closed in a
 * {@code finally} block, on both the success and the exception path.
 *
 * @param batchCollection entities to persist; {@code null} or empty is a no-op
 */
@Override
public void batchPersistence(List<?> batchCollection) {
    if (batchCollection == null || batchCollection.isEmpty()) {
        return;
    }
    logger.debug("the batch collection size is {}", batchCollection.size());
    // SQL string -> prepared statement accumulating the batch for that SQL.
    final Map<String, PreparedStatement> batchSqls = new HashMap<>();
    try {
        Connection conn = getClient().getConnection();
        conn.setAutoCommit(true);
        for (Object entity : batchCollection) {
            H2SqlEntity e = getH2SqlEntity(entity);
            String sql = e.getSql();
            // Single lookup instead of containsKey + get.
            PreparedStatement ps = batchSqls.get(sql);
            if (ps == null) {
                ps = conn.prepareStatement(sql);
                batchSqls.put(sql, ps);
            }
            Object[] params = e.getParams();
            if (params != null) {
                logger.debug("the sql is {}, params size is {}, params: {}", e.getSql(), params.length, params);
                for (int i = 0; i < params.length; i++) {
                    // JDBC parameter indexes are 1-based.
                    ps.setObject(i + 1, params[i]);
                }
            }
            ps.addBatch();
        }
        for (PreparedStatement ps : batchSqls.values()) {
            ps.executeBatch();
        }
    } catch (SQLException | H2ClientException e) {
        logger.error(e.getMessage(), e);
    } finally {
        // Close every statement we prepared; without this each call leaked
        // one PreparedStatement per distinct SQL string.
        for (PreparedStatement ps : batchSqls.values()) {
            try {
                ps.close();
            } catch (SQLException closeFailure) {
                // Best-effort cleanup; nothing more we can do here.
                logger.error(closeFailure.getMessage(), closeFailure);
            }
        }
        batchSqls.clear();
    }
}
Usage of org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity in the Apache incubator-skywalking project.
Example: the prepareBatchInsert method of class SegmentDurationH2PersistenceDAO.
@Override
public H2SqlEntity prepareBatchInsert(SegmentDuration data) {
logger.debug("segment cost prepareBatchInsert, getApplicationId: {}", data.getId());
H2SqlEntity entity = new H2SqlEntity();
Map<String, Object> source = new HashMap<>();
source.put(SegmentDurationTable.COLUMN_ID, data.getId());
source.put(SegmentDurationTable.COLUMN_SEGMENT_ID, data.getSegmentId());
source.put(SegmentDurationTable.COLUMN_APPLICATION_ID, data.getApplicationId());
source.put(SegmentDurationTable.COLUMN_SERVICE_NAME, data.getServiceName());
source.put(SegmentDurationTable.COLUMN_DURATION, data.getDuration());
source.put(SegmentDurationTable.COLUMN_START_TIME, data.getStartTime());
source.put(SegmentDurationTable.COLUMN_END_TIME, data.getEndTime());
source.put(SegmentDurationTable.COLUMN_IS_ERROR, data.getIsError());
source.put(SegmentDurationTable.COLUMN_TIME_BUCKET, data.getTimeBucket());
logger.debug("segment cost source: {}", source.toString());
String sql = SqlBuilder.buildBatchInsertSql(SegmentDurationTable.TABLE, source.keySet());
entity.setSql(sql);
entity.setParams(source.values().toArray(new Object[0]));
return entity;
}
Aggregations