use of org.influxdb.dto.BatchPoints in project xDrip by NightscoutFoundation.
the class InfluxDBUploader method upload.
public boolean upload(List<BgReading> glucoseDataSets, List<Calibration> meterRecords, List<Calibration> calRecords) {
    try {
        BatchPoints batchPoints = BatchPoints.database(dbName)
                .retentionPolicy("autogen")
                .consistency(InfluxDB.ConsistencyLevel.ALL)
                .build();
        // collect glucose readings
        for (BgReading record : glucoseDataSets) {
            if (record == null) {
                Log.e(TAG, "InfluxDB glucose record is null");
                continue;
            }
            batchPoints.point(createGlucosePoint(record));
        }
        // collect meter records
        for (Calibration record : meterRecords) {
            if (record == null) {
                Log.e(TAG, "InfluxDB meter record is null");
                continue;
            }
            batchPoints.point(createMeterPoint(record));
        }
        // collect calibration records, skipping entries with an unusable slope
        for (Calibration record : calRecords) {
            if (record == null) {
                Log.e(TAG, "InfluxDB calibration record is null");
                continue;
            }
            if (record.slope == 0d)
                continue;
            batchPoints.point(createCalibrationPoint(record));
        }
        try {
            Log.d(TAG, "Influx url: " + dbUri);
            InfluxDBFactory.connect(dbUri, dbUser, dbPassword, client).enableGzip().write(batchPoints);
            last_error = null;
            return true;
        } catch (java.lang.ExceptionInInitializerError e) {
            Log.e(TAG, "InfluxDB failed: " + e.getCause());
            return false;
        } catch (java.lang.NoClassDefFoundError e) {
            Log.e(TAG, "InfluxDB failed more: " + e);
            return false;
        } catch (IllegalArgumentException e) {
            Log.wtf(TAG, "InfluxDB problem: " + e);
            return false;
        } catch (Exception e) {
            Log.e(TAG, "Write to InfluxDB failed: " + e);
            last_error = e.getMessage();
            return false;
        }
    } catch (Exception e) {
        Log.wtf(TAG, "Exception during initialization: ", e);
        return false;
    }
}
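The createGlucosePoint, createMeterPoint and createCalibrationPoint helpers are not shown in this excerpt. A minimal sketch of what the glucose helper might look like, assuming BgReading exposes an epoch-millis timestamp and a calculated_value field (both field names are assumptions, not taken from this listing):

    // Sketch only: the measurement name and BgReading field names are assumed.
    private Point createGlucosePoint(BgReading record) {
        return Point.measurement("glucose")
                .time(record.timestamp, TimeUnit.MILLISECONDS) // assumed epoch-millis field
                .addField("value", record.calculated_value)    // assumed glucose value field
                .build();
    }

The same Point.measurement(...).time(...).addField(...).build() pattern would apply to the meter and calibration helpers.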
use of org.influxdb.dto.BatchPoints in project xDrip-plus by jamorham.
the class InfluxDBUploader method upload.
(The implementation is identical to the upload method in the xDrip example above.)
use of org.influxdb.dto.BatchPoints in project jvm-profiler by uber-common.
the class InfluxDBOutputReporter method report.
@Override
public void report(String profilerName, Map<String, Object> metrics) {
    // get DB connection
    ensureInfluxDBCon();
    // format metrics
    logger.info("Profiler Name : " + profilerName);
    Map<String, Object> formattedMetrics = getFormattedMetrics(metrics);
    for (Map.Entry<String, Object> entry : formattedMetrics.entrySet()) {
        logger.info("Formatted Metric-Name = " + entry.getKey() + ", Metric-Value = " + entry.getValue());
    }
    // Point
    Point point = Point.measurement(profilerName)
            .time(System.currentTimeMillis(), TimeUnit.MILLISECONDS)
            .fields(formattedMetrics)
            .tag("processUuid", (String) metrics.get("processUuid"))
            .build();
    // BatchPoints
    BatchPoints batchPoints = BatchPoints.database(database)
            .consistency(ConsistencyLevel.ALL)
            .retentionPolicy("autogen")
            .build();
    batchPoints.point(point);
    // Write
    this.influxDB.write(batchPoints);
}
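ensureInfluxDBCon() is invoked before writing but its body is not part of this excerpt. A plausible lazy-initialization sketch, assuming host, port, username and password configuration fields on the reporter (all assumed names):

    // Sketch only: the connection fields are assumptions, not from the excerpt.
    private void ensureInfluxDBCon() {
        if (this.influxDB != null) {
            return;
        }
        String url = "http://" + host + ":" + port; // assumed connection fields
        this.influxDB = InfluxDBFactory.connect(url, username, password);
    }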
use of org.influxdb.dto.BatchPoints in project camel by apache.
the class InfluxDbProducer method doInsert.
private void doInsert(Exchange exchange, String dataBaseName, String retentionPolicy) throws InvalidPayloadException {
    if (!endpoint.isBatch()) {
        Point p = exchange.getIn().getMandatoryBody(Point.class);
        try {
            LOG.debug("Writing point {}", p.lineProtocol());
            connection.write(dataBaseName, retentionPolicy, p);
        } catch (Exception ex) {
            exchange.setException(new CamelInfluxDbException(ex));
        }
    } else {
        BatchPoints batchPoints = exchange.getIn().getMandatoryBody(BatchPoints.class);
        try {
            LOG.debug("Writing BatchPoints {}", batchPoints.lineProtocol());
            connection.write(batchPoints);
        } catch (Exception ex) {
            exchange.setException(new CamelInfluxDbException(ex));
        }
    }
}
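From the caller's side, the batch branch expects a ready-built BatchPoints as the message body. A minimal usage sketch, assuming a registered connection bean named influxDbBean and illustrative database, retention-policy and measurement values:

    // Illustrative values: "influxDbBean", "myDb" and the "cpu" measurement are assumptions.
    BatchPoints batchPoints = BatchPoints.database("myDb")
            .retentionPolicy("autogen")
            .build();
    batchPoints.point(Point.measurement("cpu")
            .time(System.currentTimeMillis(), TimeUnit.MILLISECONDS)
            .addField("load", 0.93)
            .build());
    producerTemplate.sendBody("influxdb:influxDbBean?databaseName=myDb&retentionPolicy=autogen&batch=true", batchPoints);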
use of org.influxdb.dto.BatchPoints in project openems by OpenEMS.
the class Influx method writeDataToOldMiniMonitoring.
/**
 * Writes data to the old database for old Mini monitoring.
 *
 * XXX remove after full migration
 *
 * @param edge
 * @param influxId
 * @param data
 * @throws OpenemsException
 */
private void writeDataToOldMiniMonitoring(Edge edge, int influxId, TreeBasedTable<Long, String, Object> data) throws OpenemsException {
    InfluxDB influxDB = getInfluxDbConnection();
    BatchPoints batchPoints = BatchPoints.database(database)
            .tag("fems", String.valueOf(influxId))
            .build();
    for (Entry<Long, Map<String, Object>> entry : data.rowMap().entrySet()) {
        Long timestamp = entry.getKey();
        Builder builder = Point.measurement(TMP_MINI_MEASUREMENT).time(timestamp, TimeUnit.MILLISECONDS);
        Map<String, Object> fields = new HashMap<>();
        for (Entry<String, Object> valueEntry : entry.getValue().entrySet()) {
            String channel = valueEntry.getKey();
            Object valueObj = valueEntry.getValue();
            if (valueObj instanceof Number) {
                Long value = ((Number) valueObj).longValue();
                // convert channel ids to old identifiers
                if (channel.equals("ess0/Soc")) {
                    fields.put("Stack_SOC", value);
                    edge.setSoc(value.intValue());
                } else if (channel.equals("meter0/ActivePower")) {
                    fields.put("PCS_Grid_Power_Total", value * -1);
                } else if (channel.equals("meter1/ActivePower")) {
                    fields.put("PCS_PV_Power_Total", value);
                } else if (channel.equals("meter2/ActivePower")) {
                    fields.put("PCS_Load_Power_Total", value);
                }
                // from here value needs to be divided by 10 for backwards compatibility
                value = value / 10;
                if (channel.equals("meter2/Energy")) {
                    fields.put("PCS_Summary_Consumption_Accumulative_cor", value);
                    fields.put("PCS_Summary_Consumption_Accumulative", value);
                } else if (channel.equals("meter0/BuyFromGridEnergy")) {
                    fields.put("PCS_Summary_Grid_Buy_Accumulative_cor", value);
                    fields.put("PCS_Summary_Grid_Buy_Accumulative", value);
                } else if (channel.equals("meter0/SellToGridEnergy")) {
                    fields.put("PCS_Summary_Grid_Sell_Accumulative_cor", value);
                    fields.put("PCS_Summary_Grid_Sell_Accumulative", value);
                } else if (channel.equals("meter1/EnergyL1")) {
                    fields.put("PCS_Summary_PV_Accumulative_cor", value);
                    fields.put("PCS_Summary_PV_Accumulative", value);
                }
            }
        }
        if (fields.size() > 0) {
            builder.fields(fields);
            batchPoints.point(builder.build());
        }
    }
    // write to DB
    influxDB.write(batchPoints);
}
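getInfluxDbConnection() is not shown here. A minimal sketch of such a helper, assuming url, username and password configuration fields (all assumed names; the OpenemsException wrapping is likewise illustrative):

    // Sketch only: the configuration fields and exception message are assumptions.
    private InfluxDB getInfluxDbConnection() throws OpenemsException {
        try {
            return InfluxDBFactory.connect(url, username, password);
        } catch (Exception e) {
            throw new OpenemsException("Unable to connect to InfluxDB: " + e.getMessage());
        }
    }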