use of com.eveningoutpost.dexdrip.Models.BgReading in project xDrip-plus by jamorham.
the class BestGlucose method getDisplayGlucose.
// note we don't support the original deprecated "predictive" mode
// TODO stale data
// TODO time ago
// TODO check BGgraph data is current
// TODO internalize delta handling to handle irregular periods and missing data plugins etc
// TODO see getSlopeArrowSymbolBeforeCalibration for calculation method for arbitrary slope
// TODO option to process noise or not
// TODO check what happens if there is only a single entry, especially regarding delta
// TODO select by time

/**
 * Builds the best available glucose value for display.
 *
 * Combines the most recent BgReading with (optionally) a calibration-plugin
 * re-calculation, noise compensation from BgGraphBuilder, and a forced
 * "filtered" fallback when noise is very high. Also performs a raw-value
 * sanity check which blanks the result when the sensor data is implausible.
 *
 * @return a fully populated {@code DisplayGlucose}, or {@code null} when
 *         there is no BgReading at all
 */
public static DisplayGlucose getDisplayGlucose() {
    // lazily obtain shared preferences on first use
    if (prefs == null)
        prefs = PreferenceManager.getDefaultSharedPreferences(xdrip.getAppContext());
    // return value
    final DisplayGlucose dg = new DisplayGlucose();
    final boolean doMgdl = (prefs.getString("units", "mgdl").equals("mgdl"));
    final boolean is_follower = Home.get_follower();
    dg.doMgDl = doMgdl;
    // NOTE(review): latest(2) does not take is_follower into account while last()
    // does — confirm followers get a consistent current/previous reading pair here.
    List<BgReading> last_2 = BgReading.latest(2);
    final BgReading lastBgReading = BgReading.last(is_follower);
    if (lastBgReading == null)
        return null; // no data at all - nothing to display
    final CalibrationAbstract.CalibrationData pcalibration;
    final CalibrationAbstract plugin = getCalibrationPluginFromPreferences();
    // -1 acts as the "unset" sentinel for all of these
    double estimate = -1;
    double filtered = -1;
    long timestamp = -1;
    double previous_estimate = -1;
    double previous_filtered = -1;
    long previous_timestamp = -1;
    // normal first: take values straight from the stored readings
    estimate = lastBgReading.calculated_value;
    filtered = lastBgReading.filtered_calculated_value;
    timestamp = lastBgReading.timestamp;
    if (last_2.size() == 2) {
        // we have a previous reading to compute deltas from
        previous_estimate = last_2.get(1).calculated_value;
        previous_filtered = last_2.get(1).filtered_calculated_value;
        previous_timestamp = last_2.get(1).timestamp;
    }
    dg.mssince = JoH.msSince(lastBgReading.timestamp);
    dg.timestamp = lastBgReading.timestamp;
    // if we are actively using a plugin, get the glucose calculation from there
    // (note the pcalibration assignment happens inside the condition)
    if ((plugin != null) && ((pcalibration = plugin.getCalibrationData()) != null) && (Pref.getBoolean("display_glucose_from_plugin", false))) {
        dg.plugin_name = plugin.getAlgorithmName();
        Log.d(TAG, "Using plugin: " + dg.plugin_name);
        dg.from_plugin = true;
        estimate = plugin.getGlucoseFromBgReading(lastBgReading, pcalibration);
        filtered = plugin.getGlucoseFromFilteredBgReading(lastBgReading, pcalibration);
        // also try to update the previous values in the same way
        if (last_2.size() == 2) {
            previous_estimate = plugin.getGlucoseFromBgReading(last_2.get(1), pcalibration);
            previous_filtered = plugin.getGlucoseFromFilteredBgReading(last_2.get(1), pcalibration);
        }
    }
    int warning_level = 0;
    String slope_arrow = "";
    String slope_name = "";
    String extrastring = "";
    double estimated_delta = 0;
    // TODO refresh bggraph if needed based on cache - observe
    // should this be conditional on whether bg_compensate_noise is set?
    BgGraphBuilder.refreshNoiseIfOlderThan(dg.timestamp);
    dg.noise = BgGraphBuilder.last_noise;
    boolean bg_from_filtered = prefs.getBoolean("bg_from_filtered", false);
    // if noise has settled down then switch off filtered mode
    if ((bg_from_filtered) && (BgGraphBuilder.last_noise < BgGraphBuilder.NOISE_FORGIVE) && (prefs.getBoolean("bg_compensate_noise", false))) {
        bg_from_filtered = false;
        prefs.edit().putBoolean("bg_from_filtered", false).apply();
    }
    // TODO Noise uses plugin in bggraphbuilder
    if (compensateNoise()) {
        // noise-compensated path: use BgGraphBuilder's smoothed estimates
        // this maybe needs scaling based on noise intensity
        estimate = BgGraphBuilder.best_bg_estimate;
        estimated_delta = BgGraphBuilder.best_bg_estimate - BgGraphBuilder.last_bg_estimate;
        // TODO handle ratio when period is not dexcom period?
        double estimated_delta_by_minute = estimated_delta / (BgGraphBuilder.DEXCOM_PERIOD / 60000);
        // slope is stored per millisecond (the non-noise branch below multiplies
        // calculateSlope output by 60000 for per-minute display) — presumed
        // consistent units, confirm against DisplayGlucose consumers
        dg.slope = estimated_delta_by_minute / 60000;
        dg.unitized_delta_no_units = BgGraphBuilder.unitizedDeltaStringRaw(false, true, estimated_delta, doMgdl);
        // TODO optimize adding units
        dg.unitized_delta = BgGraphBuilder.unitizedDeltaStringRaw(true, true, estimated_delta, doMgdl);
        // delta by minute
        slope_arrow = BgReading.slopeToArrowSymbol(estimated_delta_by_minute);
        slope_name = BgReading.slopeName(estimated_delta_by_minute);
        // warning symbol !
        extrastring = "\u26A0";
        warning_level = 1;
        if ((BgGraphBuilder.last_noise > BgGraphBuilder.NOISE_HIGH) && (DexCollectionType.hasFiltered())) {
            // force filtered mode
            bg_from_filtered = true;
        }
    } else {
        // plain path: deltas computed directly from the last two readings
        // TODO ignores plugin
        // dg.unitized_delta = BgGraphBuilder.unitizedDeltaString(true, true, is_follower , doMgdl);
        dg.unitized_delta_no_units = unitizedDeltaString(false, true, doMgdl, estimate, timestamp, previous_estimate, previous_timestamp);
        // TODO time stretch adjustment?
        estimated_delta = estimate - previous_estimate;
        // TODO optimize adding units
        dg.unitized_delta = unitizedDeltaString(true, true, doMgdl, estimate, timestamp, previous_estimate, previous_timestamp);
        long time_delta = timestamp - previous_timestamp;
        if (time_delta < 0)
            Log.wtf(TAG, "Time delta is negative! : " + time_delta);
        // slope_arrow = lastBgReading.slopeArrow(); // internalize this for plugins
        double slope = calculateSlope(estimate, timestamp, previous_estimate, previous_timestamp);
        dg.slope = slope;
        // slope by minute
        slope_arrow = BgReading.slopeToArrowSymbol(slope * 60000);
        slope_name = BgReading.slopeName(slope * 60000);
        Log.d(TAG, "No noise option slope by minute: " + JoH.qs(slope * 60000, 5));
    }
    // TODO bit more work on deltas etc needed here
    if (bg_from_filtered) {
        // very noisy: show the filtered value instead, at higher warning level
        estimate = filtered;
        warning_level = 2;
    }
    dg.unitized_value = BgGraphBuilder.unitized(estimate, doMgdl);
    final String stringEstimate = BgGraphBuilder.unitized_string(estimate, doMgdl);
    if ((lastBgReading.hide_slope) || (bg_from_filtered)) {
        // slope is meaningless/hidden in these modes
        slope_arrow = "";
        slope_name = "NOT COMPUTABLE";
    }
    dg.mgdl = estimate;
    dg.delta_mgdl = estimated_delta;
    dg.warning = warning_level;
    dg.unitized = stringEstimate;
    dg.delta_arrow = slope_arrow;
    dg.extra_string = extrastring;
    dg.delta_name = slope_name;
    // to be moved one day
    // raw-value sanity check: blank everything rather than show implausible data
    if (!SensorSanity.isRawValueSane(lastBgReading.raw_data)) {
        dg.delta_arrow = "!";
        dg.unitized = ">!?";
        dg.mgdl = 0;
        dg.delta_mgdl = 0;
        dg.unitized_value = 0;
        dg.unitized_delta = "";
        dg.slope = 0;
        if (JoH.ratelimit("exceeding_max_raw", 120)) {
            UserError.Log.wtf(TAG, "Failing raw bounds validation: " + lastBgReading.raw_data);
        }
    }
    if (d)
        Log.d(TAG, "dg result: " + dg.unitized + " previous: " + BgGraphBuilder.unitized_string(previous_estimate, doMgdl));
    return dg;
}
use of com.eveningoutpost.dexdrip.Models.BgReading in project xDrip-plus by jamorham.
the class FakeNumbers method addListenerOnButton.
/**
 * Wires up the three debug buttons on this screen: logging a fake BG value,
 * resetting/creating a test alert, and running the alert self-tests.
 *
 * Fix: the fake-BG button previously crashed with NumberFormatException when
 * the input field was empty or non-numeric; it now flags the field instead.
 */
public void addListenerOnButton() {
    button = (Button) findViewById(R.id.log);
    button.setOnClickListener(new View.OnClickListener() {
        public void onClick(View v) {
            final EditText value = (EditText) findViewById(R.id.bg_value);
            final int intValue;
            try {
                intValue = Integer.parseInt(value.getText().toString());
            } catch (NumberFormatException e) {
                // empty or non-numeric input: don't crash the debug screen
                value.setError("Enter a whole number");
                return;
            }
            int filteredValue = intValue;
            if (intValue > 200) {
                // crude simulation of a diverging filtered value at high BG
                filteredValue = (int) (filteredValue * 1.2);
            }
            // called for its side effect of persisting a reading; return value unused
            BgReading.create(intValue * 1000, filteredValue * 1000, getApplicationContext(), new Date().getTime());
            startActivity(new Intent(getApplicationContext(), Home.class));
            finish();
        }
    });
    button = (Button) findViewById(R.id.StartTest);
    button.setOnClickListener(new View.OnClickListener() {
        public void onClick(View v) {
            // NOTE(review): getOnly() result was previously assigned to an unused
            // local; call retained in case it has side effects — confirm and remove
            ActiveBgAlert.getOnly();
            ActiveBgAlert.ClearData();
            ActiveBgAlert.Create("some string", true, new Date().getTime());
        }
    });
    button = (Button) findViewById(R.id.StartTestAlerts);
    button.setOnClickListener(new View.OnClickListener() {
        public void onClick(View v) {
            AlertType.testAll(getApplicationContext());
            BgReading.TestgetUnclearTimes();
        }
    });
}
use of com.eveningoutpost.dexdrip.Models.BgReading in project xDrip-plus by jamorham.
the class NightscoutUploader method doMongoUpload.
/**
 * Uploads BG readings, meter records, calibrations, device status and queued
 * treatments directly to a MongoDB instance configured in preferences.
 *
 * Fixes:
 *  - {@code uri.getHost()} can return null for opaque/malformed URIs; this
 *    previously threw an uncaught NPE before the main try block.
 *  - The failure toast used {@code e.getMessage().substring(0, 51)}, which
 *    crashed (NPE or StringIndexOutOfBounds) whenever the message was null or
 *    shorter than 51 characters, masking the real upload error.
 *
 * @param prefs           shared preferences holding the mongo connection settings
 * @param glucoseDataSets BG readings to upload as "sgv" entries
 * @param meterRecords    calibrations uploaded as "mbg" meter entries
 * @param calRecords      calibrations uploaded as "cal" entries
 * @return true when the upload completed successfully, false otherwise
 */
private boolean doMongoUpload(SharedPreferences prefs, List<BgReading> glucoseDataSets, List<Calibration> meterRecords, List<Calibration> calRecords) {
    final SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ", Locale.US);
    format.setTimeZone(TimeZone.getDefault());
    final String dbURI = prefs.getString("cloud_storage_mongodb_uri", null);
    if (dbURI != null) {
        try {
            final URI uri = new URI(dbURI.trim());
            final String host = uri.getHost(); // may be null for opaque/malformed URIs
            // optionally skip uploads to LAN-only addresses when not on the LAN
            if ((host != null) && (host.startsWith("192.168.")) && prefs.getBoolean("skip_lan_uploads_when_no_lan", true) && (!JoH.isLANConnected())) {
                Log.d(TAG, "Skipping mongo upload to: " + dbURI + " due to no LAN connection");
                return false;
            }
        } catch (URISyntaxException e) {
            UserError.Log.e(TAG, "Invalid mongo URI: " + e);
        }
    }
    final String collectionName = prefs.getString("cloud_storage_mongodb_collection", null);
    final String dsCollectionName = prefs.getString("cloud_storage_mongodb_device_status_collection", "devicestatus");
    if (dbURI != null && collectionName != null) {
        try {
            // connect to db
            MongoClientURI uri = new MongoClientURI(dbURI.trim() + "?socketTimeoutMS=180000");
            MongoClient client = new MongoClient(uri);
            // get db
            DB db = client.getDB(uri.getDatabase());
            // get collection
            DBCollection dexcomData = db.getCollection(collectionName.trim());
            try {
                Log.i(TAG, "The number of EGV records being sent to MongoDB is " + glucoseDataSets.size());
                for (BgReading record : glucoseDataSets) {
                    // make db object
                    BasicDBObject testData = new BasicDBObject();
                    testData.put("device", "xDrip-" + prefs.getString("dex_collection_method", "BluetoothWixel"));
                    if (record != null) {
                        // KS
                        testData.put("date", record.timestamp);
                        testData.put("dateString", format.format(record.timestamp));
                        testData.put("sgv", Math.round(record.calculated_value));
                        testData.put("direction", record.slopeName());
                        testData.put("type", "sgv");
                        testData.put("filtered", record.ageAdjustedFiltered() * 1000);
                        testData.put("unfiltered", record.usedRaw() * 1000);
                        testData.put("rssi", 100);
                        testData.put("noise", record.noiseValue());
                        dexcomData.insert(testData, WriteConcern.UNACKNOWLEDGED);
                    } else
                        Log.e(TAG, "MongoDB BG record is null.");
                }
                Log.i(TAG, "The number of MBG records being sent to MongoDB is " + meterRecords.size());
                for (Calibration meterRecord : meterRecords) {
                    // make db object
                    BasicDBObject testData = new BasicDBObject();
                    testData.put("device", "xDrip-" + prefs.getString("dex_collection_method", "BluetoothWixel"));
                    testData.put("type", "mbg");
                    testData.put("date", meterRecord.timestamp);
                    testData.put("dateString", format.format(meterRecord.timestamp));
                    testData.put("mbg", meterRecord.bg);
                    dexcomData.insert(testData, WriteConcern.UNACKNOWLEDGED);
                }
                for (Calibration calRecord : calRecords) {
                    // do not upload undefined slopes
                    // NOTE(review): break aborts ALL remaining cal records on the first
                    // zero slope rather than skipping just that one — confirm intent
                    if (calRecord.slope == 0d)
                        break;
                    // make db object
                    BasicDBObject testData = new BasicDBObject();
                    testData.put("device", "xDrip-" + prefs.getString("dex_collection_method", "BluetoothWixel"));
                    testData.put("date", calRecord.timestamp);
                    testData.put("dateString", format.format(calRecord.timestamp));
                    if (calRecord.check_in) {
                        testData.put("slope", (calRecord.first_slope));
                        testData.put("intercept", ((calRecord.first_intercept)));
                        testData.put("scale", calRecord.first_scale);
                    } else {
                        // convert to nightscout's expected slope/intercept representation
                        testData.put("slope", (1000 / calRecord.slope));
                        testData.put("intercept", ((calRecord.intercept * -1000) / (calRecord.slope)));
                        testData.put("scale", 1);
                    }
                    testData.put("type", "cal");
                    dexcomData.insert(testData, WriteConcern.UNACKNOWLEDGED);
                }
                // TODO: quick port from original code, revisit before release
                DBCollection dsCollection = db.getCollection(dsCollectionName);
                BasicDBObject devicestatus = new BasicDBObject();
                devicestatus.put("uploaderBattery", getBatteryLevel());
                devicestatus.put("created_at", format.format(System.currentTimeMillis()));
                dsCollection.insert(devicestatus, WriteConcern.UNACKNOWLEDGED);
                // treatments mongo sync using unified queue
                Log.d(TAG, "Starting treatments mongo direct");
                final long THIS_QUEUE = UploaderQueue.MONGO_DIRECT;
                final DBCollection treatmentDb = db.getCollection("treatments");
                final List<UploaderQueue> tups = UploaderQueue.getPendingbyType(Treatments.class.getSimpleName(), THIS_QUEUE);
                if (tups != null) {
                    for (UploaderQueue up : tups) {
                        if ((up.action.equals("insert") || (up.action.equals("update")))) {
                            Treatments treatment = Treatments.byid(up.reference_id);
                            if (treatment != null) {
                                BasicDBObject record = new BasicDBObject();
                                record.put("timestamp", treatment.timestamp);
                                record.put("eventType", treatment.eventType);
                                record.put("enteredBy", treatment.enteredBy);
                                if (treatment.notes != null)
                                    record.put("notes", treatment.notes);
                                record.put("uuid", treatment.uuid);
                                record.put("carbs", treatment.carbs);
                                record.put("insulin", treatment.insulin);
                                record.put("created_at", treatment.created_at);
                                final BasicDBObject searchQuery = new BasicDBObject().append("uuid", treatment.uuid);
                                // treatmentDb.insert(record, WriteConcern.UNACKNOWLEDGED);
                                Log.d(TAG, "Sending upsert for: " + treatment.toJSON());
                                // upsert keyed on uuid so retries don't duplicate
                                treatmentDb.update(searchQuery, record, true, false);
                            } else {
                                Log.d(TAG, "Got null for treatment id: " + up.reference_id);
                            }
                            up.completed(THIS_QUEUE);
                        } else if (up.action.equals("delete")) {
                            if (up.reference_uuid != null) {
                                Log.d(TAG, "Processing treatment delete mongo sync for: " + up.reference_uuid);
                                final BasicDBObject searchQuery = new BasicDBObject().append("uuid", up.reference_uuid);
                                Log.d(TAG, treatmentDb.remove(searchQuery, WriteConcern.UNACKNOWLEDGED).toString());
                            }
                            up.completed(THIS_QUEUE);
                        } else {
                            Log.e(TAG, "Unsupported operation type for treatment: " + up.action);
                        }
                    }
                    Log.d(TAG, "Processed " + tups.size() + " Treatment mongo direct upload records");
                }
                client.close();
                failurecount = 0;
                return true;
            } catch (Exception e) {
                Log.e(TAG, "Unable to upload data to mongo " + e.getMessage());
                failurecount++;
                if (failurecount > 4) {
                    // getMessage() may be null and may be shorter than the old fixed
                    // substring(0, 51) — both previously crashed inside this handler
                    final String emsg = (e.getMessage() != null) ? e.getMessage() : e.toString();
                    Home.toaststaticnext("Mongo " + failurecount + " up fails: " + emsg.substring(0, Math.min(emsg.length(), 51)));
                }
            } finally {
                // defensive double-close: also covers the exception paths above
                if (client != null) {
                    client.close();
                }
            }
        } catch (Exception e) {
            Log.e(TAG, "Unable to upload data to mongo " + e.getMessage());
        }
    }
    return false;
}
use of com.eveningoutpost.dexdrip.Models.BgReading in project xDrip-plus by jamorham.
the class NightscoutUploader method doRESTUploadTo.
/**
 * Uploads queued records to Nightscout via the v1 REST API: BG entries, meter
 * readings and calibrations in a single batch, followed by device status,
 * treatments (if enabled) and optional health data.
 *
 * Fix: the "Not Found" detection used {@code e.getMessage().equals(...)},
 * which threw an NPE whenever the exception carried a null message; flipped
 * to a null-safe constant-first comparison.
 *
 * @param nightscoutService retrofit service for the target Nightscout instance
 * @param secret            hashed API secret header value
 * @param glucoseDataSets   BG readings to upload
 * @param meterRecords      finger-stick blood tests to upload
 * @param calRecords        calibrations, uploaded as cal entries (and as meter
 *                          entries when no matching blood test exists)
 * @throws Exception when the batch entry upload fails (treatment/health
 *                   failures are logged but not rethrown)
 */
private void doRESTUploadTo(NightscoutService nightscoutService, String secret, List<BgReading> glucoseDataSets, List<BloodTest> meterRecords, List<Calibration> calRecords) throws Exception {
    final JSONArray array = new JSONArray();
    for (BgReading record : glucoseDataSets) {
        populateV1APIBGEntry(array, record);
    }
    for (BloodTest record : meterRecords) {
        populateV1APIMeterReadingEntry(array, record);
    }
    for (Calibration record : calRecords) {
        // skip the synthetic meter entry if a real blood test exists within a minute
        final BloodTest dupe = BloodTest.getForPreciseTimestamp(record.timestamp, 60000);
        if (dupe == null) {
            // also add calibrations as meter records
            populateV1APIMeterReadingEntry(array, record);
        } else {
            Log.d(TAG, "Found duplicate blood test entry for this calibration record: " + record.bg + " vs " + dupe.mgdl + " mg/dl");
        }
        populateV1APICalibrationEntry(array, record);
    }
    if (array.length() > 0) {
        // KS
        final RequestBody body = RequestBody.create(MediaType.parse("application/json"), array.toString());
        final Response<ResponseBody> r = nightscoutService.upload(secret, body).execute();
        if (!r.isSuccess())
            throw new UploaderException(r.message(), r.code());
        checkGzipSupport(r);
        try {
            postDeviceStatus(nightscoutService, secret);
        } catch (Exception e) {
            // device status is best-effort; don't fail the whole upload over it
            Log.e(TAG, "Ignoring devicestatus post exception: " + e);
        }
    }
    try {
        if (Pref.getBooleanDefaultFalse("send_treatments_to_nightscout")) {
            postTreatments(nightscoutService, secret);
        } else {
            Log.d(TAG, "Skipping treatment upload due to preference disabled");
        }
    } catch (Exception e) {
        Log.e(TAG, "Exception uploading REST API treatments: " + e.getMessage());
        // constant-first equals: getMessage() can legitimately be null here
        if ("Not Found".equals(e.getMessage())) {
            final String msg = "Please ensure careportal plugin is enabled on nightscout for treatment upload!";
            Log.wtf(TAG, msg);
            Home.toaststaticnext(msg);
            handleRestFailure(msg);
        }
    }
    // TODO in the future we may want to merge these in to a single post
    if (Pref.getBooleanDefaultFalse("use_pebble_health") && (Home.get_engineering_mode())) {
        try {
            postHeartRate(nightscoutService, secret);
            postStepsCount(nightscoutService, secret);
            postMotionTracking(nightscoutService, secret);
        } catch (Exception e) {
            if (JoH.ratelimit("heartrate-upload-exception", 3600)) {
                Log.e(TAG, "Exception uploading REST API heartrate: " + e.getMessage());
            }
        }
    }
}
use of com.eveningoutpost.dexdrip.Models.BgReading in project xDrip-plus by jamorham.
the class UploaderTask method doInBackground.
/**
 * Drains the uploader queue for every enabled upload circuit (Wear, Mongo,
 * Nightscout REST, InfluxDB): collects pending records per type, dispatches
 * them to the matching uploader, and marks queue items completed only when
 * the whole batch for that circuit succeeded.
 *
 * @param urls unused; required by the AsyncTask signature
 * @return always null; failures are recorded in {@code exception}
 */
public Void doInBackground(String... urls) {
    try {
        // the set of upload circuits enabled by preferences
        final List<Long> circuits = new ArrayList<>();
        // the record types this task knows how to dispatch
        final List<String> types = new ArrayList<>();
        types.add(BgReading.class.getSimpleName());
        types.add(Calibration.class.getSimpleName());
        types.add(BloodTest.class.getSimpleName());
        types.add(Treatments.class.getSimpleName());
        if (Pref.getBooleanDefaultFalse("wear_sync")) {
            circuits.add(UploaderQueue.WATCH_WEARAPI);
        }
        if (Pref.getBooleanDefaultFalse("cloud_storage_mongodb_enable")) {
            circuits.add(UploaderQueue.MONGO_DIRECT);
        }
        if (Pref.getBooleanDefaultFalse("cloud_storage_api_enable")) {
            // Nightscout only runs on mobile data if the user allows it, or on LAN
            if ((Pref.getBoolean("cloud_storage_api_use_mobile", true) || (JoH.isLANConnected()))) {
                circuits.add(UploaderQueue.NIGHTSCOUT_RESTAPI);
            } else {
                Log.e(TAG, "Skipping Nightscout upload due to mobile data only");
            }
        }
        if (Pref.getBooleanDefaultFalse("cloud_storage_influxdb_enable")) {
            circuits.add(UploaderQueue.INFLUXDB_RESTAPI);
        }
        for (long THIS_QUEUE : circuits) {
            // per-circuit batches, rebuilt from the queue each time
            final List<BgReading> bgReadings = new ArrayList<>();
            final List<Calibration> calibrations = new ArrayList<>();
            final List<BloodTest> bloodtests = new ArrayList<>();
            final List<Treatments> treatmentsAdd = new ArrayList<>();
            final List<String> treatmentsDel = new ArrayList<>();
            // queue entries to mark completed if the upload succeeds
            final List<UploaderQueue> items = new ArrayList<>();
            for (String type : types) {
                final List<UploaderQueue> bgups = UploaderQueue.getPendingbyType(type, THIS_QUEUE);
                if (bgups != null) {
                    for (UploaderQueue up : bgups) {
                        switch(up.action) {
                            case "insert":
                            case "update":
                            case "create":
                                items.add(up);
                                // resolve the queued reference id back to a live record;
                                // records deleted since queueing are logged and skipped
                                if (type.equals(BgReading.class.getSimpleName())) {
                                    final BgReading this_bg = BgReading.byid(up.reference_id);
                                    if (this_bg != null) {
                                        bgReadings.add(this_bg);
                                    } else {
                                        Log.wtf(TAG, "BgReading with ID: " + up.reference_id + " appears to have been deleted");
                                    }
                                } else if (type.equals(Calibration.class.getSimpleName())) {
                                    final Calibration this_cal = Calibration.byid(up.reference_id);
                                    if ((this_cal != null) && (this_cal.isValid())) {
                                        calibrations.add(this_cal);
                                    } else {
                                        Log.wtf(TAG, "Calibration with ID: " + up.reference_id + " appears to have been deleted");
                                    }
                                } else if (type.equals(BloodTest.class.getSimpleName())) {
                                    final BloodTest this_bt = BloodTest.byid(up.reference_id);
                                    if (this_bt != null) {
                                        bloodtests.add(this_bt);
                                    } else {
                                        Log.wtf(TAG, "Bloodtest with ID: " + up.reference_id + " appears to have been deleted");
                                    }
                                } else if (type.equals(Treatments.class.getSimpleName())) {
                                    final Treatments this_treat = Treatments.byid(up.reference_id);
                                    if (this_treat != null) {
                                        treatmentsAdd.add(this_treat);
                                    } else {
                                        Log.wtf(TAG, "Treatments with ID: " + up.reference_id + " appears to have been deleted");
                                    }
                                }
                                break;
                            case "delete":
                                // only treatment deletes are supported, and only on Wear/REST circuits
                                if ((THIS_QUEUE == UploaderQueue.WATCH_WEARAPI || THIS_QUEUE == UploaderQueue.NIGHTSCOUT_RESTAPI) && type.equals(Treatments.class.getSimpleName())) {
                                    items.add(up);
                                    Log.wtf(TAG, "Delete Treatments with ID: " + up.reference_uuid);
                                    treatmentsDel.add(up.reference_uuid);
                                } else if (up.reference_uuid != null) {
                                    Log.d(TAG, UploaderQueue.getCircuitName(THIS_QUEUE) + " delete not yet implemented: " + up.reference_uuid);
                                    // mark as completed so as not to tie up the queue for now
                                    up.completed(THIS_QUEUE);
                                }
                                break;
                            default:
                                Log.e(TAG, "Unsupported operation type for " + type + " " + up.action);
                                break;
                        }
                    }
                }
            }
            if ((bgReadings.size() > 0) || (calibrations.size() > 0) || (bloodtests.size() > 0) || (treatmentsAdd.size() > 0 || treatmentsDel.size() > 0) || (UploaderQueue.getPendingbyType(Treatments.class.getSimpleName(), THIS_QUEUE, 1).size() > 0)) {
                Log.d(TAG, UploaderQueue.getCircuitName(THIS_QUEUE) + " Processing: " + bgReadings.size() + " BgReadings and " + calibrations.size() + " Calibrations " + bloodtests.size() + " bloodtests " + treatmentsAdd.size() + " treatmentsAdd " + treatmentsDel.size() + " treatmentsDel");
                boolean uploadStatus = false;
                if (THIS_QUEUE == UploaderQueue.MONGO_DIRECT) {
                    final NightscoutUploader uploader = new NightscoutUploader(xdrip.getAppContext());
                    // NOTE(review): calibrations is passed for both the meter-record and
                    // cal-record parameters (likewise for influx below) — presumably
                    // intentional (calibrations double as meter entries); confirm
                    uploadStatus = uploader.uploadMongo(bgReadings, calibrations, calibrations);
                } else if (THIS_QUEUE == UploaderQueue.NIGHTSCOUT_RESTAPI) {
                    final NightscoutUploader uploader = new NightscoutUploader(xdrip.getAppContext());
                    uploadStatus = uploader.uploadRest(bgReadings, bloodtests, calibrations);
                } else if (THIS_QUEUE == UploaderQueue.INFLUXDB_RESTAPI) {
                    final InfluxDBUploader influxDBUploader = new InfluxDBUploader(xdrip.getAppContext());
                    uploadStatus = influxDBUploader.upload(bgReadings, calibrations, calibrations);
                } else if (THIS_QUEUE == UploaderQueue.WATCH_WEARAPI) {
                    uploadStatus = WatchUpdaterService.sendWearUpload(bgReadings, calibrations, bloodtests, treatmentsAdd, treatmentsDel);
                }
                // TODO some kind of fail counter?
                if (uploadStatus) {
                    for (UploaderQueue up : items) {
                        // approve all types for this queue
                        up.completed(THIS_QUEUE);
                    }
                    Log.d(TAG, UploaderQueue.getCircuitName(THIS_QUEUE) + " Marking: " + items.size() + " Items as successful");
                    if (PersistentStore.getBoolean(BACKFILLING_BOOSTER)) {
                        Log.d(TAG, "Scheduling boosted repeat query");
                        SyncService.startSyncService(2000);
                    }
                }
            } else {
                Log.d(TAG, "Nothing to upload for: " + UploaderQueue.getCircuitName(THIS_QUEUE));
                if (PersistentStore.getBoolean(BACKFILLING_BOOSTER)) {
                    PersistentStore.setBoolean(BACKFILLING_BOOSTER, false);
                    Log.d(TAG, "Switched off backfilling booster");
                }
            }
        }
    } catch (Exception e) {
        // record for onPostExecute; AsyncTask must not throw from here
        Log.e(TAG, "caught exception", e);
        exception = e;
        return null;
    }
    return null;
}
Aggregations