Use of com.eveningoutpost.dexdrip.Models.Treatments in project xDrip by NightscoutFoundation.
The class NightscoutUploader, method postTreatments:
private void postTreatments(NightscoutService nightscoutService, String apiSecret) throws Exception {
Log.d(TAG, "Processing treatments for RESTAPI");
final long THIS_QUEUE = UploaderQueue.NIGHTSCOUT_RESTAPI;
final List<UploaderQueue> tups = UploaderQueue.getPendingbyType(Treatments.class.getSimpleName(), THIS_QUEUE);
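// each queue entry describes a pending insert/update/delete of a Treatments row for this uploader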
if (tups != null) {
JSONArray insert_array = new JSONArray();
JSONArray upsert_array = new JSONArray();
for (UploaderQueue up : tups) {
if ((up.action.equals("insert") || (up.action.equals("update")))) {
final Treatments treatment = Treatments.byid(up.reference_id);
if (up.action.equals("insert")) {
// populateV1APITreatmentEntry(insert_array, treatment);
// TODO always use singular upserts for now
populateV1APITreatmentEntry(upsert_array, treatment);
} else if (up.action.equals("update")) {
populateV1APITreatmentEntry(upsert_array, treatment);
}
} else if (up.action.equals("delete")) {
if (up.reference_uuid != null) {
if (apiSecret != null) {
// do we already have a nightscout style reference id
String this_id = up.reference_uuid.length() == 24 ? up.reference_uuid : null;
Response<ResponseBody> lookup = null;
if (this_id == null) {
// look up the _id to delete as we can't use find with delete action nor can we specify our own _id on submission circa nightscout 0.9.2
lookup = nightscoutService.findTreatmentByUUID(apiSecret, up.reference_uuid).execute();
}
// throw an exception if we failed lookup
if ((this_id == null) && (lookup != null) && !lookup.isSuccess()) {
throw new UploaderException(lookup.message(), lookup.code());
} else {
// parse the result
if (this_id == null) {
try {
final String response = lookup.body().string();
final JSONArray jsonArray = new JSONArray(response);
// can only be one
final JSONObject tr = (JSONObject) jsonArray.get(0);
this_id = tr.getString("_id");
} catch (Exception e) {
Log.e(TAG, "Got exception parsing treatment lookup response: " + e);
}
}
// is the id valid now?
if ((this_id != null) && (this_id.length() == 24)) {
final Response<ResponseBody> r = nightscoutService.deleteTreatment(apiSecret, this_id).execute();
if (!r.isSuccess()) {
throw new UploaderException(r.message(), r.code());
} else {
up.completed(THIS_QUEUE);
Log.d(TAG, "Success for RESTAPI treatment delete: " + up.reference_uuid + " _id: " + this_id);
}
} else {
Log.wtf(TAG, "Couldn't find a reference _id for uuid: " + up.reference_uuid + " got: " + this_id);
// don't retry
up.completed(THIS_QUEUE);
}
}
} else {
Log.wtf(TAG, "Cannot delete treatments without api secret being set");
}
}
} else {
Log.wtf(TAG, "Unsupported operation type for treatment: " + up.action);
// don't retry it
up.completed(THIS_QUEUE);
}
}
// handle insert types
if (insert_array.length() != 0) {
final RequestBody body = RequestBody.create(MediaType.parse("application/json"), insert_array.toString());
final Response<ResponseBody> r;
if (apiSecret != null) {
r = nightscoutService.uploadTreatments(apiSecret, body).execute();
if (!r.isSuccess()) {
throw new UploaderException(r.message(), r.code());
} else {
Log.d(TAG, "Success for RESTAPI treatment insert upload");
for (UploaderQueue up : tups) {
if (up.action.equals("insert")) {
// approve all types for this queue
up.completed(THIS_QUEUE);
}
}
checkGzipSupport(r);
}
} else {
Log.wtf(TAG, "Cannot upload treatments without api secret being set");
}
}
// handle upsert types
if (upsert_array.length() != 0) {
for (int i = 0; i < upsert_array.length(); i++) {
JSONObject item = (JSONObject) upsert_array.get(i);
final String match_uuid = item.getString("uuid");
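// uuid_to_id() (a helper defined elsewhere in this class) derives a 24-character Mongo-style _id from our uuid so the upsert addresses a stable document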
item.put("_id", uuid_to_id(match_uuid));
final RequestBody body = RequestBody.create(MediaType.parse("application/json"), item.toString());
final Response<ResponseBody> r;
if (apiSecret != null) {
r = nightscoutService.upsertTreatments(apiSecret, body).execute();
if (!r.isSuccess()) {
throw new UploaderException(r.message(), r.code());
} else {
Log.d(TAG, "Success for RESTAPI treatment upsert upload: " + match_uuid);
for (UploaderQueue up : tups) {
if (d)
Log.d(TAG, "upsert: " + match_uuid + " / " + up.reference_uuid + " " + up.action + " " + up.reference_id);
if ((up.action.equals("update") || (up.action.equals("insert"))) && (up.reference_uuid.equals(match_uuid) || (uuid_to_id(up.reference_uuid).equals(match_uuid)))) {
if (d)
Log.d(TAG, "upsert: matched");
// approve all types for this queue
up.completed(THIS_QUEUE);
break;
}
}
checkGzipSupport(r);
}
} else {
Log.wtf(TAG, "Cannot upload treatments without api secret being set");
return;
}
}
// if we got this far without exception then mark everything as completed to fix harmless erroneous queue entries
for (UploaderQueue up : tups) {
if (d)
Log.d(TAG, "Marking all items completed");
up.completed(THIS_QUEUE);
}
}
}
}
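postTreatments drives four calls on NightscoutService (findTreatmentByUUID, deleteTreatment, uploadTreatments and upsertTreatments), each returning a Retrofit Response<ResponseBody> from execute(). The declaration below is only a plausible sketch of such a service against the Nightscout v1 REST API; the endpoint paths, header name and imports are assumptions for illustration, not the actual xDrip interface.

import okhttp3.RequestBody;
import okhttp3.ResponseBody;
import retrofit2.Call;
import retrofit2.http.Body;
import retrofit2.http.DELETE;
import retrofit2.http.GET;
import retrofit2.http.Header;
import retrofit2.http.POST;
import retrofit2.http.PUT;
import retrofit2.http.Path;
import retrofit2.http.Query;

// Sketch only: paths and the api-secret header are assumptions based on the
// Nightscout v1 REST API, not copied from xDrip.
public interface NightscoutService {
    @POST("treatments")
    Call<ResponseBody> uploadTreatments(@Header("api-secret") String secret, @Body RequestBody treatments);

    @PUT("treatments")
    Call<ResponseBody> upsertTreatments(@Header("api-secret") String secret, @Body RequestBody treatment);

    @GET("treatments")
    Call<ResponseBody> findTreatmentByUUID(@Header("api-secret") String secret, @Query("find[uuid]") String uuid);

    @DELETE("treatments/{id}")
    Call<ResponseBody> deleteTreatment(@Header("api-secret") String secret, @Path("id") String id);
}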
Use of com.eveningoutpost.dexdrip.Models.Treatments in project xDrip by NightscoutFoundation.
The class NightscoutUploader, method doMongoUpload:
private boolean doMongoUpload(SharedPreferences prefs, List<BgReading> glucoseDataSets, List<Calibration> meterRecords, List<Calibration> calRecords) {
final SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ", Locale.US);
format.setTimeZone(TimeZone.getDefault());
final String dbURI = prefs.getString("cloud_storage_mongodb_uri", null);
if (dbURI != null) {
try {
final URI uri = new URI(dbURI.trim());
if ((uri.getHost().startsWith("192.168.")) && prefs.getBoolean("skip_lan_uploads_when_no_lan", true) && (!JoH.isLANConnected())) {
Log.d(TAG, "Skipping mongo upload to: " + dbURI + " due to no LAN connection");
return false;
}
} catch (URISyntaxException e) {
UserError.Log.e(TAG, "Invalid mongo URI: " + e);
}
}
final String collectionName = prefs.getString("cloud_storage_mongodb_collection", null);
final String dsCollectionName = prefs.getString("cloud_storage_mongodb_device_status_collection", "devicestatus");
if (dbURI != null && collectionName != null) {
try {
// connect to db
MongoClientURI uri = new MongoClientURI(dbURI.trim() + "?socketTimeoutMS=180000");
MongoClient client = new MongoClient(uri);
// get db
DB db = client.getDB(uri.getDatabase());
// get collection
DBCollection dexcomData = db.getCollection(collectionName.trim());
try {
Log.i(TAG, "The number of EGV records being sent to MongoDB is " + glucoseDataSets.size());
for (BgReading record : glucoseDataSets) {
// make db object
BasicDBObject testData = new BasicDBObject();
testData.put("device", "xDrip-" + prefs.getString("dex_collection_method", "BluetoothWixel"));
if (record != null) {
// KS
testData.put("date", record.timestamp);
testData.put("dateString", format.format(record.timestamp));
testData.put("sgv", Math.round(record.calculated_value));
testData.put("direction", record.slopeName());
testData.put("type", "sgv");
testData.put("filtered", record.ageAdjustedFiltered() * 1000);
testData.put("unfiltered", record.usedRaw() * 1000);
testData.put("rssi", 100);
testData.put("noise", record.noiseValue());
dexcomData.insert(testData, WriteConcern.UNACKNOWLEDGED);
} else
Log.e(TAG, "MongoDB BG record is null.");
}
Log.i(TAG, "The number of MBG records being sent to MongoDB is " + meterRecords.size());
for (Calibration meterRecord : meterRecords) {
// make db object
BasicDBObject testData = new BasicDBObject();
testData.put("device", "xDrip-" + prefs.getString("dex_collection_method", "BluetoothWixel"));
testData.put("type", "mbg");
testData.put("date", meterRecord.timestamp);
testData.put("dateString", format.format(meterRecord.timestamp));
testData.put("mbg", meterRecord.bg);
dexcomData.insert(testData, WriteConcern.UNACKNOWLEDGED);
}
for (Calibration calRecord : calRecords) {
// do not upload undefined slopes
if (calRecord.slope == 0d)
break;
// make db object
BasicDBObject testData = new BasicDBObject();
testData.put("device", "xDrip-" + prefs.getString("dex_collection_method", "BluetoothWixel"));
testData.put("date", calRecord.timestamp);
testData.put("dateString", format.format(calRecord.timestamp));
if (calRecord.check_in) {
testData.put("slope", (calRecord.first_slope));
testData.put("intercept", ((calRecord.first_intercept)));
testData.put("scale", calRecord.first_scale);
} else {
testData.put("slope", (1000 / calRecord.slope));
testData.put("intercept", ((calRecord.intercept * -1000) / (calRecord.slope)));
testData.put("scale", 1);
}
testData.put("type", "cal");
dexcomData.insert(testData, WriteConcern.UNACKNOWLEDGED);
}
// TODO: quick port from original code, revisit before release
DBCollection dsCollection = db.getCollection(dsCollectionName);
BasicDBObject devicestatus = new BasicDBObject();
devicestatus.put("uploaderBattery", getBatteryLevel());
devicestatus.put("created_at", format.format(System.currentTimeMillis()));
dsCollection.insert(devicestatus, WriteConcern.UNACKNOWLEDGED);
// treatments mongo sync using unified queue
Log.d(TAG, "Starting treatments mongo direct");
final long THIS_QUEUE = UploaderQueue.MONGO_DIRECT;
final DBCollection treatmentDb = db.getCollection("treatments");
final List<UploaderQueue> tups = UploaderQueue.getPendingbyType(Treatments.class.getSimpleName(), THIS_QUEUE);
if (tups != null) {
for (UploaderQueue up : tups) {
if ((up.action.equals("insert") || (up.action.equals("update")))) {
Treatments treatment = Treatments.byid(up.reference_id);
if (treatment != null) {
BasicDBObject record = new BasicDBObject();
record.put("timestamp", treatment.timestamp);
record.put("eventType", treatment.eventType);
record.put("enteredBy", treatment.enteredBy);
if (treatment.notes != null)
record.put("notes", treatment.notes);
record.put("uuid", treatment.uuid);
record.put("carbs", treatment.carbs);
record.put("insulin", treatment.insulin);
record.put("created_at", treatment.created_at);
final BasicDBObject searchQuery = new BasicDBObject().append("uuid", treatment.uuid);
// treatmentDb.insert(record, WriteConcern.UNACKNOWLEDGED);
Log.d(TAG, "Sending upsert for: " + treatment.toJSON());
treatmentDb.update(searchQuery, record, true, false);
} else {
Log.d(TAG, "Got null for treatment id: " + up.reference_id);
}
up.completed(THIS_QUEUE);
} else if (up.action.equals("delete")) {
if (up.reference_uuid != null) {
Log.d(TAG, "Processing treatment delete mongo sync for: " + up.reference_uuid);
final BasicDBObject searchQuery = new BasicDBObject().append("uuid", up.reference_uuid);
Log.d(TAG, treatmentDb.remove(searchQuery, WriteConcern.UNACKNOWLEDGED).toString());
}
up.completed(THIS_QUEUE);
} else {
Log.e(TAG, "Unsupported operation type for treatment: " + up.action);
}
}
Log.d(TAG, "Processed " + tups.size() + " Treatment mongo direct upload records");
}
client.close();
failurecount = 0;
return true;
} catch (Exception e) {
Log.e(TAG, "Unable to upload data to mongo " + e.getMessage());
failurecount++;
if (failurecount > 4) {
Home.toaststaticnext("Mongo " + failurecount + " up fails: " + e.getMessage().substring(0, 51));
}
} finally {
if (client != null) {
client.close();
}
}
} catch (Exception e) {
Log.e(TAG, "Unable to upload data to mongo " + e.getMessage());
}
}
return false;
}
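The treatments block above relies on the legacy MongoDB Java driver's update(query, record, upsert, multi) overload to upsert by uuid. A minimal self-contained sketch of that pattern follows; the URI and field values are placeholders (the real code reads cloud_storage_mongodb_uri from preferences):

import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.MongoClient;
import com.mongodb.MongoClientURI;

public class TreatmentUpsertSketch {
    public static void main(String[] args) {
        // placeholder URI; the socketTimeoutMS option matches the listing above
        final MongoClientURI uri = new MongoClientURI("mongodb://host/mydb?socketTimeoutMS=180000");
        final MongoClient client = new MongoClient(uri);
        try {
            final DB db = client.getDB(uri.getDatabase());
            final DBCollection treatments = db.getCollection("treatments");
            // illustrative record; the uploader builds this from a Treatments row
            final BasicDBObject record = new BasicDBObject("uuid", "example-uuid")
                    .append("eventType", "Meal Bolus")
                    .append("carbs", 30)
                    .append("insulin", 2.5);
            // upsert=true inserts when no document matches; multi=false updates at most one
            treatments.update(new BasicDBObject("uuid", "example-uuid"), record, true, false);
        } finally {
            client.close();
        }
    }
}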
Use of com.eveningoutpost.dexdrip.Models.Treatments in project xDrip by NightscoutFoundation.
The class Home, method processAndApproveTreatment:
private void processAndApproveTreatment() {
// preserve globals before threading off
final double myglucosenumber = thisglucosenumber;
double mytimeoffset = thistimeoffset;
// TODO Handle BG Tests here also
if (watchkeypad) {
// calculate absolute offset
long treatment_timestamp = watchkeypad_timestamp - (long) mytimeoffset;
mytimeoffset = JoH.tsl() - treatment_timestamp;
Log.d(TAG, "Watch Keypad timestamp is: " + JoH.dateTimeText(treatment_timestamp) + " Original offset: " + JoH.qs(thistimeoffset) + " New: " + JoH.qs(mytimeoffset));
if ((mytimeoffset > (DAY_IN_MS * 3)) || (mytimeoffset < -HOUR_IN_MS * 3)) {
Log.e(TAG, "Treatment timestamp out of range: " + mytimeoffset);
JoH.static_toast_long("Treatment time wrong");
WatchUpdaterService.sendWearLocalToast("Treatment error", Toast.LENGTH_LONG);
} else {
JoH.static_toast_long("Treatment processed");
WatchUpdaterService.sendWearLocalToast("Treatment processed", Toast.LENGTH_LONG);
long time = Treatments.getTimeStampWithOffset(mytimeoffset);
// sanity check timestamp
final Treatments exists = Treatments.byTimestamp(time);
if (exists == null) {
Log.d(TAG, "processAndApproveTreatment create watchkeypad Treatment carbs=" + thiscarbsnumber + " insulin=" + thisinsulinnumber + " timestamp=" + JoH.dateTimeText(time) + " uuid=" + thisuuid);
Treatments.create(thiscarbsnumber, thisinsulinnumber, time, thisuuid);
} else {
Log.d(TAG, "processAndApproveTreatment Treatment already exists carbs=" + thiscarbsnumber + " insulin=" + thisinsulinnumber + " timestamp=" + JoH.dateTimeText(time));
}
}
} else {
WatchUpdaterService.sendWearToast("Treatment processed", Toast.LENGTH_LONG);
Treatments.create(thiscarbsnumber, thisinsulinnumber, Treatments.getTimeStampWithOffset(mytimeoffset));
}
hideAllTreatmentButtons();
if (hideTreatmentButtonsIfAllDone()) {
updateCurrentBgInfo("approve button");
}
if (watchkeypad) {
if (myglucosenumber > 0) {
if ((mytimeoffset > (DAY_IN_MS * 3)) || (mytimeoffset < -HOUR_IN_MS * 3)) {
Log.e(TAG, "Treatment bloodtest timestamp out of range: " + mytimeoffset);
} else {
BloodTest.createFromCal(myglucosenumber, mytimeoffset, "Manual Entry", thisuuid);
}
}
watchkeypad = false;
watchkeypadset = false;
watchkeypad_timestamp = -1;
} else
processCalibrationNoUI(myglucosenumber, mytimeoffset);
staticRefreshBGCharts();
}
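Both range checks above apply the same sanity window: the offset (now minus treatment time) must not place the treatment more than 3 days in the past or 3 hours in the future. Expressed as a standalone predicate (a hypothetical helper; DAY_IN_MS and HOUR_IN_MS are assumed to match the constants Home uses):

// Hypothetical helper mirroring the range check in processAndApproveTreatment.
// offsetMs = now - treatment_timestamp, so positive values lie in the past.
static boolean treatmentOffsetSane(final double offsetMs) {
    final long DAY_IN_MS = 24L * 60 * 60 * 1000;
    final long HOUR_IN_MS = 60L * 60 * 1000;
    return offsetMs <= DAY_IN_MS * 3 && offsetMs >= -(HOUR_IN_MS * 3);
}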
Use of com.eveningoutpost.dexdrip.Models.Treatments in project xDrip by NightscoutFoundation.
The class TreatmentsTable, method getData:
private void getData() {
// 3 days
final long startTime = new Date().getTime() - (60000 * 60 * 24 * 3);
final List<Treatments> latest = Treatments.latestForGraph(60, startTime);
ListAdapter adapter = new thisAdapter(this, latest);
this.setListAdapter(adapter);
String msg = "";
int size = 0;
if (latest != null)
size = latest.size();
if (size == 0) {
msg = getResources().getString(R.string.notify_table_size, "Treatments", size);
JoH.static_toast(xdrip.getAppContext(), msg, Toast.LENGTH_SHORT);
}
}
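The cutoff arithmetic (60000 ms * 60 * 24 * 3) works out to three days; an equivalent, easier-to-verify sketch using TimeUnit:

// equivalent to new Date().getTime() - (60000 * 60 * 24 * 3)
final long startTime = System.currentTimeMillis() - java.util.concurrent.TimeUnit.DAYS.toMillis(3);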
Use of com.eveningoutpost.dexdrip.Models.Treatments in project xDrip-plus by jamorham.
The class BgGraphBuilder, method addBgReadingValues:
private synchronized void addBgReadingValues(final boolean simple) {
if (readings_lock.isLocked()) {
Log.d(TAG, "BgReadings lock is currently held");
}
readings_lock.lock();
try {
if (plugin_adjusted) {
Log.i(TAG, "Reloading as Plugin modified data: " + JoH.backTrace(1) + " size:" + bgReadings.size());
bgReadings.clear();
bgReadings.addAll(BgReading.latestForGraph(loaded_numValues, loaded_start, loaded_end));
} else {
// Log.d(TAG, "not adjusted");
}
filteredValues.clear();
rawInterpretedValues.clear();
iobValues.clear();
activityValues.clear();
cobValues.clear();
predictedBgValues.clear();
polyBgValues.clear();
noisePolyBgValues.clear();
annotationValues.clear();
treatmentValues.clear();
highValues.clear();
lowValues.clear();
inRangeValues.clear();
calibrationValues.clear();
bloodTestValues.clear();
pluginValues.clear();
final double bgScale = bgScale();
final double now = JoH.ts();
// most recent bgreading timestamp we have
long highest_bgreading_timestamp = -1;
// 12 minutes // TODO MAKE PREFERENCE?
double trend_start_working = now - (1000 * 60 * 12);
if (bgReadings.size() > 0) {
highest_bgreading_timestamp = bgReadings.get(0).timestamp;
final double ms_since_last_reading = now - highest_bgreading_timestamp;
if (ms_since_last_reading < 500000) {
// push back start of trend calc window
trend_start_working -= ms_since_last_reading;
Log.d(TAG, "Pushed back trend start by: " + JoH.qs(ms_since_last_reading / 1000) + " secs - last reading: " + JoH.dateTimeText(highest_bgreading_timestamp));
}
}
final double trendstart = trend_start_working;
// 20 minutes // TODO MAKE PREFERENCE
final double noise_trendstart = now - (1000 * 60 * 20);
double oldest_noise_timestamp = now;
double newest_noise_timestamp = 0;
TrendLine[] polys = new TrendLine[5];
polys[0] = new PolyTrendLine(1);
// polys[1] = new PolyTrendLine(2);
polys[1] = new Forecast.LogTrendLine();
polys[2] = new Forecast.ExpTrendLine();
polys[3] = new Forecast.PowerTrendLine();
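// index 4 of the polys array is deliberately left null (the PolyTrendLine(2) entry is commented out); the model selection loop below skips null slots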
TrendLine poly = null;
final List<Double> polyxList = new ArrayList<>();
final List<Double> polyyList = new ArrayList<>();
final List<Double> noise_polyxList = new ArrayList<>();
final List<Double> noise_polyyList = new ArrayList<>();
// 8 hours
final double avg1start = now - (1000 * 60 * 60 * 8);
// 2 hours
final double momentum_illustration_start = now - (1000 * 60 * 60 * 2);
avg1startfuzzed = avg1start / FUZZER;
avg1value = 0;
avg1counter = 0;
avg2value = 0;
avg2counter = 0;
double last_calibration = 0;
double last_bloodtest = 0;
if (doMgdl) {
Profile.scale_factor = Constants.MMOLL_TO_MGDL;
} else {
Profile.scale_factor = 1;
}
final long close_to_side_time = (long) (end_time * FUZZER) - (Constants.MINUTE_IN_MS * 10);
// enumerate calibrations
try {
for (Calibration calibration : calibrations) {
if (calibration.timestamp < (start_time * FUZZER))
break;
if (calibration.slope_confidence != 0) {
final long adjusted_timestamp = (calibration.timestamp + (AddCalibration.estimatedInterstitialLagSeconds * 1000));
final PointValueExtended this_point = new PointValueExtended((float) (adjusted_timestamp / FUZZER), (float) unitized(calibration.bg));
if (adjusted_timestamp >= close_to_side_time) {
predictivehours = Math.max(predictivehours, 1);
}
this_point.real_timestamp = calibration.timestamp;
calibrationValues.add(this_point);
if (calibration.timestamp > last_calibration) {
last_calibration = calibration.timestamp;
}
}
}
} catch (Exception e) {
Log.e(TAG, "Exception doing calibration values in bggraphbuilder: " + e.toString());
}
// enumerate blood tests
try {
for (BloodTest bloodtest : bloodtests) {
final long adjusted_timestamp = (bloodtest.timestamp + (AddCalibration.estimatedInterstitialLagSeconds * 1000));
final PointValueExtended this_point = new PointValueExtended((float) (adjusted_timestamp / FUZZER), (float) unitized(bloodtest.mgdl));
this_point.type = PointValueExtended.BloodTest;
this_point.uuid = bloodtest.uuid;
this_point.real_timestamp = bloodtest.timestamp;
// exclude any which have been used for calibration
boolean matches = false;
for (PointValue calibration_point : calibrationValues) {
if ((Math.abs(calibration_point.getX() - this_point.getX())) <= ((AddCalibration.estimatedInterstitialLagSeconds * 1000) / FUZZER) && (calibration_point.getY() == this_point.getY())) {
matches = true;
break;
}
}
if (!matches)
bloodTestValues.add(this_point);
if (bloodtest.timestamp > last_bloodtest) {
last_bloodtest = bloodtest.timestamp;
}
if (adjusted_timestamp >= close_to_side_time) {
predictivehours = Math.max(predictivehours, 1);
}
}
} catch (Exception e) {
Log.e(TAG, "Exception doing calibration values in bggraphbuilder: " + e.toString());
}
final boolean has_filtered = DexCollectionType.hasFiltered();
final boolean predict_use_momentum = prefs.getBoolean("predict_use_momentum", true);
final boolean show_moment_working_line = prefs.getBoolean("show_momentum_working_line", false);
final boolean interpret_raw = prefs.getBoolean("interpret_raw", false);
final boolean show_filtered = prefs.getBoolean("show_filtered_curve", false) && has_filtered;
final boolean predict_lows = prefs.getBoolean("predict_lows", true);
final boolean show_plugin = prefs.getBoolean("plugin_plot_on_graph", false);
final boolean glucose_from_plugin = prefs.getBoolean("display_glucose_from_plugin", false);
if ((Home.get_follower()) && (bgReadings.size() < 3)) {
GcmActivity.requestBGsync();
}
final CalibrationAbstract plugin = (show_plugin) ? PluggableCalibration.getCalibrationPluginFromPreferences() : null;
CalibrationAbstract.CalibrationData cd = (plugin != null) ? plugin.getCalibrationData() : null;
int cdposition = 0;
if ((glucose_from_plugin) && (cd != null)) {
// plugin will be adjusting data
plugin_adjusted = true;
}
for (final BgReading bgReading : bgReadings) {
if ((cd != null) && (calibrations.size() > 0)) {
while ((bgReading.timestamp < calibrations.get(cdposition).timestamp) || (calibrations.get(cdposition).slope == 0)) {
Log.d(TAG, "BG reading earlier than calibration at index: " + cdposition + " " + JoH.dateTimeText(bgReading.timestamp) + " cal: " + JoH.dateTimeText(calibrations.get(cdposition).timestamp));
if (cdposition < calibrations.size() - 1) {
cdposition++;
// cd = (plugin != null) ? plugin.getCalibrationData(calibrations.get(cdposition).timestamp) : null;
final CalibrationAbstract.CalibrationData oldcd = cd;
cd = plugin.getCalibrationData(calibrations.get(cdposition).timestamp);
if (cd == null) {
Log.d(TAG, "cd went to null during adjustment - likely graph spans multiple sensors");
cd = oldcd;
}
Log.d(TAG, "Now using calibration from: " + JoH.dateTimeText(calibrations.get(cdposition).timestamp) + " slope: " + cd.slope + " intercept: " + cd.intercept);
} else {
Log.d(TAG, "No more calibrations to choose from");
break;
}
}
}
// swap main and plugin plot if display glucose is from plugin
if ((glucose_from_plugin) && (cd != null)) {
pluginValues.add(new PointValue((float) (bgReading.timestamp / FUZZER), (float) unitized(bgReading.calculated_value)));
// recalculate from plugin - beware floating / cached references!
bgReading.calculated_value = plugin.getGlucoseFromBgReading(bgReading, cd);
bgReading.filtered_calculated_value = plugin.getGlucoseFromFilteredBgReading(bgReading, cd);
}
if ((show_filtered) && (bgReading.filtered_calculated_value > 0) && (bgReading.filtered_calculated_value != bgReading.calculated_value)) {
filteredValues.add(new PointValue((float) ((bgReading.timestamp - timeshift) / FUZZER), (float) unitized(bgReading.filtered_calculated_value)));
}
if ((interpret_raw && (bgReading.raw_calculated > 0))) {
rawInterpretedValues.add(new PointValue((float) (bgReading.timestamp / FUZZER), (float) unitized(bgReading.raw_calculated)));
}
if ((!glucose_from_plugin) && (plugin != null) && (cd != null)) {
pluginValues.add(new PointValue((float) (bgReading.timestamp / FUZZER), (float) unitized(plugin.getGlucoseFromBgReading(bgReading, cd))));
}
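// bucket the reading into the high / in-range / low series, clamping the plotted value to 400 at the top and 40 at the bottom; readings at or below 13 mg/dl are dropped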
if (bgReading.calculated_value >= 400) {
highValues.add(new PointValue((float) (bgReading.timestamp / FUZZER), (float) unitized(400)));
} else if (unitized(bgReading.calculated_value) >= highMark) {
highValues.add(new PointValue((float) (bgReading.timestamp / FUZZER), (float) unitized(bgReading.calculated_value)));
} else if (unitized(bgReading.calculated_value) >= lowMark) {
inRangeValues.add(new PointValue((float) (bgReading.timestamp / FUZZER), (float) unitized(bgReading.calculated_value)));
} else if (bgReading.calculated_value >= 40) {
lowValues.add(new PointValue((float) (bgReading.timestamp / FUZZER), (float) unitized(bgReading.calculated_value)));
} else if (bgReading.calculated_value > 13) {
lowValues.add(new PointValue((float) (bgReading.timestamp / FUZZER), (float) unitized(40)));
}
avg2counter++;
avg2value += bgReading.calculated_value;
if (bgReading.timestamp > avg1start) {
avg1counter++;
avg1value += bgReading.calculated_value;
}
// noise calculator
if ((!simple || (noise_processed_till_timestamp < highest_bgreading_timestamp)) && (bgReading.timestamp > noise_trendstart) && (bgReading.timestamp > last_calibration)) {
if (has_filtered && (bgReading.filtered_calculated_value > 0) && (bgReading.filtered_calculated_value != bgReading.calculated_value)) {
final double shifted_timestamp = bgReading.timestamp - timeshift;
if (shifted_timestamp > last_calibration) {
if (shifted_timestamp < oldest_noise_timestamp)
oldest_noise_timestamp = shifted_timestamp;
noise_polyxList.add(shifted_timestamp);
noise_polyyList.add((bgReading.filtered_calculated_value));
if (d)
Log.d(TAG, "flt noise poly Added: " + noise_polyxList.size() + " " + JoH.qs(noise_polyxList.get(noise_polyxList.size() - 1)) + " / " + JoH.qs(noise_polyyList.get(noise_polyyList.size() - 1), 2));
}
}
if (bgReading.calculated_value > 0) {
if (bgReading.timestamp < oldest_noise_timestamp)
oldest_noise_timestamp = bgReading.timestamp;
if (bgReading.timestamp > newest_noise_timestamp) {
newest_noise_timestamp = bgReading.timestamp;
original_value = bgReading.calculated_value;
}
noise_polyxList.add((double) bgReading.timestamp);
noise_polyyList.add((bgReading.calculated_value));
if (d)
Log.d(TAG, "raw noise poly Added: " + noise_polyxList.size() + " " + JoH.qs(noise_polyxList.get(noise_polyxList.size() - 1)) + " / " + JoH.qs(noise_polyyList.get(noise_polyyList.size() - 1), 2));
}
}
// momentum trend
if (!simple && (bgReading.timestamp > trendstart) && (bgReading.timestamp > last_calibration)) {
if (has_filtered && (bgReading.filtered_calculated_value > 0) && (bgReading.filtered_calculated_value != bgReading.calculated_value)) {
polyxList.add((double) bgReading.timestamp - timeshift);
polyyList.add(unitized(bgReading.filtered_calculated_value));
}
if (bgReading.calculated_value > 0) {
polyxList.add((double) bgReading.timestamp);
polyyList.add(unitized(bgReading.calculated_value));
}
if (d)
Log.d(TAG, "poly Added: " + JoH.qs(polyxList.get(polyxList.size() - 1)) + " / " + JoH.qs(polyyList.get(polyyList.size() - 1), 2));
}
}
if (avg1counter > 0) {
avg1value = avg1value / avg1counter;
}
if (avg2counter > 0) {
avg2value = avg2value / avg2counter;
}
// always calculate noise if needed
if (noise_processed_till_timestamp < highest_bgreading_timestamp) {
// noise evaluate
Log.d(TAG, "Noise: Processing new data for noise: " + JoH.dateTimeText(noise_processed_till_timestamp) + " vs now: " + JoH.dateTimeText(highest_bgreading_timestamp));
try {
if (d)
Log.d(TAG, "noise Poly list size: " + noise_polyxList.size());
// TODO Impossible to satisfy noise evaluation size with only raw data do we want it with raw only??
if (noise_polyxList.size() > 5) {
noisePoly = new PolyTrendLine(2);
final double[] noise_polyys = PolyTrendLine.toPrimitiveFromList(noise_polyyList);
final double[] noise_polyxs = PolyTrendLine.toPrimitiveFromList(noise_polyxList);
noisePoly.setValues(noise_polyys, noise_polyxs);
last_noise = noisePoly.errorVarience();
if (newest_noise_timestamp > oldest_noise_timestamp) {
best_bg_estimate = noisePoly.predict(newest_noise_timestamp);
last_bg_estimate = noisePoly.predict(newest_noise_timestamp - DEXCOM_PERIOD);
} else {
best_bg_estimate = -99;
last_bg_estimate = -99;
}
Log.i(TAG, "Noise: Poly Error Varience: " + JoH.qs(last_noise, 5));
} else {
Log.i(TAG, "Noise: Not enough data to get sensible noise value");
noisePoly = null;
last_noise = -9999;
best_bg_estimate = -9999;
last_bg_estimate = -9999;
}
// store that we have processed up to this timestamp
noise_processed_till_timestamp = highest_bgreading_timestamp;
} catch (Exception e) {
Log.e(TAG, " Error with noise poly trend: " + e.toString());
}
} else {
Log.d(TAG, "Noise Cached noise timestamp: " + JoH.dateTimeText(noise_processed_till_timestamp));
}
if (!simple) {
// momentum
try {
if (d)
Log.d(TAG, "moment Poly list size: " + polyxList.size());
if (polyxList.size() > 1) {
final double[] polyys = PolyTrendLine.toPrimitiveFromList(polyyList);
final double[] polyxs = PolyTrendLine.toPrimitiveFromList(polyxList);
// set and evaluate poly curve models and select first best
double min_errors = 9999999;
for (TrendLine this_poly : polys) {
if (this_poly != null) {
if (poly == null)
poly = this_poly;
this_poly.setValues(polyys, polyxs);
if (this_poly.errorVarience() < min_errors) {
min_errors = this_poly.errorVarience();
poly = this_poly;
// if (d) Log.d(TAG, "set forecast best model to: " + poly.getClass().getSimpleName() + " with varience of: " + JoH.qs(poly.errorVarience(),14));
}
}
}
if (d)
Log.i(TAG, "set forecast best model to: " + poly.getClass().getSimpleName() + " with varience of: " + JoH.qs(poly.errorVarience(), 4));
} else {
if (d)
Log.i(TAG, "Not enough data for forecast model");
}
} catch (Exception e) {
Log.e(TAG, " Error with poly trend: " + e.toString());
}
try {
// show trend for whole bg reading area
if ((show_moment_working_line) && (poly != null)) {
for (BgReading bgReading : bgReadings) {
// only show working curve for the last x hours
if (bgReading.timestamp > momentum_illustration_start) {
double polyPredicty = poly.predict(bgReading.timestamp);
// if (d) Log.d(TAG, "Poly predict: "+JoH.qs(polyPredict)+" @ "+JoH.qs(iob.timestamp));
if ((polyPredicty < highMark) && (polyPredicty > 0)) {
PointValue zv = new PointValue((float) (bgReading.timestamp / FUZZER), (float) polyPredicty);
polyBgValues.add(zv);
}
}
}
}
} catch (Exception e) {
Log.e(TAG, "Error creating back trend: " + e.toString());
}
// low estimator
// work backwards to see whether we think a low is estimated
low_occurs_at = -1;
try {
if ((predict_lows) && (prediction_enabled) && (poly != null)) {
final double offset = ActivityRecognizedService.raise_limit_due_to_vehicle_mode() ? unitized(ActivityRecognizedService.getVehicle_mode_adjust_mgdl()) : 0;
final double plow_now = JoH.ts();
// max look-ahead
double plow_timestamp = plow_now + (1000 * 60 * 99);
double polyPredicty = poly.predict(plow_timestamp);
Log.d(TAG, "Low predictor at max lookahead is: " + JoH.qs(polyPredicty));
// store that we have processed up to this timestamp
low_occurs_at_processed_till_timestamp = highest_bgreading_timestamp;
if (polyPredicty <= (lowMark + offset)) {
low_occurs_at = plow_timestamp;
final double lowMarkIndicator = (lowMark - (lowMark / 4));
// if (d) Log.d(TAG, "Poly predict: "+JoH.qs(polyPredict)+" @ "+JoH.qsz(iob.timestamp));
while (plow_timestamp > plow_now) {
plow_timestamp = plow_timestamp - FUZZER;
polyPredicty = poly.predict(plow_timestamp);
if (polyPredicty > (lowMark + offset)) {
PointValue zv = new PointValue((float) (plow_timestamp / FUZZER), (float) polyPredicty);
polyBgValues.add(zv);
} else {
low_occurs_at = plow_timestamp;
if (polyPredicty > lowMarkIndicator) {
polyBgValues.add(new PointValue((float) (plow_timestamp / FUZZER), (float) polyPredicty));
}
}
}
Log.i(TAG, "LOW PREDICTED AT: " + JoH.dateTimeText((long) low_occurs_at));
predictivehours = Math.max(predictivehours, (int) ((low_occurs_at - plow_now) / (60 * 60 * 1000)) + 1);
}
}
} catch (NullPointerException e) {
// Log.d(TAG,"Error with low prediction trend: "+e.toString());
}
final boolean show_noise_working_line;
if (last_noise > NOISE_TRIGGER || (last_noise > BgGraphBuilder.NOISE_TRIGGER_ULTRASENSITIVE && Pref.getBooleanDefaultFalse("engineering_mode") && Pref.getBooleanDefaultFalse("bg_compensate_noise_ultrasensitive"))) {
show_noise_working_line = true;
} else {
show_noise_working_line = prefs.getBoolean("show_noise_workings", false);
}
// noise debug
try {
// overlay noise curve
if ((show_noise_working_line) && (prediction_enabled) && (noisePoly != null)) {
for (BgReading bgReading : bgReadings) {
// only show working curve for the last x hours
if ((bgReading.timestamp > oldest_noise_timestamp) && (bgReading.timestamp > last_calibration)) {
double polyPredicty = unitized(noisePoly.predict(bgReading.timestamp));
if (d)
Log.d(TAG, "noise Poly predict: " + JoH.qs(polyPredicty) + " @ " + JoH.qs(bgReading.timestamp));
if ((polyPredicty < highMark) && (polyPredicty > 0)) {
PointValue zv = new PointValue((float) (bgReading.timestamp / FUZZER), (float) polyPredicty);
noisePolyBgValues.add(zv);
}
}
}
}
} catch (Exception e) {
Log.e(TAG, "Error creating noise working trend: " + e.toString());
}
try {
// display treatment blobs and annotations
for (Treatments treatment : treatments) {
double height = 6 * bgScale;
if (treatment.insulin > 0)
// some scaling needed I think
height = treatment.insulin;
if (height > highMark)
height = highMark;
if (height < lowMark)
height = lowMark;
final PointValueExtended pv = new PointValueExtended((float) (treatment.timestamp / FUZZER), (float) height);
String mylabel = "";
if (treatment.insulin > 0) {
if (mylabel.length() > 0)
mylabel = mylabel + System.getProperty("line.separator");
mylabel = mylabel + (JoH.qs(treatment.insulin, 2) + "u").replace(".0u", "u");
}
if (treatment.carbs > 0) {
if (mylabel.length() > 0)
mylabel = mylabel + System.getProperty("line.separator");
mylabel = mylabel + (JoH.qs(treatment.carbs, 1) + "g").replace(".0g", "g");
}
// standard label
pv.setLabel(mylabel);
// Log.d(TAG, "watchkeypad pv.mylabel: " + mylabel);
if ((treatment.notes != null) && (treatment.notes.length() > 0)) {
pv.note = treatment.notes;
// Log.d(TAG, "watchkeypad pv.note: " + pv.note + " mylabel: " + mylabel);
try {
final Pattern p = Pattern.compile(".*?pos:([0-9.]+).*");
final Matcher m = p.matcher(treatment.enteredBy);
if (m.matches()) {
pv.set(pv.getX(), (float) JoH.tolerantParseDouble(m.group(1)));
}
} catch (Exception e) {
Log.d(TAG, "Exception matching position: " + e);
}
} else {
pv.note = treatment.getBestShortText();
}
if (treatmentValues.size() > 0) {
// not sure if this >1 is right really - needs a review
PointValue lastpv = treatmentValues.get(treatmentValues.size() - 1);
if (Math.abs(lastpv.getX() - pv.getX()) < ((10 * 60 * 1000) / FUZZER)) {
// merge label with previous - Intelligent parsing and additions go here
if (d)
Log.d(TAG, "Merge treatment difference: " + Float.toString(lastpv.getX() - pv.getX()));
String lastlabel = String.valueOf(lastpv.getLabelAsChars());
if (lastlabel.length() > 0) {
lastpv.setLabel(lastlabel + "+" + mylabel);
pv.setLabel("");
}
}
}
// hover
treatmentValues.add(pv);
if (d)
Log.d(TAG, "Treatment total record: " + Double.toString(height) + " " + " timestamp: " + Long.toString(treatment.timestamp));
}
} catch (Exception e) {
Log.e(TAG, "Exception doing treatment values in bggraphbuilder: " + e.toString());
}
try {
// we need to check we actually have sufficient data for this
double predictedbg = -1000;
BgReading mylastbg = bgReadings.get(0);
double lasttimestamp = 0;
// this can be optimised to oncreate and onchange
// TODO handle this better now we use profile time blocks
Profile.reloadPreferencesIfNeeded(prefs);
try {
if (mylastbg != null) {
if (doMgdl) {
predictedbg = mylastbg.calculated_value;
} else {
predictedbg = mylastbg.calculated_value_mmol();
}
// if (d) Log.d(TAG, "Starting prediction with bg of: " + JoH.qs(predictedbg));
lasttimestamp = mylastbg.timestamp / FUZZER;
if (d)
Log.d(TAG, "Starting prediction with bg of: " + JoH.qs(predictedbg) + " secs ago: " + (JoH.ts() - mylastbg.timestamp) / 1000);
} else {
Log.i(TAG, "COULD NOT GET LAST BG READING FOR PREDICTION!!!");
}
} catch (Exception e) {
// could not get a bg reading
}
final double iobscale = 1 * bgScale;
final double cobscale = 0.2 * bgScale;
final double initial_predicted_bg = predictedbg;
final double relaxed_predicted_bg_limit = initial_predicted_bg * 1.20;
final double cob_insulin_max_draw_value = highMark * 1.20;
// final List<Iob> iobinfo_old = Treatments.ioBForGraph(numValues, (start_time * FUZZER));
// for test
final List<Iob> iobinfo = (simulation_enabled) ? Treatments.ioBForGraph_new(NUM_VALUES, (start_time * FUZZER)) : null;
// initial value in case there are no iob records
long fuzzed_timestamp = (long) end_time;
if (d)
Log.d(TAG, "Internal date timestamp: " + android.text.format.DateFormat.format("yyyy-MM-dd HH:mm:ss", new java.util.Date()));
if (d)
Log.d(TAG, "initial Fuzzed end timestamp: " + android.text.format.DateFormat.format("yyyy-MM-dd HH:mm:ss", fuzzed_timestamp * FUZZER));
if (d)
Log.d(TAG, "initial Fuzzed start timestamp: " + android.text.format.DateFormat.format("yyyy-MM-dd HH:mm:ss", (long) start_time * FUZZER));
if ((iobinfo != null) && (prediction_enabled) && (simulation_enabled)) {
double predict_weight = 0.1;
boolean iob_shown_already = false;
for (Iob iob : iobinfo) {
// double activity = iob.activity;
if ((iob.iob > 0) || (iob.cob > 0) || (iob.jActivity > 0) || (iob.jCarbImpact > 0)) {
fuzzed_timestamp = iob.timestamp / FUZZER;
if (d)
Log.d(TAG, "iob timestamp: " + iob.timestamp);
if (iob.iob > Profile.minimum_shown_iob) {
double height = iob.iob * iobscale;
if (height > cob_insulin_max_draw_value)
height = cob_insulin_max_draw_value;
PointValue pv = new PointValue((float) fuzzed_timestamp, (float) height);
iobValues.add(pv);
// currently scaled by profile
double activityheight = iob.jActivity * 3;
if (activityheight > cob_insulin_max_draw_value)
activityheight = cob_insulin_max_draw_value;
PointValue av = new PointValue((float) fuzzed_timestamp, (float) activityheight);
activityValues.add(av);
}
if (iob.cob > 0) {
double height = iob.cob * cobscale;
if (height > cob_insulin_max_draw_value)
height = cob_insulin_max_draw_value;
PointValue pv = new PointValue((float) fuzzed_timestamp, (float) height);
if (d)
Log.d(TAG, "Cob total record: " + JoH.qs(height) + " " + JoH.qs(iob.cob) + " " + Float.toString(pv.getY()) + " @ timestamp: " + Long.toString(iob.timestamp));
// warning should not be hardcoded
cobValues.add(pv);
}
// do we actually need to calculate this within the loop - can we use only the last datum?
if (fuzzed_timestamp > (lasttimestamp)) {
double polyPredict = 0;
if (poly != null) {
try {
polyPredict = poly.predict(iob.timestamp);
if (d)
Log.d(TAG, "Poly predict: " + JoH.qs(polyPredict) + " @ " + JoH.dateTimeText(iob.timestamp));
if (show_moment_working_line) {
if (((polyPredict < highMark) || (polyPredict < initial_predicted_bg)) && (polyPredict > 0)) {
PointValue zv = new PointValue((float) fuzzed_timestamp, (float) polyPredict);
polyBgValues.add(zv);
}
}
} catch (Exception e) {
Log.e(TAG, "Got exception with poly predict: " + e.toString());
}
}
if (d)
Log.d(TAG, "Processing prediction: before: " + JoH.qs(predictedbg) + " activity: " + JoH.qs(iob.jActivity) + " jcarbimpact: " + JoH.qs(iob.jCarbImpact));
// lower bg by current insulin activity
predictedbg -= iob.jActivity;
predictedbg += iob.jCarbImpact;
double predictedbg_final = predictedbg;
// add momentum characteristics if we have them
final boolean momentum_smoothing = true;
if ((predict_use_momentum) && (polyPredict > 0)) {
predictedbg_final = ((predictedbg * predict_weight) + polyPredict) / (predict_weight + 1);
if (momentum_smoothing)
predictedbg = predictedbg_final;
if (d)
Log.d(TAG, "forecast predict_weight: " + JoH.qs(predict_weight));
}
// from 0-infinity - // TODO account for step!!!
predict_weight = predict_weight * 2.5;
// we should pull in actual graph upper and lower limits here
if (((predictedbg_final < cob_insulin_max_draw_value) || (predictedbg_final < relaxed_predicted_bg_limit)) && (predictedbg_final > 0)) {
PointValue zv = new PointValue((float) fuzzed_timestamp, (float) predictedbg_final);
predictedBgValues.add(zv);
}
}
if (fuzzed_timestamp > end_time) {
// round up to nearest future hour - timestamps in minutes here
predictivehours = (int) (((fuzzed_timestamp - end_time) * FUZZER) / (1000 * 60 * 60)) + 1;
if (d)
Log.d(TAG, "Predictive hours updated to: " + predictivehours);
} else {
// KS Log.d(TAG, "IOB DEBUG: " + (fuzzed_timestamp - end_time) + " " + iob.iob);
if (!iob_shown_already && (Math.abs(fuzzed_timestamp - end_time) < 5) && (iob.iob > 0)) {
iob_shown_already = true;
// show current iob
// double position = 12.4 * bgScale; // this is for mmol - needs generic for mg/dl
// if (Math.abs(predictedbg - position) < (2 * bgScale)) {
// position = 7.0 * bgScale;
// }
// PointValue iv = new PointValue((float) fuzzed_timestamp, (float) position);
DecimalFormat df = new DecimalFormat("#");
df.setMaximumFractionDigits(2);
df.setMinimumIntegerDigits(1);
// iv.setLabel("IoB: " + df.format(iob.iob));
Home.updateStatusLine("iob", df.format(iob.iob));
// annotationValues.add(iv); // needs to be different value list so we can make annotation nicer
}
}
}
}
if (d)
Log.i(TAG, "Size of iob: " + Integer.toString(iobinfo.size()) + " Predictive hours: " + Integer.toString(predictivehours) + " Predicted end game change: " + JoH.qs(predictedbg - mylastbg.calculated_value_mmol()) + " Start bg: " + JoH.qs(mylastbg.calculated_value_mmol()) + " Predicted: " + JoH.qs(predictedbg));
// calculate bolus or carb adjustment - these should have granularity for injection / pump and thresholds
} else {
if (d)
Log.i(TAG, "iobinfo was null");
}
double[] evaluation;
if (prediction_enabled && simulation_enabled) {
// if (doMgdl) {
// These routines need to understand how the profile is defined to use native instead of scaled
evaluation = Profile.evaluateEndGameMmol(predictedbg, lasttimestamp * FUZZER, end_time * FUZZER);
// } else {
// evaluation = Profile.evaluateEndGameMmol(predictedbg, lasttimestamp * FUZZER, end_time * FUZZER);
// }
String bwp_update = "";
keyStore.putL("bwp_last_insulin_timestamp", -1);
if (d)
Log.i(TAG, "Predictive BWP: Current prediction: " + JoH.qs(predictedbg) + " / carbs: " + JoH.qs(evaluation[0]) + " insulin: " + JoH.qs(evaluation[1]));
if (!BgReading.isDataStale()) {
if (((low_occurs_at < 1) || Pref.getBooleanDefaultFalse("always_show_bwp")) && (Pref.getBooleanDefaultFalse("show_bwp"))) {
if (evaluation[0] > Profile.minimum_carb_recommendation) {
// PointValue iv = new PointValue((float) fuzzed_timestamp, (float) (10 * bgScale));
// iv.setLabel("+Carbs: " + JoH.qs(evaluation[0], 0));
bwp_update = "\u224F" + " Carbs: " + JoH.qs(evaluation[0], 0);
// annotationValues.add(iv); // needs to be different value list so we can make annotation nicer
} else if (evaluation[1] > Profile.minimum_insulin_recommendation) {
// PointValue iv = new PointValue((float) fuzzed_timestamp, (float) (11 * bgScale));
// iv.setLabel("+Insulin: " + JoH.qs(evaluation[1], 1));
keyStore.putS("bwp_last_insulin", JoH.qs(evaluation[1], 1) + ((low_occurs_at > 0) ? ("!") : ""));
keyStore.putL("bwp_last_insulin_timestamp", JoH.tsl());
// warning symbol
bwp_update = "\u224F" + " Insulin: " + JoH.qs(evaluation[1], 1) + ((low_occurs_at > 0) ? (" " + "\u26A0") : "");
// annotationValues.add(iv); // needs to be different value list so we can make annotation nicer
}
}
}
// always send so we can blank if needed
Home.updateStatusLine("bwp", bwp_update);
}
} catch (Exception e) {
Log.e(TAG, "Exception doing iob values in bggraphbuilder: " + e.toString());
}
}
// if !simple
} finally {
readings_lock.unlock();
}
}
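Two patterns recur through addBgReadingValues: epoch-millisecond timestamps are divided by FUZZER before being stored as float X values (the chart works in float, which cannot hold millisecond epochs exactly), and the momentum forecast keeps whichever TrendLine candidate fits with the lowest error variance. Below is a hedged sketch of that selection step, using setValues() and errorVarience() exactly as they appear in the listing:

// Sketch of the best-fit selection from the momentum block above; TrendLine
// and its candidates (PolyTrendLine, LogTrendLine, ...) are the xDrip classes.
static TrendLine bestModel(final TrendLine[] candidates, final double[] ys, final double[] xs) {
    TrendLine best = null;
    double minError = Double.MAX_VALUE;
    for (final TrendLine candidate : candidates) {
        if (candidate == null) continue; // unassigned slots, as with polys[4] above
        candidate.setValues(ys, xs);
        if (candidate.errorVarience() < minError) { // spelling as in the xDrip API
            minError = candidate.errorVarience();
            best = candidate;
        }
    }
    return best;
}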