Usage of com.eveningoutpost.dexdrip.Models.Treatments in the xDrip project by NightscoutFoundation.
From the class NightscoutBackfillActivity, method backfillRun.
/**
 * Button handler: queues historical glucose readings and treatments for upload
 * to Nightscout, starting from the user-selected calendar date up to now.
 *
 * Runs the queueing work on a background thread under a wake lock, then
 * finishes the activity immediately. {@code locked} acts as a simple
 * re-entry guard (timestamp while running, 0 when done).
 *
 * @param v the clicked view (unused)
 */
public synchronized void backfillRun(View v) {
    locked = JoH.tsl();
    doitButton.setVisibility(View.INVISIBLE);
    JoH.static_toast_long("Please wait..");
    new Thread(new Runnable() {
        @Override
        public void run() {
            // hold a wake lock (up to 10 minutes) while queueing potentially many records
            final PowerManager.WakeLock wl = JoH.getWakeLock("nightscout-backfill", 600000);
            try {
                final List<BgReading> the_readings = BgReading.latestForGraphAsc(500000, calendar.getTimeInMillis(), JoH.tsl());
                if ((the_readings != null) && (the_readings.size() > 0)) {
                    // hint to the uploader that a large backfill batch is coming
                    PersistentStore.setBoolean(UploaderTask.BACKFILLING_BOOSTER, true);
                    long bgcount = the_readings.size();
                    long trcount = 0;
                    for (BgReading bg : the_readings) {
                        UploaderQueue.newEntry("update", bg);
                    }
                    final List<Treatments> the_treatments = Treatments.latestForGraph(50000, calendar.getTimeInMillis(), JoH.tsl());
                    if ((the_treatments != null) && (the_treatments.size() > 0)) {
                        trcount = the_treatments.size();
                        for (Treatments tr : the_treatments) {
                            UploaderQueue.newEntry("update", tr);
                        }
                    }
                    // TODO Calibrations? Blood tests?
                    JoH.static_toast_long("Queued " + bgcount + " glucose readings and " + trcount + " treatments!");
                    SyncService.startSyncService(500);
                    // clear lock
                    locked = 0;
                } else {
                    JoH.static_toast_long("Didn't find any glucose readings in that time period");
                    // BUGFIX: previously the re-entry lock was only cleared on the success
                    // path, so an empty result left the feature locked out. Clear it here
                    // too so the user can retry with a different date.
                    locked = 0;
                }
            } finally {
                JoH.releaseWakeLock(wl);
            }
        }
    }).start();
    finish();
}
Usage of com.eveningoutpost.dexdrip.Models.Treatments in the xDrip project by NightscoutFoundation.
From the class ListenerService, method getWearTreatmentsData.
/**
 * Builds a DataMap of treatment records newer than {@code last_send_time} for
 * transmission to the wearable, or returns null when there is nothing
 * (or not enough, per {@code min_count}) to send.
 *
 * @param count          maximum number of treatment records to include
 * @param last_send_time sync timestamp of the last successful send
 * @param min_count      minimum batch size required before sending
 * @return populated DataMap with an "entries" list and "time" stamp, or null
 */
private synchronized DataMap getWearTreatmentsData(int count, long last_send_time, int min_count) {
    forceGoogleApiConnect();
    Log.d(TAG, "getWearTreatmentsData last_send_time:" + JoH.dateTimeText(last_send_time) + " max count=" + count + " min_count=" + min_count);
    final Treatments newest = Treatments.lastSystime();
    if (newest != null) {
        Log.d(TAG, "getWearTreatmentsData last systimestamp: " + newest.systimestamp + " " + JoH.dateTimeText((long) newest.systimestamp));
    }
    // nothing newer than the last send? then there is nothing to do
    if (newest == null || newest.systimestamp <= 0 || last_send_time > newest.systimestamp) {
        return null;
    }
    long synced_up_to = last_send_time;
    Log.d(TAG, "getWearTreatmentsData last_send_time < last_log.timestamp:" + JoH.dateTimeText((long) newest.systimestamp));
    final List<Treatments> pending = Treatments.latestForGraphSystime(count, last_send_time);
    if (pending.isEmpty() || pending.size() <= min_count) {
        Log.i(TAG, "getWearTreatmentsData SYNCED treatments up to " + JoH.dateTimeText(synced_up_to) + " count = 0");
        return null;
    }
    final DataMap payload = dataMap(newest);
    final ArrayList<DataMap> dataMaps = new ArrayList<>(pending.size());
    for (final Treatments record : pending) {
        dataMaps.add(dataMap(record));
        synced_up_to = (long) record.systimestamp;
    }
    // MOST IMPORTANT LINE FOR TIMESTAMP
    payload.putLong("time", new Date().getTime());
    payload.putDataMapArrayList("entries", dataMaps);
    Log.i(TAG, "getWearTreatmentsData SYNCED treatments up to " + JoH.dateTimeText(synced_up_to) + " count = " + pending.size());
    return payload;
}
Usage of com.eveningoutpost.dexdrip.Models.Treatments in the xDrip project by NightscoutFoundation.
From the class WatchUpdaterService, method sendWearTreatmentsData.
/**
 * Sends a batch of treatment records to the wearable over the Data Layer API.
 *
 * The batch is taken from {@code list} when supplied; otherwise the latest
 * {@code count} treatments are loaded (optionally restricted to those after
 * {@code startTime}).
 *
 * @param count     maximum number of records to load when {@code list} is null
 * @param startTime earliest timestamp to load from (0 means "latest")
 * @param list      pre-fetched records to send, or null to query the database
 * @return true when the send was dispatched (or there was nothing to send),
 *         false when no wearable connection was available or an NPE occurred
 */
public static boolean sendWearTreatmentsData(Integer count, long startTime, List<Treatments> list) {
    try {
        if (googleApiClient != null && !googleApiClient.isConnected() && !googleApiClient.isConnecting()) {
            googleApiClient.connect();
        }
        if (googleApiClient == null) {
            Log.e(TAG, "sendWearTreatmentsData No connection to wearable available for send treatment!");
            return false;
        }
        // reference record for the payload header: head of the supplied list, else newest in db
        final Treatments last = (list != null && list.size() > 0) ? list.get(0) : Treatments.last();
        if (last == null) {
            Log.d(TAG, "sendWearTreatmentsData no treatments exist");
            return true;
        }
        Log.d(TAG, "sendWearTreatmentsData last.timestamp:" + JoH.dateTimeText(last.timestamp));
        final List<Treatments> graph;
        if (list != null) {
            graph = list;
        } else if (startTime == 0) {
            graph = Treatments.latest(count);
        } else {
            graph = Treatments.latestForGraph(count, startTime);
        }
        if (graph.isEmpty()) {
            Log.d(TAG, "sendWearTreatmentsData treatments count = 0");
        } else {
            Log.d(TAG, "sendWearTreatmentsData graph size=" + graph.size());
            final ArrayList<DataMap> dataMaps = new ArrayList<>(graph.size());
            final DataMap entries = dataMap(last);
            for (final Treatments record : graph) {
                dataMaps.add(dataMap(record));
            }
            Log.d(TAG, "sendWearTreatmentsData entries=" + entries);
            // MOST IMPORTANT LINE FOR TIMESTAMP
            entries.putLong("time", new Date().getTime());
            entries.putString("action", "insert");
            entries.putDataMapArrayList("entries", dataMaps);
            new SendToDataLayerThread(WEARABLE_TREATMENTS_DATA_PATH, googleApiClient).executeOnExecutor(xdrip.executor, entries);
        }
    } catch (NullPointerException e) {
        // deliberate safety net: any unexpected null along the path reports failure
        Log.e(TAG, "Nullpointer exception in sendWearTreatmentsData: " + e);
        return false;
    }
    return true;
}
Usage of com.eveningoutpost.dexdrip.Models.Treatments in the xDrip project by NightscoutFoundation.
From the class BgGraphBuilder, method addBgReadingValues.
/**
 * Rebuilds every point list used to render the glucose graph: raw/filtered
 * readings, high/low/in-range buckets, calibrations, blood tests, plugin
 * values, treatment blobs, noise and momentum trend curves, IoB/CoB values
 * and forward BG predictions.
 *
 * All work happens under {@code readings_lock} because the value lists are
 * shared with the chart renderer. Statement order is significant throughout:
 * later sections consume state (e.g. {@code last_calibration},
 * {@code highest_bgreading_timestamp}, poly lists) accumulated by earlier ones.
 *
 * @param simple when true, skip the expensive momentum/prediction work and
 *               only compute what is needed for a basic plot (noise is still
 *               recalculated if stale)
 */
private synchronized void addBgReadingValues(final boolean simple) {
if (readings_lock.isLocked()) {
Log.d(TAG, "BgReadings lock is currently held");
}
readings_lock.lock();
try {
// if a calibration plugin previously rewrote calculated values in-place,
// reload pristine readings from the database before rebuilding
if (plugin_adjusted) {
Log.i(TAG, "Reloading as Plugin modified data: " + JoH.backTrace(1) + " size:" + bgReadings.size());
bgReadings.clear();
bgReadings.addAll(BgReading.latestForGraph(loaded_numValues, loaded_start, loaded_end));
} else {
// Log.d(TAG, "not adjusted");
}
// reset all chart point lists before repopulating them below
filteredValues.clear();
rawInterpretedValues.clear();
iobValues.clear();
activityValues.clear();
cobValues.clear();
predictedBgValues.clear();
polyBgValues.clear();
noisePolyBgValues.clear();
annotationValues.clear();
treatmentValues.clear();
highValues.clear();
lowValues.clear();
inRangeValues.clear();
calibrationValues.clear();
bloodTestValues.clear();
pluginValues.clear();
final double bgScale = bgScale();
final double now = JoH.ts();
// most recent bgreading timestamp we have
long highest_bgreading_timestamp = -1;
// 10 minutes // TODO MAKE PREFERENCE?
// NOTE(review): the constant is actually 12 minutes (1000*60*12) — comment
// above appears stale; confirm intended window.
double trend_start_working = now - (1000 * 60 * 12);
if (bgReadings.size() > 0) {
// assumes bgReadings is ordered newest-first — TODO confirm against loader
highest_bgreading_timestamp = bgReadings.get(0).timestamp;
final double ms_since_last_reading = now - highest_bgreading_timestamp;
if (ms_since_last_reading < 500000) {
// push back start of trend calc window
trend_start_working -= ms_since_last_reading;
Log.d(TAG, "Pushed back trend start by: " + JoH.qs(ms_since_last_reading / 1000) + " secs - last reading: " + JoH.dateTimeText(highest_bgreading_timestamp));
}
}
final double trendstart = trend_start_working;
// 20 minutes // TODO MAKE PREFERENCE
final double noise_trendstart = now - (1000 * 60 * 20);
double oldest_noise_timestamp = now;
double newest_noise_timestamp = 0;
// candidate forecast models; the best-fitting one is selected later
// (index 4 is intentionally left null)
TrendLine[] polys = new TrendLine[5];
polys[0] = new PolyTrendLine(1);
// polys[1] = new PolyTrendLine(2);
polys[1] = new Forecast.LogTrendLine();
polys[2] = new Forecast.ExpTrendLine();
polys[3] = new Forecast.PowerTrendLine();
TrendLine poly = null;
final List<Double> polyxList = new ArrayList<>();
final List<Double> polyyList = new ArrayList<>();
final List<Double> noise_polyxList = new ArrayList<>();
final List<Double> noise_polyyList = new ArrayList<>();
// 8 hours
final double avg1start = now - (1000 * 60 * 60 * 8);
// 8 hours
// NOTE(review): constant is 2 hours (1000*60*60*2); the "8 hours" comment
// above looks copy-pasted — confirm.
final double momentum_illustration_start = now - (1000 * 60 * 60 * 2);
avg1startfuzzed = avg1start / FUZZER;
avg1value = 0;
avg1counter = 0;
avg2value = 0;
avg2counter = 0;
double last_calibration = 0;
double last_bloodtest = 0;
if (doMgdl) {
Profile.scale_factor = Constants.MMOLL_TO_MGDL;
} else {
Profile.scale_factor = 1;
}
// timestamp within 10 minutes of the right-hand graph edge; points past this
// force at least one hour of predictive display
final long close_to_side_time = (long) (end_time * FUZZER) - (Constants.MINUTE_IN_MS * 10);
// enumerate calibrations
try {
for (Calibration calibration : calibrations) {
// calibrations are iterated newest-first; stop once before the window
if (calibration.timestamp < (start_time * FUZZER))
break;
if (calibration.slope_confidence != 0) {
// shift display position by the estimated interstitial lag
final long adjusted_timestamp = (calibration.timestamp + (AddCalibration.estimatedInterstitialLagSeconds * 1000));
final PointValueExtended this_point = new PointValueExtended((float) (adjusted_timestamp / FUZZER), (float) unitized(calibration.bg));
if (adjusted_timestamp >= close_to_side_time) {
predictivehours = Math.max(predictivehours, 1);
}
this_point.real_timestamp = calibration.timestamp;
calibrationValues.add(this_point);
if (calibration.timestamp > last_calibration) {
last_calibration = calibration.timestamp;
}
}
}
} catch (Exception e) {
Log.e(TAG, "Exception doing calibration values in bggraphbuilder: " + e.toString());
}
// enumerate blood tests
try {
for (BloodTest bloodtest : bloodtests) {
final long adjusted_timestamp = (bloodtest.timestamp + (AddCalibration.estimatedInterstitialLagSeconds * 1000));
final PointValueExtended this_point = new PointValueExtended((float) (adjusted_timestamp / FUZZER), (float) unitized(bloodtest.mgdl));
this_point.type = PointValueExtended.BloodTest;
this_point.uuid = bloodtest.uuid;
this_point.real_timestamp = bloodtest.timestamp;
// exclude any which have been used for calibration
boolean matches = false;
for (PointValue calibration_point : calibrationValues) {
// NOTE(review): "calibration_point.getY() == calibration_point.getY()" compares a
// value with itself (true except for NaN) — likely intended this_point.getY();
// as written only the X (time) proximity check is effective. Confirm upstream.
if ((Math.abs(calibration_point.getX() - this_point.getX())) <= ((AddCalibration.estimatedInterstitialLagSeconds * 1000) / FUZZER) && (calibration_point.getY() == calibration_point.getY())) {
matches = true;
break;
}
}
if (!matches)
bloodTestValues.add(this_point);
if (bloodtest.timestamp > last_bloodtest) {
last_bloodtest = bloodtest.timestamp;
}
if (adjusted_timestamp >= close_to_side_time) {
predictivehours = Math.max(predictivehours, 1);
}
}
} catch (Exception e) {
Log.e(TAG, "Exception doing calibration values in bggraphbuilder: " + e.toString());
}
// snapshot the user preferences which shape the rest of the build
final boolean has_filtered = DexCollectionType.hasFiltered();
final boolean predict_use_momentum = prefs.getBoolean("predict_use_momentum", true);
final boolean show_moment_working_line = prefs.getBoolean("show_momentum_working_line", false);
final boolean interpret_raw = prefs.getBoolean("interpret_raw", false);
final boolean show_filtered = prefs.getBoolean("show_filtered_curve", false) && has_filtered;
final boolean predict_lows = prefs.getBoolean("predict_lows", true);
final boolean show_plugin = prefs.getBoolean("plugin_plot_on_graph", false);
final boolean glucose_from_plugin = prefs.getBoolean("display_glucose_from_plugin", false);
// follower devices with almost no data ask the master for a sync
if ((Home.get_follower()) && (bgReadings.size() < 3)) {
GcmActivity.requestBGsync();
}
final CalibrationAbstract plugin = (show_plugin) ? PluggableCalibration.getCalibrationPluginFromPreferences() : null;
CalibrationAbstract.CalibrationData cd = (plugin != null) ? plugin.getCalibrationData() : null;
int cdposition = 0;
if ((glucose_from_plugin) && (cd != null)) {
// plugin will be adjusting data
plugin_adjusted = true;
}
// main pass over every reading: bucket points, feed noise + momentum lists
for (final BgReading bgReading : bgReadings) {
// advance the calibration cursor until it matches this reading's era
if ((cd != null) && (calibrations.size() > 0)) {
while ((bgReading.timestamp < calibrations.get(cdposition).timestamp) || (calibrations.get(cdposition).slope == 0)) {
Log.d(TAG, "BG reading earlier than calibration at index: " + cdposition + " " + JoH.dateTimeText(bgReading.timestamp) + " cal: " + JoH.dateTimeText(calibrations.get(cdposition).timestamp));
if (cdposition < calibrations.size() - 1) {
cdposition++;
// cd = (plugin != null) ? plugin.getCalibrationData(calibrations.get(cdposition).timestamp) : null;
final CalibrationAbstract.CalibrationData oldcd = cd;
cd = plugin.getCalibrationData(calibrations.get(cdposition).timestamp);
if (cd == null) {
Log.d(TAG, "cd went to null during adjustment - likely graph spans multiple sensors");
cd = oldcd;
}
Log.d(TAG, "Now using calibration from: " + JoH.dateTimeText(calibrations.get(cdposition).timestamp) + " slope: " + cd.slope + " intercept: " + cd.intercept);
} else {
Log.d(TAG, "No more calibrations to choose from");
break;
}
}
}
// swap main and plugin plot if display glucose is from plugin
if ((glucose_from_plugin) && (cd != null)) {
pluginValues.add(new PointValue((float) (bgReading.timestamp / FUZZER), (float) unitized(bgReading.calculated_value)));
// recalculate from plugin - beware floating / cached references!
bgReading.calculated_value = plugin.getGlucoseFromBgReading(bgReading, cd);
bgReading.filtered_calculated_value = plugin.getGlucoseFromFilteredBgReading(bgReading, cd);
}
if ((show_filtered) && (bgReading.filtered_calculated_value > 0) && (bgReading.filtered_calculated_value != bgReading.calculated_value)) {
filteredValues.add(new PointValue((float) ((bgReading.timestamp - timeshift) / FUZZER), (float) unitized(bgReading.filtered_calculated_value)));
}
if ((interpret_raw && (bgReading.raw_calculated > 0))) {
rawInterpretedValues.add(new PointValue((float) (bgReading.timestamp / FUZZER), (float) unitized(bgReading.raw_calculated)));
}
if ((!glucose_from_plugin) && (plugin != null) && (cd != null)) {
pluginValues.add(new PointValue((float) (bgReading.timestamp / FUZZER), (float) unitized(plugin.getGlucoseFromBgReading(bgReading, cd))));
}
// bucket the reading into high / in-range / low, clamping extremes
// (>=400 capped at 400; 13-40 drawn at 40; <=13 dropped as invalid)
if (bgReading.calculated_value >= 400) {
highValues.add(new PointValue((float) (bgReading.timestamp / FUZZER), (float) unitized(400)));
} else if (unitized(bgReading.calculated_value) >= highMark) {
highValues.add(new PointValue((float) (bgReading.timestamp / FUZZER), (float) unitized(bgReading.calculated_value)));
} else if (unitized(bgReading.calculated_value) >= lowMark) {
inRangeValues.add(new PointValue((float) (bgReading.timestamp / FUZZER), (float) unitized(bgReading.calculated_value)));
} else if (bgReading.calculated_value >= 40) {
lowValues.add(new PointValue((float) (bgReading.timestamp / FUZZER), (float) unitized(bgReading.calculated_value)));
} else if (bgReading.calculated_value > 13) {
lowValues.add(new PointValue((float) (bgReading.timestamp / FUZZER), (float) unitized(40)));
}
// running averages: avg2 over all readings, avg1 over the last 8 hours
avg2counter++;
avg2value += bgReading.calculated_value;
if (bgReading.timestamp > avg1start) {
avg1counter++;
avg1value += bgReading.calculated_value;
}
// noise calculator
if ((!simple || (noise_processed_till_timestamp < highest_bgreading_timestamp)) && (bgReading.timestamp > noise_trendstart) && (bgReading.timestamp > last_calibration)) {
if (has_filtered && (bgReading.filtered_calculated_value > 0) && (bgReading.filtered_calculated_value != bgReading.calculated_value)) {
final double shifted_timestamp = bgReading.timestamp - timeshift;
if (shifted_timestamp > last_calibration) {
if (shifted_timestamp < oldest_noise_timestamp)
oldest_noise_timestamp = shifted_timestamp;
noise_polyxList.add(shifted_timestamp);
noise_polyyList.add((bgReading.filtered_calculated_value));
if (d)
Log.d(TAG, "flt noise poly Added: " + noise_polyxList.size() + " " + JoH.qs(noise_polyxList.get(noise_polyxList.size() - 1)) + " / " + JoH.qs(noise_polyyList.get(noise_polyyList.size() - 1), 2));
}
}
if (bgReading.calculated_value > 0) {
if (bgReading.timestamp < oldest_noise_timestamp)
oldest_noise_timestamp = bgReading.timestamp;
if (bgReading.timestamp > newest_noise_timestamp) {
newest_noise_timestamp = bgReading.timestamp;
original_value = bgReading.calculated_value;
}
noise_polyxList.add((double) bgReading.timestamp);
noise_polyyList.add((bgReading.calculated_value));
if (d)
Log.d(TAG, "raw noise poly Added: " + noise_polyxList.size() + " " + JoH.qs(noise_polyxList.get(noise_polyxList.size() - 1)) + " / " + JoH.qs(noise_polyyList.get(noise_polyyList.size() - 1), 2));
}
}
// momentum trend
if (!simple && (bgReading.timestamp > trendstart) && (bgReading.timestamp > last_calibration)) {
if (has_filtered && (bgReading.filtered_calculated_value > 0) && (bgReading.filtered_calculated_value != bgReading.calculated_value)) {
polyxList.add((double) bgReading.timestamp - timeshift);
polyyList.add(unitized(bgReading.filtered_calculated_value));
}
if (bgReading.calculated_value > 0) {
polyxList.add((double) bgReading.timestamp);
polyyList.add(unitized(bgReading.calculated_value));
}
if (d)
Log.d(TAG, "poly Added: " + JoH.qs(polyxList.get(polyxList.size() - 1)) + " / " + JoH.qs(polyyList.get(polyyList.size() - 1), 2));
}
}
// finalize the running averages accumulated above
if (avg1counter > 0) {
avg1value = avg1value / avg1counter;
}
if (avg2counter > 0) {
avg2value = avg2value / avg2counter;
}
// always calculate noise if needed
if (noise_processed_till_timestamp < highest_bgreading_timestamp) {
// noise evaluate
Log.d(TAG, "Noise: Processing new data for noise: " + JoH.dateTimeText(noise_processed_till_timestamp) + " vs now: " + JoH.dateTimeText(highest_bgreading_timestamp));
try {
if (d)
Log.d(TAG, "noise Poly list size: " + noise_polyxList.size());
// TODO Impossible to satisfy noise evaluation size with only raw data do we want it with raw only??
if (noise_polyxList.size() > 5) {
// fit a quadratic to recent points; its error variance is the noise metric
noisePoly = new PolyTrendLine(2);
final double[] noise_polyys = PolyTrendLine.toPrimitiveFromList(noise_polyyList);
final double[] noise_polyxs = PolyTrendLine.toPrimitiveFromList(noise_polyxList);
noisePoly.setValues(noise_polyys, noise_polyxs);
last_noise = noisePoly.errorVarience();
if (newest_noise_timestamp > oldest_noise_timestamp) {
best_bg_estimate = noisePoly.predict(newest_noise_timestamp);
last_bg_estimate = noisePoly.predict(newest_noise_timestamp - DEXCOM_PERIOD);
} else {
best_bg_estimate = -99;
last_bg_estimate = -99;
}
Log.i(TAG, "Noise: Poly Error Varience: " + JoH.qs(last_noise, 5));
} else {
Log.i(TAG, "Noise: Not enough data to get sensible noise value");
noisePoly = null;
last_noise = -9999;
best_bg_estimate = -9999;
last_bg_estimate = -9999;
}
// store that we have processed up to this timestamp
noise_processed_till_timestamp = highest_bgreading_timestamp;
} catch (Exception e) {
Log.e(TAG, " Error with noise poly trend: " + e.toString());
}
} else {
Log.d(TAG, "Noise Cached noise timestamp: " + JoH.dateTimeText(noise_processed_till_timestamp));
}
if (!simple) {
// momentum
try {
if (d)
Log.d(TAG, "moment Poly list size: " + polyxList.size());
if (polyxList.size() > 1) {
final double[] polyys = PolyTrendLine.toPrimitiveFromList(polyyList);
final double[] polyxs = PolyTrendLine.toPrimitiveFromList(polyxList);
// set and evaluate poly curve models and select first best
double min_errors = 9999999;
for (TrendLine this_poly : polys) {
if (this_poly != null) {
if (poly == null)
poly = this_poly;
this_poly.setValues(polyys, polyxs);
if (this_poly.errorVarience() < min_errors) {
min_errors = this_poly.errorVarience();
poly = this_poly;
// if (d) Log.d(TAG, "set forecast best model to: " + poly.getClass().getSimpleName() + " with varience of: " + JoH.qs(poly.errorVarience(),14));
}
}
}
if (d)
Log.i(TAG, "set forecast best model to: " + poly.getClass().getSimpleName() + " with varience of: " + JoH.qs(poly.errorVarience(), 4));
} else {
if (d)
Log.i(TAG, "Not enough data for forecast model");
}
} catch (Exception e) {
Log.e(TAG, " Error with poly trend: " + e.toString());
}
try {
// show trend for whole bg reading area
if ((show_moment_working_line) && (poly != null)) {
for (BgReading bgReading : bgReadings) {
// only show working curve for last x hours to a
if (bgReading.timestamp > momentum_illustration_start) {
double polyPredicty = poly.predict(bgReading.timestamp);
// if (d) Log.d(TAG, "Poly predict: "+JoH.qs(polyPredict)+" @ "+JoH.qs(iob.timestamp));
if ((polyPredicty < highMark) && (polyPredicty > 0)) {
PointValue zv = new PointValue((float) (bgReading.timestamp / FUZZER), (float) polyPredicty);
polyBgValues.add(zv);
}
}
}
}
} catch (Exception e) {
Log.e(TAG, "Error creating back trend: " + e.toString());
}
// low estimator
// work backwards to see whether we think a low is estimated
low_occurs_at = -1;
try {
if ((predict_lows) && (prediction_enabled) && (poly != null)) {
final double offset = ActivityRecognizedService.raise_limit_due_to_vehicle_mode() ? unitized(ActivityRecognizedService.getVehicle_mode_adjust_mgdl()) : 0;
final double plow_now = JoH.ts();
// max look-ahead
double plow_timestamp = plow_now + (1000 * 60 * 99);
double polyPredicty = poly.predict(plow_timestamp);
Log.d(TAG, "Low predictor at max lookahead is: " + JoH.qs(polyPredicty));
// store that we have processed up to this timestamp
low_occurs_at_processed_till_timestamp = highest_bgreading_timestamp;
if (polyPredicty <= (lowMark + offset)) {
low_occurs_at = plow_timestamp;
final double lowMarkIndicator = (lowMark - (lowMark / 4));
// if (d) Log.d(TAG, "Poly predict: "+JoH.qs(polyPredict)+" @ "+JoH.qsz(iob.timestamp));
// step back in FUZZER-sized increments to find the earliest crossing
while (plow_timestamp > plow_now) {
plow_timestamp = plow_timestamp - FUZZER;
polyPredicty = poly.predict(plow_timestamp);
if (polyPredicty > (lowMark + offset)) {
PointValue zv = new PointValue((float) (plow_timestamp / FUZZER), (float) polyPredicty);
polyBgValues.add(zv);
} else {
low_occurs_at = plow_timestamp;
if (polyPredicty > lowMarkIndicator) {
polyBgValues.add(new PointValue((float) (plow_timestamp / FUZZER), (float) polyPredicty));
}
}
}
Log.i(TAG, "LOW PREDICTED AT: " + JoH.dateTimeText((long) low_occurs_at));
predictivehours = Math.max(predictivehours, (int) ((low_occurs_at - plow_now) / (60 * 60 * 1000)) + 1);
}
}
} catch (NullPointerException e) {
// Log.d(TAG,"Error with low prediction trend: "+e.toString());
}
final boolean show_noise_working_line;
if (last_noise > NOISE_TRIGGER || (last_noise > BgGraphBuilder.NOISE_TRIGGER_ULTRASENSITIVE && Pref.getBooleanDefaultFalse("engineering_mode") && Pref.getBooleanDefaultFalse("bg_compensate_noise_ultrasensitive"))) {
show_noise_working_line = true;
} else {
show_noise_working_line = prefs.getBoolean("show_noise_workings", false);
}
// noise debug
try {
// overlay noise curve
if ((show_noise_working_line) && (prediction_enabled) && (noisePoly != null)) {
for (BgReading bgReading : bgReadings) {
// only show working curve for last x hours to a
if ((bgReading.timestamp > oldest_noise_timestamp) && (bgReading.timestamp > last_calibration)) {
double polyPredicty = unitized(noisePoly.predict(bgReading.timestamp));
if (d)
Log.d(TAG, "noise Poly predict: " + JoH.qs(polyPredicty) + " @ " + JoH.qs(bgReading.timestamp));
if ((polyPredicty < highMark) && (polyPredicty > 0)) {
PointValue zv = new PointValue((float) (bgReading.timestamp / FUZZER), (float) polyPredicty);
noisePolyBgValues.add(zv);
}
}
}
}
} catch (Exception e) {
Log.e(TAG, "Error creating noise working trend: " + e.toString());
}
try {
// display treatment blobs and annotations
for (Treatments treatment : treatments) {
// blob height: default scaled baseline, else insulin units, clamped to chart range
double height = 6 * bgScale;
if (treatment.insulin > 0)
// some scaling needed I think
height = treatment.insulin;
if (height > highMark)
height = highMark;
if (height < lowMark)
height = lowMark;
final PointValueExtended pv = new PointValueExtended((float) (treatment.timestamp / FUZZER), (float) height);
String mylabel = "";
if (treatment.insulin > 0) {
if (mylabel.length() > 0)
mylabel = mylabel + System.getProperty("line.separator");
mylabel = mylabel + (JoH.qs(treatment.insulin, 2) + "u").replace(".0u", "u");
}
if (treatment.carbs > 0) {
if (mylabel.length() > 0)
mylabel = mylabel + System.getProperty("line.separator");
mylabel = mylabel + (JoH.qs(treatment.carbs, 1) + "g").replace(".0g", "g");
}
// standard label
pv.setLabel(mylabel);
// Log.d(TAG, "watchkeypad pv.mylabel: " + mylabel);
if ((treatment.notes != null) && (treatment.notes.length() > 0)) {
pv.note = treatment.notes;
// Log.d(TAG, "watchkeypad pv.note: " + pv.note + " mylabel: " + mylabel);
try {
// notes may embed an explicit vertical position as "pos:<number>"
final Pattern p = Pattern.compile(".*?pos:([0-9.]+).*");
final Matcher m = p.matcher(treatment.enteredBy);
if (m.matches()) {
pv.set(pv.getX(), (float) JoH.tolerantParseDouble(m.group(1)));
}
} catch (Exception e) {
Log.d(TAG, "Exception matching position: " + e);
}
} else {
pv.note = treatment.getBestShortText();
}
if (treatmentValues.size() > 0) {
// not sure if this >1 is right really - needs a review
PointValue lastpv = treatmentValues.get(treatmentValues.size() - 1);
// treatments within 10 minutes of each other share one merged label
if (Math.abs(lastpv.getX() - pv.getX()) < ((10 * 60 * 1000) / FUZZER)) {
// merge label with previous - Intelligent parsing and additions go here
if (d)
Log.d(TAG, "Merge treatment difference: " + Float.toString(lastpv.getX() - pv.getX()));
String lastlabel = String.valueOf(lastpv.getLabelAsChars());
if (lastlabel.length() > 0) {
lastpv.setLabel(lastlabel + "+" + mylabel);
pv.setLabel("");
}
}
}
// hover
treatmentValues.add(pv);
if (d)
Log.d(TAG, "Treatment total record: " + Double.toString(height) + " " + " timestamp: " + Long.toString(treatment.timestamp));
}
} catch (Exception e) {
Log.e(TAG, "Exception doing treatment values in bggraphbuilder: " + e.toString());
}
try {
// we need to check we actually have sufficient data for this
double predictedbg = -1000;
// NOTE(review): get(0) throws on an empty list and is caught by the outer
// catch below — relies on exception for the no-data case.
BgReading mylastbg = bgReadings.get(0);
double lasttimestamp = 0;
// this can be optimised to oncreate and onchange
// TODO handle this better now we use profile time blocks
Profile.reloadPreferencesIfNeeded(prefs);
try {
if (mylastbg != null) {
if (doMgdl) {
predictedbg = mylastbg.calculated_value;
} else {
predictedbg = mylastbg.calculated_value_mmol();
}
// if (d) Log.d(TAG, "Starting prediction with bg of: " + JoH.qs(predictedbg));
lasttimestamp = mylastbg.timestamp / FUZZER;
if (d)
Log.d(TAG, "Starting prediction with bg of: " + JoH.qs(predictedbg) + " secs ago: " + (JoH.ts() - mylastbg.timestamp) / 1000);
} else {
Log.i(TAG, "COULD NOT GET LAST BG READING FOR PREDICTION!!!");
}
} catch (Exception e) {
// could not get a bg reading
}
final double iobscale = 1 * bgScale;
final double cobscale = 0.2 * bgScale;
final double initial_predicted_bg = predictedbg;
final double relaxed_predicted_bg_limit = initial_predicted_bg * 1.20;
final double cob_insulin_max_draw_value = highMark * 1.20;
// final List<Iob> iobinfo_old = Treatments.ioBForGraph(numValues, (start_time * FUZZER));
// for test
final List<Iob> iobinfo = (simulation_enabled) ? Treatments.ioBForGraph_new(NUM_VALUES, (start_time * FUZZER)) : null;
// initial value in case there are no iob records
long fuzzed_timestamp = (long) end_time;
if (d)
Log.d(TAG, "Internal date timestamp: " + android.text.format.DateFormat.format("yyyy-MM-dd HH:mm:ss", new java.util.Date()));
if (d)
Log.d(TAG, "initial Fuzzed end timestamp: " + android.text.format.DateFormat.format("yyyy-MM-dd HH:mm:ss", fuzzed_timestamp * FUZZER));
if (d)
Log.d(TAG, "initial Fuzzed start timestamp: " + android.text.format.DateFormat.format("yyyy-MM-dd HH:mm:ss", (long) start_time * FUZZER));
if ((iobinfo != null) && (prediction_enabled) && (simulation_enabled)) {
// walk the simulated IoB/CoB timeline, plotting bars and stepping the
// BG prediction forward with insulin activity and carb impact
double predict_weight = 0.1;
boolean iob_shown_already = false;
for (Iob iob : iobinfo) {
// double activity = iob.activity;
if ((iob.iob > 0) || (iob.cob > 0) || (iob.jActivity > 0) || (iob.jCarbImpact > 0)) {
fuzzed_timestamp = iob.timestamp / FUZZER;
if (d)
Log.d(TAG, "iob timestamp: " + iob.timestamp);
if (iob.iob > Profile.minimum_shown_iob) {
double height = iob.iob * iobscale;
if (height > cob_insulin_max_draw_value)
height = cob_insulin_max_draw_value;
PointValue pv = new PointValue((float) fuzzed_timestamp, (float) height);
iobValues.add(pv);
// currently scaled by profile
double activityheight = iob.jActivity * 3;
if (activityheight > cob_insulin_max_draw_value)
activityheight = cob_insulin_max_draw_value;
PointValue av = new PointValue((float) fuzzed_timestamp, (float) activityheight);
activityValues.add(av);
}
if (iob.cob > 0) {
double height = iob.cob * cobscale;
if (height > cob_insulin_max_draw_value)
height = cob_insulin_max_draw_value;
PointValue pv = new PointValue((float) fuzzed_timestamp, (float) height);
if (d)
Log.d(TAG, "Cob total record: " + JoH.qs(height) + " " + JoH.qs(iob.cob) + " " + Float.toString(pv.getY()) + " @ timestamp: " + Long.toString(iob.timestamp));
// warning should not be hardcoded
cobValues.add(pv);
}
// do we actually need to calculate this within the loop - can we use only the last datum?
if (fuzzed_timestamp > (lasttimestamp)) {
double polyPredict = 0;
if (poly != null) {
try {
polyPredict = poly.predict(iob.timestamp);
if (d)
Log.d(TAG, "Poly predict: " + JoH.qs(polyPredict) + " @ " + JoH.dateTimeText(iob.timestamp));
if (show_moment_working_line) {
if (((polyPredict < highMark) || (polyPredict < initial_predicted_bg)) && (polyPredict > 0)) {
PointValue zv = new PointValue((float) fuzzed_timestamp, (float) polyPredict);
polyBgValues.add(zv);
}
}
} catch (Exception e) {
Log.e(TAG, "Got exception with poly predict: " + e.toString());
}
}
if (d)
Log.d(TAG, "Processing prediction: before: " + JoH.qs(predictedbg) + " activity: " + JoH.qs(iob.jActivity) + " jcarbimpact: " + JoH.qs(iob.jCarbImpact));
// lower bg by current insulin activity
predictedbg -= iob.jActivity;
predictedbg += iob.jCarbImpact;
double predictedbg_final = predictedbg;
// add momentum characteristics if we have them
final boolean momentum_smoothing = true;
if ((predict_use_momentum) && (polyPredict > 0)) {
// blend the simulated value with the momentum model; momentum
// influence decays as predict_weight grows each step
predictedbg_final = ((predictedbg * predict_weight) + polyPredict) / (predict_weight + 1);
if (momentum_smoothing)
predictedbg = predictedbg_final;
if (d)
Log.d(TAG, "forecast predict_weight: " + JoH.qs(predict_weight));
}
// from 0-infinity - // TODO account for step!!!
predict_weight = predict_weight * 2.5;
// we should pull in actual graph upper and lower limits here
if (((predictedbg_final < cob_insulin_max_draw_value) || (predictedbg_final < relaxed_predicted_bg_limit)) && (predictedbg_final > 0)) {
PointValue zv = new PointValue((float) fuzzed_timestamp, (float) predictedbg_final);
predictedBgValues.add(zv);
}
}
if (fuzzed_timestamp > end_time) {
// round up to nearest future hour - timestamps in minutes here
predictivehours = (int) (((fuzzed_timestamp - end_time) * FUZZER) / (1000 * 60 * 60)) + 1;
if (d)
Log.d(TAG, "Predictive hours updated to: " + predictivehours);
} else {
// KS Log.d(TAG, "IOB DEBUG: " + (fuzzed_timestamp - end_time) + " " + iob.iob);
if (!iob_shown_already && (Math.abs(fuzzed_timestamp - end_time) < 5) && (iob.iob > 0)) {
iob_shown_already = true;
// show current iob
// double position = 12.4 * bgScale; // this is for mmol - needs generic for mg/dl
// if (Math.abs(predictedbg - position) < (2 * bgScale)) {
// position = 7.0 * bgScale;
// }
// PointValue iv = new PointValue((float) fuzzed_timestamp, (float) position);
DecimalFormat df = new DecimalFormat("#");
df.setMaximumFractionDigits(2);
df.setMinimumIntegerDigits(1);
// iv.setLabel("IoB: " + df.format(iob.iob));
Home.updateStatusLine("iob", df.format(iob.iob));
// annotationValues.add(iv); // needs to be different value list so we can make annotation nicer
}
}
}
}
if (d)
Log.i(TAG, "Size of iob: " + Integer.toString(iobinfo.size()) + " Predictive hours: " + Integer.toString(predictivehours) + " Predicted end game change: " + JoH.qs(predictedbg - mylastbg.calculated_value_mmol()) + " Start bg: " + JoH.qs(mylastbg.calculated_value_mmol()) + " Predicted: " + JoH.qs(predictedbg));
// calculate bolus or carb adjustment - these should have granularity for injection / pump and thresholds
} else {
if (d)
Log.i(TAG, "iobinfo was null");
}
// evaluate end-game carbs/insulin recommendation (BWP) and publish it
double[] evaluation;
if (prediction_enabled && simulation_enabled) {
// if (doMgdl) {
// These routines need to understand how the profile is defined to use native instead of scaled
evaluation = Profile.evaluateEndGameMmol(predictedbg, lasttimestamp * FUZZER, end_time * FUZZER);
// } else {
// evaluation = Profile.evaluateEndGameMmol(predictedbg, lasttimestamp * FUZZER, end_time * FUZZER);
// }
String bwp_update = "";
keyStore.putL("bwp_last_insulin_timestamp", -1);
if (d)
Log.i(TAG, "Predictive BWP: Current prediction: " + JoH.qs(predictedbg) + " / carbs: " + JoH.qs(evaluation[0]) + " insulin: " + JoH.qs(evaluation[1]));
if (!BgReading.isDataStale()) {
if (((low_occurs_at < 1) || Pref.getBooleanDefaultFalse("always_show_bwp")) && (Pref.getBooleanDefaultFalse("show_bwp"))) {
if (evaluation[0] > Profile.minimum_carb_recommendation) {
// PointValue iv = new PointValue((float) fuzzed_timestamp, (float) (10 * bgScale));
// iv.setLabel("+Carbs: " + JoH.qs(evaluation[0], 0));
bwp_update = "\u224F" + " Carbs: " + JoH.qs(evaluation[0], 0);
// annotationValues.add(iv); // needs to be different value list so we can make annotation nicer
} else if (evaluation[1] > Profile.minimum_insulin_recommendation) {
// PointValue iv = new PointValue((float) fuzzed_timestamp, (float) (11 * bgScale));
// iv.setLabel("+Insulin: " + JoH.qs(evaluation[1], 1));
keyStore.putS("bwp_last_insulin", JoH.qs(evaluation[1], 1) + ((low_occurs_at > 0) ? ("!") : ""));
keyStore.putL("bwp_last_insulin_timestamp", JoH.tsl());
// warning symbol
bwp_update = "\u224F" + " Insulin: " + JoH.qs(evaluation[1], 1) + ((low_occurs_at > 0) ? (" " + "\u26A0") : "");
// annotationValues.add(iv); // needs to be different value list so we can make annotation nicer
}
}
}
// always send so we can blank if needed
Home.updateStatusLine("bwp", bwp_update);
}
} catch (Exception e) {
Log.e(TAG, "Exception doing iob values in bggraphbuilder: " + e.toString());
}
}
// if !simple
} finally {
readings_lock.unlock();
}
}
Use of com.eveningoutpost.dexdrip.Models.Treatments in project xDrip by NightscoutFoundation:
the class NightscoutUploader, method doRESTtreatmentDownload.
/**
 * Downloads treatment records from every configured Nightscout REST endpoint and merges
 * them into the local database: finger-stick glucose entries become {@code BloodTest}
 * records, and carb/insulin/note entries become {@code Treatments} records (created,
 * or updated in place when an existing record differs).
 *
 * Only API v1 endpoints ({@code .../api/v1/}) with an embedded api-secret are supported.
 * Requests send an If-Modified-Since header and additionally compare the response ETag,
 * so unchanged server data is skipped cheaply. Records that repeatedly fail to import
 * are remembered in {@code bad_uuids} / {@code bad_bloodtest_uuids} and skipped.
 *
 * Fixes in this revision: corrected "Nighscout" typo in the LAN-skip log message and
 * "pass phase" -> "pass phrase" in the missing-secret exception message.
 *
 * @param prefs preferences holding "cloud_storage_api_base" (space-separated list of
 *              base URLs, optionally with user-info secret) and related flags
 * @return true if any new or changed record was stored locally
 */
private synchronized boolean doRESTtreatmentDownload(SharedPreferences prefs) {
    final String baseURLSettings = prefs.getString("cloud_storage_api_base", "");
    final ArrayList<String> baseURIs = new ArrayList<>();
    boolean new_data = false;
    Log.d(TAG, "doRESTtreatmentDownload() starting run");
    try {
        // Multiple endpoints may be configured, separated by spaces; normalize each
        // so that it always ends with a trailing slash.
        for (String baseURLSetting : baseURLSettings.split(" ")) {
            String baseURL = baseURLSetting.trim();
            if (baseURL.isEmpty())
                continue;
            baseURIs.add(baseURL + (baseURL.endsWith("/") ? "" : "/"));
        }
    } catch (Exception e) {
        Log.e(TAG, "Unable to process API Base URL: " + e);
        return false;
    }
    // process a list of base uris
    for (String baseURI : baseURIs) {
        try {
            int apiVersion = 0;
            URI uri = new URI(baseURI);
            // Optionally skip private-LAN hosts when we are not connected to a LAN.
            // NOTE(review): only checks the 192.168.0.0/16 prefix, not 10/8 or 172.16/12.
            if ((uri.getHost().startsWith("192.168.")) && prefs.getBoolean("skip_lan_uploads_when_no_lan", true) && (!JoH.isLANConnected())) {
                Log.d(TAG, "Skipping Nightscout download from: " + uri.getHost() + " due to no LAN connection");
                continue;
            }
            if (uri.getPath().endsWith("/v1/"))
                apiVersion = 1;
            String baseURL;
            String secret = uri.getUserInfo();
            if ((secret == null || secret.isEmpty()) && apiVersion == 0) {
                baseURL = baseURI;
            } else if ((secret == null || secret.isEmpty())) {
                throw new Exception("Starting with API v1, a pass phrase is required");
            } else if (apiVersion > 0) {
                // Strip the user-info secret out of the URL before handing it to Retrofit;
                // it is sent hashed in a header instead.
                baseURL = baseURI.replaceFirst("//[^@]+@", "//");
            } else {
                throw new Exception("Unexpected baseURI: " + baseURI);
            }
            final Retrofit retrofit = new Retrofit.Builder().baseUrl(baseURL).client(client).build();
            final NightscoutService nightscoutService = retrofit.create(NightscoutService.class);
            final String checkurl = retrofit.baseUrl().url().toString();
            if (!isNightscoutCompatible(checkurl)) {
                Log.e(TAG, "Nightscout version: " + getNightscoutVersion(checkurl) + " on " + checkurl + " is not compatible with the Rest-API download feature!");
                continue;
            }
            if (apiVersion == 1) {
                // Nightscout expects the api-secret as its SHA-1 hex digest.
                final String hashedSecret = Hashing.sha1().hashBytes(secret.getBytes(Charsets.UTF_8)).toString();
                final Response<ResponseBody> r;
                if (hashedSecret != null) {
                    // update status if needed
                    doStatusUpdate(nightscoutService, retrofit.baseUrl().url().toString(), hashedSecret);
                    // per uri marker
                    final String LAST_MODIFIED_KEY = LAST_SUCCESS_TREATMENT_DOWNLOAD + CipherUtils.getMD5(uri.toString());
                    String last_modified_string = PersistentStore.getString(LAST_MODIFIED_KEY);
                    if (last_modified_string.equals(""))
                        last_modified_string = JoH.getRFC822String(0);
                    final long request_start = JoH.tsl();
                    r = nightscoutService.downloadTreatments(hashedSecret, last_modified_string).execute();
                    if ((r != null) && (r.raw().networkResponse().code() == HttpURLConnection.HTTP_NOT_MODIFIED)) {
                        Log.d(TAG, "Treatments on " + uri.getHost() + ":" + uri.getPort() + " not modified since: " + last_modified_string);
                        // skip further processing of this url
                        continue;
                    }
                    if ((r != null) && (r.isSuccess())) {
                        // Remember the server's Last-Modified (falling back to our request
                        // start time) for the next If-Modified-Since request.
                        last_modified_string = r.raw().header("Last-Modified", JoH.getRFC822String(request_start));
                        final String this_etag = r.raw().header("Etag", "");
                        if (this_etag.length() > 0) {
                            // older versions of nightscout don't support if-modified-since so check the etag for duplication
                            if (this_etag.equals(PersistentStore.getString(ETAG + LAST_MODIFIED_KEY))) {
                                Log.d(TAG, "Skipping Treatments on " + uri.getHost() + ":" + uri.getPort() + " due to etag duplicate: " + this_etag);
                                continue;
                            }
                            PersistentStore.setString(ETAG + LAST_MODIFIED_KEY, this_etag);
                        }
                        final String response = r.body().string();
                        if (d)
                            Log.d(TAG, "Response: " + response);
                        final JSONArray jsonArray = new JSONArray(response);
                        for (int i = 0; i < jsonArray.length(); i++) {
                            final JSONObject tr = (JSONObject) jsonArray.get(i);
                            final String etype = tr.has("eventType") ? tr.getString("eventType") : "<null>";
                            // TODO if we are using upsert then we should favour _id over uuid!?
                            // Derive a stable uuid: prefer the record's own "uuid", otherwise
                            // a deterministic UUID from the Nightscout "_id".
                            final String uuid = (tr.has("uuid") && (tr.getString("uuid") != null)) ? tr.getString("uuid") : UUID.nameUUIDFromBytes(tr.getString("_id").getBytes("UTF-8")).toString();
                            final String nightscout_id = (tr.getString("_id") == null) ? uuid : tr.getString("_id");
                            if (bad_uuids.contains(nightscout_id)) {
                                Log.d(TAG, "Skipping previously baulked uuid: " + nightscout_id);
                                continue;
                            }
                            if (d)
                                Log.d(TAG, "event: " + etype + "_id: " + nightscout_id + " uuid:" + uuid);
                            // Records we originally uploaded ourselves are tagged in
                            // "enteredBy"; don't re-import those as new local records.
                            boolean from_xdrip = false;
                            try {
                                if (tr.getString("enteredBy").startsWith(Treatments.XDRIP_TAG)) {
                                    from_xdrip = true;
                                    if (d)
                                        Log.d(TAG, "This record came from xDrip");
                                }
                            } catch (JSONException e) {
                                //
                            }
                            // extract blood test data if present
                            try {
                                if (!from_xdrip) {
                                    // getString throws JSONException when "glucoseType" is
                                    // absent, which skips this section via the catch below.
                                    if (tr.getString("glucoseType").equals("Finger")) {
                                        if (bad_bloodtest_uuids.contains(nightscout_id)) {
                                            Log.d(TAG, "Skipping baulked bloodtest nightscout id: " + nightscout_id);
                                            continue;
                                        }
                                        final BloodTest existing = BloodTest.byUUID(uuid);
                                        if (existing == null) {
                                            final long timestamp = DateUtil.tolerantFromISODateString(tr.getString("created_at")).getTime();
                                            double mgdl = JoH.tolerantParseDouble(tr.getString("glucose"));
                                            if (tr.getString("units").equals("mmol"))
                                                mgdl = mgdl * Constants.MMOLL_TO_MGDL;
                                            final BloodTest bt = BloodTest.create(timestamp, mgdl, tr.getString("enteredBy") + " " + VIA_NIGHTSCOUT_TAG);
                                            if (bt != null) {
                                                // override random uuid with nightscout one
                                                bt.uuid = uuid;
                                                bt.saveit();
                                                new_data = true;
                                                Log.ueh(TAG, "Received new Bloodtest data from Nightscout: " + BgGraphBuilder.unitized_string_with_units_static(mgdl) + " @ " + JoH.dateTimeText(timestamp));
                                            } else {
                                                Log.d(TAG, "Error creating bloodtest record: " + mgdl + " mgdl " + tr.toString());
                                                bad_bloodtest_uuids.add(nightscout_id);
                                            }
                                        } else {
                                            if (d)
                                                Log.d(TAG, "Already a bloodtest with uuid: " + uuid);
                                        }
                                    } else {
                                        if (JoH.quietratelimit("blood-test-type-finger", 2)) {
                                            Log.e(TAG, "Cannot use bloodtest which is not type Finger: " + tr.getString("glucoseType"));
                                        }
                                    }
                                }
                            } catch (JSONException e) {
                                // Log.d(TAG, "json processing: " + e);
                            }
                            // extract treatment data if present; each field is optional
                            // so missing keys simply leave the defaults in place.
                            double carbs = 0;
                            double insulin = 0;
                            String notes = null;
                            try {
                                carbs = tr.getDouble("carbs");
                            } catch (JSONException e) {
                                // Log.d(TAG, "json processing: " + e);
                            }
                            try {
                                insulin = tr.getDouble("insulin");
                            } catch (JSONException e) {
                                // Log.d(TAG, "json processing: " + e);
                            }
                            try {
                                notes = tr.getString("notes");
                            } catch (JSONException e) {
                                // Log.d(TAG, "json processing: " + e);
                            }
                            // Discard boiler-plate notes emitted by other uploaders.
                            if ((notes != null) && ((notes.equals("AndroidAPS started") || notes.equals("null") || (notes.equals("Bolus Std")))))
                                notes = null;
                            if ((carbs > 0) || (insulin > 0) || (notes != null)) {
                                final long timestamp = DateUtil.tolerantFromISODateString(tr.getString("created_at")).getTime();
                                if (timestamp > 0) {
                                    if (d)
                                        Log.d(TAG, "Treatment: Carbs: " + carbs + " Insulin: " + insulin + " timestamp: " + timestamp);
                                    Treatments existing = Treatments.byuuid(nightscout_id);
                                    if (existing == null)
                                        existing = Treatments.byuuid(uuid);
                                    if ((existing == null) && (!from_xdrip)) {
                                        // check for close timestamp duplicates perhaps
                                        existing = Treatments.byTimestamp(timestamp, 60000);
                                        if (!((existing != null) && (JoH.roundDouble(existing.insulin, 2) == JoH.roundDouble(insulin, 2)) && (JoH.roundDouble(existing.carbs, 2) == JoH.roundDouble(carbs, 2)) && ((existing.notes == null && notes == null) || ((existing.notes != null) && existing.notes.equals(notes != null ? notes : ""))))) {
                                            Log.ueh(TAG, "New Treatment from Nightscout: Carbs: " + carbs + " Insulin: " + insulin + " timestamp: " + JoH.dateTimeText(timestamp) + ((notes != null) ? " Note: " + notes : ""));
                                            final Treatments t;
                                            if ((carbs > 0) || (insulin > 0)) {
                                                t = Treatments.create(carbs, insulin, timestamp, nightscout_id);
                                                if (notes != null)
                                                    t.notes = notes;
                                            } else {
                                                t = Treatments.create_note(notes, timestamp, -1, nightscout_id);
                                                if (t == null) {
                                                    Log.d(TAG, "Create note baulked and returned null, so skipping");
                                                    bad_uuids.add(nightscout_id);
                                                    continue;
                                                }
                                            }
                                            // t.uuid = nightscout_id; // replace with nightscout uuid
                                            try {
                                                t.enteredBy = tr.getString("enteredBy") + " " + VIA_NIGHTSCOUT_TAG;
                                            } catch (JSONException e) {
                                                t.enteredBy = VIA_NIGHTSCOUT_TAG;
                                            }
                                            t.save();
                                            // pushTreatmentSync(t, false);
                                            if (Home.get_show_wear_treatments())
                                                pushTreatmentSyncToWatch(t, true);
                                            new_data = true;
                                        } else {
                                            Log.e(TAG, "Skipping treatment as it appears identical to one we already have: " + JoH.dateTimeText(timestamp) + " " + insulin + " " + carbs + " " + notes);
                                        }
                                    } else {
                                        if (existing != null) {
                                            if (d)
                                                Log.d(TAG, "Treatment with uuid: " + uuid + " / " + nightscout_id + " already exists");
                                            if (notes == null)
                                                notes = "";
                                            if (existing.notes == null)
                                                existing.notes = "";
                                            // Update in place when any material field differs
                                            // (timestamps compared at second granularity).
                                            if ((existing.carbs != carbs) || (existing.insulin != insulin) || ((existing.timestamp / Constants.SECOND_IN_MS) != (timestamp / Constants.SECOND_IN_MS)) || (!existing.notes.contains(notes))) {
                                                Log.ueh(TAG, "Treatment changes from Nightscout: " + carbs + " Insulin: " + insulin + " timestamp: " + JoH.dateTimeText(timestamp) + " " + notes + " " + " vs " + existing.carbs + " " + existing.insulin + " " + JoH.dateTimeText(existing.timestamp) + " " + existing.notes);
                                                existing.carbs = carbs;
                                                existing.insulin = insulin;
                                                existing.timestamp = timestamp;
                                                existing.created_at = DateUtil.toISOString(timestamp);
                                                if (existing.notes.length() > 0) {
                                                    existing.notes += " \u2192 " + notes;
                                                } else {
                                                    existing.notes = notes;
                                                }
                                                existing.save();
                                                if (Home.get_show_wear_treatments())
                                                    pushTreatmentSyncToWatch(existing, false);
                                                new_data = true;
                                            }
                                        } else {
                                            Log.d(TAG, "Skipping record creation as original source is xDrip");
                                        }
                                    }
                                }
                            }
                        }
                        // Persist the marker only after the whole batch processed cleanly.
                        PersistentStore.setString(LAST_MODIFIED_KEY, last_modified_string);
                        checkGzipSupport(r);
                    } else {
                        Log.d(TAG, "Failed to get treatments from: " + baseURI);
                    }
                } else {
                    // Defensive: hashedSecret is a hex digest string and cannot be null,
                    // so this branch is effectively unreachable.
                    Log.d(TAG, "Old api version not supported");
                }
            }
        } catch (Exception e) {
            // Per-URI failure: report and move on to the next configured endpoint.
            String msg = "Unable to do REST API Download " + e + " " + e.getMessage() + " url: " + baseURI;
            handleRestFailure(msg);
        }
    }
    Log.d(TAG, "doRESTtreatmentDownload() finishing run");
    return new_data;
}
Aggregations