
Example 36 with Calibration

Use of com.eveningoutpost.dexdrip.Models.Calibration in project xDrip by NightscoutFoundation.

The class BgGraphBuilder, method addBgReadingValues.

private synchronized void addBgReadingValues(final boolean simple) {
    if (readings_lock.isLocked()) {
        Log.d(TAG, "BgReadings lock is currently held");
    }
    readings_lock.lock();
    try {
        if (plugin_adjusted) {
            Log.i(TAG, "Reloading as Plugin modified data: " + JoH.backTrace(1) + " size:" + bgReadings.size());
            bgReadings.clear();
            bgReadings.addAll(BgReading.latestForGraph(loaded_numValues, loaded_start, loaded_end));
        } else {
            // Log.d(TAG, "not adjusted");
        }
        filteredValues.clear();
        rawInterpretedValues.clear();
        iobValues.clear();
        activityValues.clear();
        cobValues.clear();
        predictedBgValues.clear();
        polyBgValues.clear();
        noisePolyBgValues.clear();
        annotationValues.clear();
        treatmentValues.clear();
        highValues.clear();
        lowValues.clear();
        inRangeValues.clear();
        calibrationValues.clear();
        bloodTestValues.clear();
        pluginValues.clear();
        final double bgScale = bgScale();
        final double now = JoH.ts();
        // most recent bgreading timestamp we have
        long highest_bgreading_timestamp = -1;
        // 12 minutes // TODO MAKE PREFERENCE?
        double trend_start_working = now - (1000 * 60 * 12);
        if (bgReadings.size() > 0) {
            highest_bgreading_timestamp = bgReadings.get(0).timestamp;
            final double ms_since_last_reading = now - highest_bgreading_timestamp;
            if (ms_since_last_reading < 500000) {
                // push back start of trend calc window
                trend_start_working -= ms_since_last_reading;
                Log.d(TAG, "Pushed back trend start by: " + JoH.qs(ms_since_last_reading / 1000) + " secs - last reading: " + JoH.dateTimeText(highest_bgreading_timestamp));
            }
        }
        final double trendstart = trend_start_working;
        // 20 minutes // TODO MAKE PREFERENCE
        final double noise_trendstart = now - (1000 * 60 * 20);
        double oldest_noise_timestamp = now;
        double newest_noise_timestamp = 0;
        TrendLine[] polys = new TrendLine[5];
        polys[0] = new PolyTrendLine(1);
        // polys[1] = new PolyTrendLine(2);
        polys[1] = new Forecast.LogTrendLine();
        polys[2] = new Forecast.ExpTrendLine();
        polys[3] = new Forecast.PowerTrendLine();
        TrendLine poly = null;
        final List<Double> polyxList = new ArrayList<>();
        final List<Double> polyyList = new ArrayList<>();
        final List<Double> noise_polyxList = new ArrayList<>();
        final List<Double> noise_polyyList = new ArrayList<>();
        // 8 hours
        final double avg1start = now - (1000 * 60 * 60 * 8);
        // 2 hours
        final double momentum_illustration_start = now - (1000 * 60 * 60 * 2);
        avg1startfuzzed = avg1start / FUZZER;
        avg1value = 0;
        avg1counter = 0;
        avg2value = 0;
        avg2counter = 0;
        double last_calibration = 0;
        double last_bloodtest = 0;
        if (doMgdl) {
            Profile.scale_factor = Constants.MMOLL_TO_MGDL;
        } else {
            Profile.scale_factor = 1;
        }
        final long close_to_side_time = (long) (end_time * FUZZER) - (Constants.MINUTE_IN_MS * 10);
        // enumerate calibrations
        try {
            for (Calibration calibration : calibrations) {
                if (calibration.timestamp < (start_time * FUZZER))
                    break;
                if (calibration.slope_confidence != 0) {
                    final long adjusted_timestamp = (calibration.timestamp + (AddCalibration.estimatedInterstitialLagSeconds * 1000));
                    final PointValueExtended this_point = new PointValueExtended((float) (adjusted_timestamp / FUZZER), (float) unitized(calibration.bg));
                    if (adjusted_timestamp >= close_to_side_time) {
                        predictivehours = Math.max(predictivehours, 1);
                    }
                    this_point.real_timestamp = calibration.timestamp;
                    calibrationValues.add(this_point);
                    if (calibration.timestamp > last_calibration) {
                        last_calibration = calibration.timestamp;
                    }
                }
            }
        } catch (Exception e) {
            Log.e(TAG, "Exception doing calibration values in bggraphbuilder: " + e.toString());
        }
        // enumerate blood tests
        try {
            for (BloodTest bloodtest : bloodtests) {
                final long adjusted_timestamp = (bloodtest.timestamp + (AddCalibration.estimatedInterstitialLagSeconds * 1000));
                final PointValueExtended this_point = new PointValueExtended((float) (adjusted_timestamp / FUZZER), (float) unitized(bloodtest.mgdl));
                this_point.type = PointValueExtended.BloodTest;
                this_point.uuid = bloodtest.uuid;
                this_point.real_timestamp = bloodtest.timestamp;
                // exclude any which have been used for calibration
                boolean matches = false;
                for (PointValue calibration_point : calibrationValues) {
                    if ((Math.abs(calibration_point.getX() - this_point.getX())) <= ((AddCalibration.estimatedInterstitialLagSeconds * 1000) / FUZZER) && (calibration_point.getY() == calibration_point.getY())) {
                        matches = true;
                        break;
                    }
                }
                if (!matches)
                    bloodTestValues.add(this_point);
                if (bloodtest.timestamp > last_bloodtest) {
                    last_bloodtest = bloodtest.timestamp;
                }
                if (adjusted_timestamp >= close_to_side_time) {
                    predictivehours = Math.max(predictivehours, 1);
                }
            }
        } catch (Exception e) {
            Log.e(TAG, "Exception doing calibration values in bggraphbuilder: " + e.toString());
        }
        final boolean has_filtered = DexCollectionType.hasFiltered();
        final boolean predict_use_momentum = prefs.getBoolean("predict_use_momentum", true);
        final boolean show_moment_working_line = prefs.getBoolean("show_momentum_working_line", false);
        final boolean interpret_raw = prefs.getBoolean("interpret_raw", false);
        final boolean show_filtered = prefs.getBoolean("show_filtered_curve", false) && has_filtered;
        final boolean predict_lows = prefs.getBoolean("predict_lows", true);
        final boolean show_plugin = prefs.getBoolean("plugin_plot_on_graph", false);
        final boolean glucose_from_plugin = prefs.getBoolean("display_glucose_from_plugin", false);
        if ((Home.get_follower()) && (bgReadings.size() < 3)) {
            GcmActivity.requestBGsync();
        }
        final CalibrationAbstract plugin = (show_plugin) ? PluggableCalibration.getCalibrationPluginFromPreferences() : null;
        CalibrationAbstract.CalibrationData cd = (plugin != null) ? plugin.getCalibrationData() : null;
        int cdposition = 0;
        if ((glucose_from_plugin) && (cd != null)) {
            // plugin will be adjusting data
            plugin_adjusted = true;
        }
        for (final BgReading bgReading : bgReadings) {
            if ((cd != null) && (calibrations.size() > 0)) {
                while ((bgReading.timestamp < calibrations.get(cdposition).timestamp) || (calibrations.get(cdposition).slope == 0)) {
                    Log.d(TAG, "BG reading earlier than calibration at index: " + cdposition + "  " + JoH.dateTimeText(bgReading.timestamp) + " cal: " + JoH.dateTimeText(calibrations.get(cdposition).timestamp));
                    if (cdposition < calibrations.size() - 1) {
                        cdposition++;
                        // cd = (plugin != null) ? plugin.getCalibrationData(calibrations.get(cdposition).timestamp) : null;
                        final CalibrationAbstract.CalibrationData oldcd = cd;
                        cd = plugin.getCalibrationData(calibrations.get(cdposition).timestamp);
                        if (cd == null) {
                            Log.d(TAG, "cd went to null during adjustment - likely graph spans multiple sensors");
                            cd = oldcd;
                        }
                        Log.d(TAG, "Now using calibration from: " + JoH.dateTimeText(calibrations.get(cdposition).timestamp) + " slope: " + cd.slope + " intercept: " + cd.intercept);
                    } else {
                        Log.d(TAG, "No more calibrations to choose from");
                        break;
                    }
                }
            }
            // swap main and plugin plot if display glucose is from plugin
            if ((glucose_from_plugin) && (cd != null)) {
                pluginValues.add(new PointValue((float) (bgReading.timestamp / FUZZER), (float) unitized(bgReading.calculated_value)));
                // recalculate from plugin - beware floating / cached references!
                bgReading.calculated_value = plugin.getGlucoseFromBgReading(bgReading, cd);
                bgReading.filtered_calculated_value = plugin.getGlucoseFromFilteredBgReading(bgReading, cd);
            }
            if ((show_filtered) && (bgReading.filtered_calculated_value > 0) && (bgReading.filtered_calculated_value != bgReading.calculated_value)) {
                filteredValues.add(new PointValue((float) ((bgReading.timestamp - timeshift) / FUZZER), (float) unitized(bgReading.filtered_calculated_value)));
            }
            if ((interpret_raw && (bgReading.raw_calculated > 0))) {
                rawInterpretedValues.add(new PointValue((float) (bgReading.timestamp / FUZZER), (float) unitized(bgReading.raw_calculated)));
            }
            if ((!glucose_from_plugin) && (plugin != null) && (cd != null)) {
                pluginValues.add(new PointValue((float) (bgReading.timestamp / FUZZER), (float) unitized(plugin.getGlucoseFromBgReading(bgReading, cd))));
            }
            if (bgReading.calculated_value >= 400) {
                highValues.add(new PointValue((float) (bgReading.timestamp / FUZZER), (float) unitized(400)));
            } else if (unitized(bgReading.calculated_value) >= highMark) {
                highValues.add(new PointValue((float) (bgReading.timestamp / FUZZER), (float) unitized(bgReading.calculated_value)));
            } else if (unitized(bgReading.calculated_value) >= lowMark) {
                inRangeValues.add(new PointValue((float) (bgReading.timestamp / FUZZER), (float) unitized(bgReading.calculated_value)));
            } else if (bgReading.calculated_value >= 40) {
                lowValues.add(new PointValue((float) (bgReading.timestamp / FUZZER), (float) unitized(bgReading.calculated_value)));
            } else if (bgReading.calculated_value > 13) {
                lowValues.add(new PointValue((float) (bgReading.timestamp / FUZZER), (float) unitized(40)));
            }
            avg2counter++;
            avg2value += bgReading.calculated_value;
            if (bgReading.timestamp > avg1start) {
                avg1counter++;
                avg1value += bgReading.calculated_value;
            }
            // noise calculator
            if ((!simple || (noise_processed_till_timestamp < highest_bgreading_timestamp)) && (bgReading.timestamp > noise_trendstart) && (bgReading.timestamp > last_calibration)) {
                if (has_filtered && (bgReading.filtered_calculated_value > 0) && (bgReading.filtered_calculated_value != bgReading.calculated_value)) {
                    final double shifted_timestamp = bgReading.timestamp - timeshift;
                    if (shifted_timestamp > last_calibration) {
                        if (shifted_timestamp < oldest_noise_timestamp)
                            oldest_noise_timestamp = shifted_timestamp;
                        noise_polyxList.add(shifted_timestamp);
                        noise_polyyList.add((bgReading.filtered_calculated_value));
                        if (d)
                            Log.d(TAG, "flt noise poly Added: " + noise_polyxList.size() + " " + JoH.qs(noise_polyxList.get(noise_polyxList.size() - 1)) + " / " + JoH.qs(noise_polyyList.get(noise_polyyList.size() - 1), 2));
                    }
                }
                if (bgReading.calculated_value > 0) {
                    if (bgReading.timestamp < oldest_noise_timestamp)
                        oldest_noise_timestamp = bgReading.timestamp;
                    if (bgReading.timestamp > newest_noise_timestamp) {
                        newest_noise_timestamp = bgReading.timestamp;
                        original_value = bgReading.calculated_value;
                    }
                    noise_polyxList.add((double) bgReading.timestamp);
                    noise_polyyList.add((bgReading.calculated_value));
                    if (d)
                        Log.d(TAG, "raw noise poly Added: " + noise_polyxList.size() + " " + JoH.qs(noise_polyxList.get(noise_polyxList.size() - 1)) + " / " + JoH.qs(noise_polyyList.get(noise_polyyList.size() - 1), 2));
                }
            }
            // momentum trend
            if (!simple && (bgReading.timestamp > trendstart) && (bgReading.timestamp > last_calibration)) {
                if (has_filtered && (bgReading.filtered_calculated_value > 0) && (bgReading.filtered_calculated_value != bgReading.calculated_value)) {
                    polyxList.add((double) bgReading.timestamp - timeshift);
                    polyyList.add(unitized(bgReading.filtered_calculated_value));
                }
                if (bgReading.calculated_value > 0) {
                    polyxList.add((double) bgReading.timestamp);
                    polyyList.add(unitized(bgReading.calculated_value));
                }
                if (d)
                    Log.d(TAG, "poly Added: " + JoH.qs(polyxList.get(polyxList.size() - 1)) + " / " + JoH.qs(polyyList.get(polyyList.size() - 1), 2));
            }
        }
        if (avg1counter > 0) {
            avg1value = avg1value / avg1counter;
        }
        if (avg2counter > 0) {
            avg2value = avg2value / avg2counter;
        }
        // always calculate noise if needed
        if (noise_processed_till_timestamp < highest_bgreading_timestamp) {
            // noise evaluate
            Log.d(TAG, "Noise: Processing new data for noise: " + JoH.dateTimeText(noise_processed_till_timestamp) + " vs now: " + JoH.dateTimeText(highest_bgreading_timestamp));
            try {
                if (d)
                    Log.d(TAG, "noise Poly list size: " + noise_polyxList.size());
                // TODO Impossible to satisfy noise evaluation size with only raw data do we want it with raw only??
                if (noise_polyxList.size() > 5) {
                    noisePoly = new PolyTrendLine(2);
                    final double[] noise_polyys = PolyTrendLine.toPrimitiveFromList(noise_polyyList);
                    final double[] noise_polyxs = PolyTrendLine.toPrimitiveFromList(noise_polyxList);
                    noisePoly.setValues(noise_polyys, noise_polyxs);
                    last_noise = noisePoly.errorVarience();
                    if (newest_noise_timestamp > oldest_noise_timestamp) {
                        best_bg_estimate = noisePoly.predict(newest_noise_timestamp);
                        last_bg_estimate = noisePoly.predict(newest_noise_timestamp - DEXCOM_PERIOD);
                    } else {
                        best_bg_estimate = -99;
                        last_bg_estimate = -99;
                    }
                    Log.i(TAG, "Noise: Poly Error Varience: " + JoH.qs(last_noise, 5));
                } else {
                    Log.i(TAG, "Noise: Not enough data to get sensible noise value");
                    noisePoly = null;
                    last_noise = -9999;
                    best_bg_estimate = -9999;
                    last_bg_estimate = -9999;
                }
                // store that we have processed up to this timestamp
                noise_processed_till_timestamp = highest_bgreading_timestamp;
            } catch (Exception e) {
                Log.e(TAG, " Error with noise poly trend: " + e.toString());
            }
        } else {
            Log.d(TAG, "Noise Cached noise timestamp: " + JoH.dateTimeText(noise_processed_till_timestamp));
        }
        if (!simple) {
            // momentum
            try {
                if (d)
                    Log.d(TAG, "moment Poly list size: " + polyxList.size());
                if (polyxList.size() > 1) {
                    final double[] polyys = PolyTrendLine.toPrimitiveFromList(polyyList);
                    final double[] polyxs = PolyTrendLine.toPrimitiveFromList(polyxList);
                    // set and evaluate poly curve models and select first best
                    double min_errors = 9999999;
                    for (TrendLine this_poly : polys) {
                        if (this_poly != null) {
                            if (poly == null)
                                poly = this_poly;
                            this_poly.setValues(polyys, polyxs);
                            if (this_poly.errorVarience() < min_errors) {
                                min_errors = this_poly.errorVarience();
                                poly = this_poly;
                            // if (d) Log.d(TAG, "set forecast best model to: " + poly.getClass().getSimpleName() + " with varience of: " + JoH.qs(poly.errorVarience(),14));
                            }
                        }
                    }
                    if (d)
                        Log.i(TAG, "set forecast best model to: " + poly.getClass().getSimpleName() + " with varience of: " + JoH.qs(poly.errorVarience(), 4));
                } else {
                    if (d)
                        Log.i(TAG, "Not enough data for forecast model");
                }
            } catch (Exception e) {
                Log.e(TAG, " Error with poly trend: " + e.toString());
            }
            try {
                // show trend for whole bg reading area
                if ((show_moment_working_line) && (poly != null)) {
                    for (BgReading bgReading : bgReadings) {
                        // only show working curve for last x hours to a
                        if (bgReading.timestamp > momentum_illustration_start) {
                            double polyPredicty = poly.predict(bgReading.timestamp);
                            // if (d) Log.d(TAG, "Poly predict: "+JoH.qs(polyPredict)+" @ "+JoH.qs(iob.timestamp));
                            if ((polyPredicty < highMark) && (polyPredicty > 0)) {
                                PointValue zv = new PointValue((float) (bgReading.timestamp / FUZZER), (float) polyPredicty);
                                polyBgValues.add(zv);
                            }
                        }
                    }
                }
            } catch (Exception e) {
                Log.e(TAG, "Error creating back trend: " + e.toString());
            }
            // low estimator
            // work backwards to see whether we think a low is estimated
            low_occurs_at = -1;
            try {
                if ((predict_lows) && (prediction_enabled) && (poly != null)) {
                    final double offset = ActivityRecognizedService.raise_limit_due_to_vehicle_mode() ? unitized(ActivityRecognizedService.getVehicle_mode_adjust_mgdl()) : 0;
                    final double plow_now = JoH.ts();
                    // max look-ahead
                    double plow_timestamp = plow_now + (1000 * 60 * 99);
                    double polyPredicty = poly.predict(plow_timestamp);
                    Log.d(TAG, "Low predictor at max lookahead is: " + JoH.qs(polyPredicty));
                    // store that we have processed up to this timestamp
                    low_occurs_at_processed_till_timestamp = highest_bgreading_timestamp;
                    if (polyPredicty <= (lowMark + offset)) {
                        low_occurs_at = plow_timestamp;
                        final double lowMarkIndicator = (lowMark - (lowMark / 4));
                        // if (d) Log.d(TAG, "Poly predict: "+JoH.qs(polyPredict)+" @ "+JoH.qsz(iob.timestamp));
                        while (plow_timestamp > plow_now) {
                            plow_timestamp = plow_timestamp - FUZZER;
                            polyPredicty = poly.predict(plow_timestamp);
                            if (polyPredicty > (lowMark + offset)) {
                                PointValue zv = new PointValue((float) (plow_timestamp / FUZZER), (float) polyPredicty);
                                polyBgValues.add(zv);
                            } else {
                                low_occurs_at = plow_timestamp;
                                if (polyPredicty > lowMarkIndicator) {
                                    polyBgValues.add(new PointValue((float) (plow_timestamp / FUZZER), (float) polyPredicty));
                                }
                            }
                        }
                        Log.i(TAG, "LOW PREDICTED AT: " + JoH.dateTimeText((long) low_occurs_at));
                        predictivehours = Math.max(predictivehours, (int) ((low_occurs_at - plow_now) / (60 * 60 * 1000)) + 1);
                    }
                }
            } catch (NullPointerException e) {
            // Log.d(TAG,"Error with low prediction trend: "+e.toString());
            }
            final boolean show_noise_working_line;
            if (last_noise > NOISE_TRIGGER || (last_noise > BgGraphBuilder.NOISE_TRIGGER_ULTRASENSITIVE && Pref.getBooleanDefaultFalse("engineering_mode") && Pref.getBooleanDefaultFalse("bg_compensate_noise_ultrasensitive"))) {
                show_noise_working_line = true;
            } else {
                show_noise_working_line = prefs.getBoolean("show_noise_workings", false);
            }
            // noise debug
            try {
                // overlay noise curve
                if ((show_noise_working_line) && (prediction_enabled) && (noisePoly != null)) {
                    for (BgReading bgReading : bgReadings) {
                        // only show working curve for last x hours to a
                        if ((bgReading.timestamp > oldest_noise_timestamp) && (bgReading.timestamp > last_calibration)) {
                            double polyPredicty = unitized(noisePoly.predict(bgReading.timestamp));
                            if (d)
                                Log.d(TAG, "noise Poly predict: " + JoH.qs(polyPredicty) + " @ " + JoH.qs(bgReading.timestamp));
                            if ((polyPredicty < highMark) && (polyPredicty > 0)) {
                                PointValue zv = new PointValue((float) (bgReading.timestamp / FUZZER), (float) polyPredicty);
                                noisePolyBgValues.add(zv);
                            }
                        }
                    }
                }
            } catch (Exception e) {
                Log.e(TAG, "Error creating noise working trend: " + e.toString());
            }
            try {
                // display treatment blobs and annotations
                for (Treatments treatment : treatments) {
                    double height = 6 * bgScale;
                    if (treatment.insulin > 0)
                        // some scaling needed I think
                        height = treatment.insulin;
                    if (height > highMark)
                        height = highMark;
                    if (height < lowMark)
                        height = lowMark;
                    final PointValueExtended pv = new PointValueExtended((float) (treatment.timestamp / FUZZER), (float) height);
                    String mylabel = "";
                    if (treatment.insulin > 0) {
                        if (mylabel.length() > 0)
                            mylabel = mylabel + System.getProperty("line.separator");
                        mylabel = mylabel + (JoH.qs(treatment.insulin, 2) + "u").replace(".0u", "u");
                    }
                    if (treatment.carbs > 0) {
                        if (mylabel.length() > 0)
                            mylabel = mylabel + System.getProperty("line.separator");
                        mylabel = mylabel + (JoH.qs(treatment.carbs, 1) + "g").replace(".0g", "g");
                    }
                    // standard label
                    pv.setLabel(mylabel);
                    // Log.d(TAG, "watchkeypad pv.mylabel: " + mylabel);
                    if ((treatment.notes != null) && (treatment.notes.length() > 0)) {
                        pv.note = treatment.notes;
                        // Log.d(TAG, "watchkeypad pv.note: " + pv.note + " mylabel: " + mylabel);
                        try {
                            final Pattern p = Pattern.compile(".*?pos:([0-9.]+).*");
                            final Matcher m = p.matcher(treatment.enteredBy);
                            if (m.matches()) {
                                pv.set(pv.getX(), (float) JoH.tolerantParseDouble(m.group(1)));
                            }
                        } catch (Exception e) {
                            Log.d(TAG, "Exception matching position: " + e);
                        }
                    } else {
                        pv.note = treatment.getBestShortText();
                    }
                    if (treatmentValues.size() > 0) {
                        // not sure if this >1 is right really - needs a review
                        PointValue lastpv = treatmentValues.get(treatmentValues.size() - 1);
                        if (Math.abs(lastpv.getX() - pv.getX()) < ((10 * 60 * 1000) / FUZZER)) {
                            // merge label with previous - Intelligent parsing and additions go here
                            if (d)
                                Log.d(TAG, "Merge treatment difference: " + Float.toString(lastpv.getX() - pv.getX()));
                            String lastlabel = String.valueOf(lastpv.getLabelAsChars());
                            if (lastlabel.length() > 0) {
                                lastpv.setLabel(lastlabel + "+" + mylabel);
                                pv.setLabel("");
                            }
                        }
                    }
                    // hover
                    treatmentValues.add(pv);
                    if (d)
                        Log.d(TAG, "Treatment total record: " + Double.toString(height) + " " + " timestamp: " + Long.toString(treatment.timestamp));
                }
            } catch (Exception e) {
                Log.e(TAG, "Exception doing treatment values in bggraphbuilder: " + e.toString());
            }
            try {
                // we need to check we actually have sufficient data for this
                double predictedbg = -1000;
                BgReading mylastbg = bgReadings.get(0);
                double lasttimestamp = 0;
                // this can be optimised to oncreate and onchange
                // TODO handle this better now we use profile time blocks
                Profile.reloadPreferencesIfNeeded(prefs);
                try {
                    if (mylastbg != null) {
                        if (doMgdl) {
                            predictedbg = mylastbg.calculated_value;
                        } else {
                            predictedbg = mylastbg.calculated_value_mmol();
                        }
                        // if (d) Log.d(TAG, "Starting prediction with bg of: " + JoH.qs(predictedbg));
                        lasttimestamp = mylastbg.timestamp / FUZZER;
                        if (d)
                            Log.d(TAG, "Starting prediction with bg of: " + JoH.qs(predictedbg) + " secs ago: " + (JoH.ts() - mylastbg.timestamp) / 1000);
                    } else {
                        Log.i(TAG, "COULD NOT GET LAST BG READING FOR PREDICTION!!!");
                    }
                } catch (Exception e) {
                // could not get a bg reading
                }
                final double iobscale = 1 * bgScale;
                final double cobscale = 0.2 * bgScale;
                final double initial_predicted_bg = predictedbg;
                final double relaxed_predicted_bg_limit = initial_predicted_bg * 1.20;
                final double cob_insulin_max_draw_value = highMark * 1.20;
                // final List<Iob> iobinfo_old = Treatments.ioBForGraph(numValues, (start_time * FUZZER));
                // for test
                final List<Iob> iobinfo = (simulation_enabled) ? Treatments.ioBForGraph_new(NUM_VALUES, (start_time * FUZZER)) : null;
                // initial value in case there are no iob records
                long fuzzed_timestamp = (long) end_time;
                if (d)
                    Log.d(TAG, "Internal date timestamp: " + android.text.format.DateFormat.format("yyyy-MM-dd HH:mm:ss", new java.util.Date()));
                if (d)
                    Log.d(TAG, "initial Fuzzed end timestamp: " + android.text.format.DateFormat.format("yyyy-MM-dd HH:mm:ss", fuzzed_timestamp * FUZZER));
                if (d)
                    Log.d(TAG, "initial Fuzzed start timestamp: " + android.text.format.DateFormat.format("yyyy-MM-dd HH:mm:ss", (long) start_time * FUZZER));
                if ((iobinfo != null) && (prediction_enabled) && (simulation_enabled)) {
                    double predict_weight = 0.1;
                    boolean iob_shown_already = false;
                    for (Iob iob : iobinfo) {
                        // double activity = iob.activity;
                        if ((iob.iob > 0) || (iob.cob > 0) || (iob.jActivity > 0) || (iob.jCarbImpact > 0)) {
                            fuzzed_timestamp = iob.timestamp / FUZZER;
                            if (d)
                                Log.d(TAG, "iob timestamp: " + iob.timestamp);
                            if (iob.iob > Profile.minimum_shown_iob) {
                                double height = iob.iob * iobscale;
                                if (height > cob_insulin_max_draw_value)
                                    height = cob_insulin_max_draw_value;
                                PointValue pv = new PointValue((float) fuzzed_timestamp, (float) height);
                                iobValues.add(pv);
                                // currently scaled by profile
                                double activityheight = iob.jActivity * 3;
                                if (activityheight > cob_insulin_max_draw_value)
                                    activityheight = cob_insulin_max_draw_value;
                                PointValue av = new PointValue((float) fuzzed_timestamp, (float) activityheight);
                                activityValues.add(av);
                            }
                            if (iob.cob > 0) {
                                double height = iob.cob * cobscale;
                                if (height > cob_insulin_max_draw_value)
                                    height = cob_insulin_max_draw_value;
                                PointValue pv = new PointValue((float) fuzzed_timestamp, (float) height);
                                if (d)
                                    Log.d(TAG, "Cob total record: " + JoH.qs(height) + " " + JoH.qs(iob.cob) + " " + Float.toString(pv.getY()) + " @ timestamp: " + Long.toString(iob.timestamp));
                                // warning should not be hardcoded
                                cobValues.add(pv);
                            }
                            // do we actually need to calculate this within the loop - can we use only the last datum?
                            if (fuzzed_timestamp > (lasttimestamp)) {
                                double polyPredict = 0;
                                if (poly != null) {
                                    try {
                                        polyPredict = poly.predict(iob.timestamp);
                                        if (d)
                                            Log.d(TAG, "Poly predict: " + JoH.qs(polyPredict) + " @ " + JoH.dateTimeText(iob.timestamp));
                                        if (show_moment_working_line) {
                                            if (((polyPredict < highMark) || (polyPredict < initial_predicted_bg)) && (polyPredict > 0)) {
                                                PointValue zv = new PointValue((float) fuzzed_timestamp, (float) polyPredict);
                                                polyBgValues.add(zv);
                                            }
                                        }
                                    } catch (Exception e) {
                                        Log.e(TAG, "Got exception with poly predict: " + e.toString());
                                    }
                                }
                                if (d)
                                    Log.d(TAG, "Processing prediction: before: " + JoH.qs(predictedbg) + " activity: " + JoH.qs(iob.jActivity) + " jcarbimpact: " + JoH.qs(iob.jCarbImpact));
                                // lower bg by current insulin activity
                                predictedbg -= iob.jActivity;
                                predictedbg += iob.jCarbImpact;
                                double predictedbg_final = predictedbg;
                                // add momentum characteristics if we have them
                                final boolean momentum_smoothing = true;
                                if ((predict_use_momentum) && (polyPredict > 0)) {
                                    predictedbg_final = ((predictedbg * predict_weight) + polyPredict) / (predict_weight + 1);
                                    if (momentum_smoothing)
                                        predictedbg = predictedbg_final;
                                    if (d)
                                        Log.d(TAG, "forecast predict_weight: " + JoH.qs(predict_weight));
                                }
                                // from 0-infinity - // TODO account for step!!!
                                predict_weight = predict_weight * 2.5;
                                // we should pull in actual graph upper and lower limits here
                                if (((predictedbg_final < cob_insulin_max_draw_value) || (predictedbg_final < relaxed_predicted_bg_limit)) && (predictedbg_final > 0)) {
                                    PointValue zv = new PointValue((float) fuzzed_timestamp, (float) predictedbg_final);
                                    predictedBgValues.add(zv);
                                }
                            }
                            if (fuzzed_timestamp > end_time) {
                                // round up to nearest future hour - timestamps in minutes here
                                predictivehours = (int) (((fuzzed_timestamp - end_time) * FUZZER) / (1000 * 60 * 60)) + 1;
                                if (d)
                                    Log.d(TAG, "Predictive hours updated to: " + predictivehours);
                            } else {
                                // KS Log.d(TAG, "IOB DEBUG: " + (fuzzed_timestamp - end_time) + " " + iob.iob);
                                if (!iob_shown_already && (Math.abs(fuzzed_timestamp - end_time) < 5) && (iob.iob > 0)) {
                                    iob_shown_already = true;
                                    // show current iob
                                    // double position = 12.4 * bgScale; // this is for mmol - needs generic for mg/dl
                                    // if (Math.abs(predictedbg - position) < (2 * bgScale)) {
                                    // position = 7.0 * bgScale;
                                    // }
                                    // PointValue iv = new PointValue((float) fuzzed_timestamp, (float) position);
                                    DecimalFormat df = new DecimalFormat("#");
                                    df.setMaximumFractionDigits(2);
                                    df.setMinimumIntegerDigits(1);
                                    // iv.setLabel("IoB: " + df.format(iob.iob));
                                    Home.updateStatusLine("iob", df.format(iob.iob));
                                // annotationValues.add(iv); // needs to be different value list so we can make annotation nicer
                                }
                            }
                        }
                    }
                    if (d)
                        Log.i(TAG, "Size of iob: " + Integer.toString(iobinfo.size()) + " Predictive hours: " + Integer.toString(predictivehours) + " Predicted end game change: " + JoH.qs(predictedbg - mylastbg.calculated_value_mmol()) + " Start bg: " + JoH.qs(mylastbg.calculated_value_mmol()) + " Predicted: " + JoH.qs(predictedbg));
                // calculate bolus or carb adjustment - these should have granularity for injection / pump and thresholds
                } else {
                    if (d)
                        Log.i(TAG, "iobinfo was null");
                }
                double[] evaluation;
                if (prediction_enabled && simulation_enabled) {
                    // if (doMgdl) {
                    // These routines need to understand how the profile is defined to use native instead of scaled
                    evaluation = Profile.evaluateEndGameMmol(predictedbg, lasttimestamp * FUZZER, end_time * FUZZER);
                    // } else {
                    // evaluation = Profile.evaluateEndGameMmol(predictedbg, lasttimestamp * FUZZER, end_time * FUZZER);
                    // }
                    String bwp_update = "";
                    keyStore.putL("bwp_last_insulin_timestamp", -1);
                    if (d)
                        Log.i(TAG, "Predictive BWP: Current prediction: " + JoH.qs(predictedbg) + " / carbs: " + JoH.qs(evaluation[0]) + " insulin: " + JoH.qs(evaluation[1]));
                    if (!BgReading.isDataStale()) {
                        if (((low_occurs_at < 1) || Pref.getBooleanDefaultFalse("always_show_bwp")) && (Pref.getBooleanDefaultFalse("show_bwp"))) {
                            if (evaluation[0] > Profile.minimum_carb_recommendation) {
                                // PointValue iv = new PointValue((float) fuzzed_timestamp, (float) (10 * bgScale));
                                // iv.setLabel("+Carbs: " + JoH.qs(evaluation[0], 0));
                                bwp_update = "\u224F" + " Carbs: " + JoH.qs(evaluation[0], 0);
                            // annotationValues.add(iv); // needs to be different value list so we can make annotation nicer
                            } else if (evaluation[1] > Profile.minimum_insulin_recommendation) {
                                // PointValue iv = new PointValue((float) fuzzed_timestamp, (float) (11 * bgScale));
                                // iv.setLabel("+Insulin: " + JoH.qs(evaluation[1], 1));
                                keyStore.putS("bwp_last_insulin", JoH.qs(evaluation[1], 1) + ((low_occurs_at > 0) ? ("!") : ""));
                                keyStore.putL("bwp_last_insulin_timestamp", JoH.tsl());
                                // warning symbol
                                bwp_update = "\u224F" + " Insulin: " + JoH.qs(evaluation[1], 1) + ((low_occurs_at > 0) ? (" " + "\u26A0") : "");
                            // annotationValues.add(iv); // needs to be different value list so we can make annotation nicer
                            }
                        }
                    }
                    // always send so we can blank if needed
                    Home.updateStatusLine("bwp", bwp_update);
                }
            } catch (Exception e) {
                Log.e(TAG, "Exception doing iob values in bggraphbuilder: " + e.toString());
            }
        } // if !simple
    } finally {
        readings_lock.unlock();
    }
}
Also used : Matcher(java.util.regex.Matcher) BloodTest(com.eveningoutpost.dexdrip.Models.BloodTest) DecimalFormat(java.text.DecimalFormat) ArrayList(java.util.ArrayList) Treatments(com.eveningoutpost.dexdrip.Models.Treatments) Iob(com.eveningoutpost.dexdrip.Models.Iob) Pattern(java.util.regex.Pattern) PointValue(lecho.lib.hellocharts.model.PointValue) PluggableCalibration(com.eveningoutpost.dexdrip.calibrations.PluggableCalibration) Calibration(com.eveningoutpost.dexdrip.Models.Calibration) AddCalibration(com.eveningoutpost.dexdrip.AddCalibration) Date(java.util.Date) PolyTrendLine(com.eveningoutpost.dexdrip.Models.Forecast.PolyTrendLine) Forecast(com.eveningoutpost.dexdrip.Models.Forecast) CalibrationAbstract(com.eveningoutpost.dexdrip.calibrations.CalibrationAbstract) TrendLine(com.eveningoutpost.dexdrip.Models.Forecast.TrendLine) PolyTrendLine(com.eveningoutpost.dexdrip.Models.Forecast.PolyTrendLine) BgReading(com.eveningoutpost.dexdrip.Models.BgReading)
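
For orientation, the noise section above fits a degree-2 polynomial to recent (timestamp, glucose) pairs and treats the residual variance as a noise score, via PolyTrendLine.setValues(), errorVarience() and predict(). Below is a minimal, self-contained sketch of that idea in plain Java; it is not xDrip code, and all names and data in it are hypothetical.

import java.util.Arrays;

// Hypothetical sketch: least-squares quadratic fit over (x, y) samples, mirroring how
// PolyTrendLine(2) is used above to derive a noise score (residual variance) and
// smoothed estimates (predict()).
public class QuadraticNoiseSketch {

    // Fit y ~ c0 + c1*x + c2*x^2 by solving the 3x3 normal equations.
    static double[] fitQuadratic(double[] x, double[] y) {
        double[][] a = new double[3][4]; // augmented matrix [A | b]
        for (int i = 0; i < x.length; i++) {
            double[] basis = { 1.0, x[i], x[i] * x[i] };
            for (int r = 0; r < 3; r++) {
                for (int c = 0; c < 3; c++) a[r][c] += basis[r] * basis[c];
                a[r][3] += basis[r] * y[i];
            }
        }
        // Gaussian elimination with partial pivoting, then back-substitution.
        for (int p = 0; p < 3; p++) {
            int best = p;
            for (int r = p + 1; r < 3; r++) if (Math.abs(a[r][p]) > Math.abs(a[best][p])) best = r;
            double[] tmp = a[p]; a[p] = a[best]; a[best] = tmp;
            for (int r = p + 1; r < 3; r++) {
                double f = a[r][p] / a[p][p];
                for (int c = p; c < 4; c++) a[r][c] -= f * a[p][c];
            }
        }
        double[] coef = new double[3];
        for (int r = 2; r >= 0; r--) {
            double s = a[r][3];
            for (int c = r + 1; c < 3; c++) s -= a[r][c] * coef[c];
            coef[r] = s / a[r][r];
        }
        return coef; // {c0, c1, c2}
    }

    static double predict(double[] coef, double x) {
        return coef[0] + coef[1] * x + coef[2] * x * x;
    }

    // Mean squared residual: the analogue of errorVarience() used as the noise score.
    static double residualVariance(double[] coef, double[] x, double[] y) {
        double sum = 0;
        for (int i = 0; i < x.length; i++) {
            double e = y[i] - predict(coef, x[i]);
            sum += e * e;
        }
        return sum / x.length;
    }

    public static void main(String[] args) {
        // Made-up samples: minutes since the first reading vs glucose in mg/dl.
        double[] x = { 0, 5, 10, 15, 20, 25 };
        double[] y = { 120, 118, 123, 119, 125, 122 };
        double[] coef = fitQuadratic(x, y);
        System.out.println("coefficients: " + Arrays.toString(coef));
        System.out.println("noise score:  " + residualVariance(coef, x, y));
        System.out.println("estimate @25: " + predict(coef, 25));
    }
}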

Example 37 with Calibration

Use of com.eveningoutpost.dexdrip.Models.Calibration in project xDrip by NightscoutFoundation.

The class NightscoutUploader, method doRESTUploadTo.

private void doRESTUploadTo(NightscoutService nightscoutService, String secret, List<BgReading> glucoseDataSets, List<BloodTest> meterRecords, List<Calibration> calRecords) throws Exception {
    final JSONArray array = new JSONArray();
    for (BgReading record : glucoseDataSets) {
        populateV1APIBGEntry(array, record);
    }
    for (BloodTest record : meterRecords) {
        populateV1APIMeterReadingEntry(array, record);
    }
    for (Calibration record : calRecords) {
        final BloodTest dupe = BloodTest.getForPreciseTimestamp(record.timestamp, 60000);
        if (dupe == null) {
            // also add calibrations as meter records
            populateV1APIMeterReadingEntry(array, record);
        } else {
            Log.d(TAG, "Found duplicate blood test entry for this calibration record: " + record.bg + " vs " + dupe.mgdl + " mg/dl");
        }
        populateV1APICalibrationEntry(array, record);
    }
    if (array.length() > 0) {
        // KS
        final RequestBody body = RequestBody.create(MediaType.parse("application/json"), array.toString());
        final Response<ResponseBody> r = nightscoutService.upload(secret, body).execute();
        if (!r.isSuccess())
            throw new UploaderException(r.message(), r.code());
        checkGzipSupport(r);
        try {
            postDeviceStatus(nightscoutService, secret);
        } catch (Exception e) {
            Log.e(TAG, "Ignoring devicestatus post exception: " + e);
        }
    }
    try {
        if (Pref.getBooleanDefaultFalse("send_treatments_to_nightscout")) {
            postTreatments(nightscoutService, secret);
        } else {
            Log.d(TAG, "Skipping treatment upload due to preference disabled");
        }
    } catch (Exception e) {
        Log.e(TAG, "Exception uploading REST API treatments: " + e.getMessage());
        if (e.getMessage().equals("Not Found")) {
            final String msg = "Please ensure careportal plugin is enabled on nightscout for treatment upload!";
            Log.wtf(TAG, msg);
            Home.toaststaticnext(msg);
            handleRestFailure(msg);
        }
    }
    // TODO in the future we may want to merge these in to a single post
    if (Pref.getBooleanDefaultFalse("use_pebble_health") && (Home.get_engineering_mode())) {
        try {
            postHeartRate(nightscoutService, secret);
            postStepsCount(nightscoutService, secret);
            postMotionTracking(nightscoutService, secret);
        } catch (Exception e) {
            if (JoH.ratelimit("heartrate-upload-exception", 3600)) {
                Log.e(TAG, "Exception uploading REST API heartrate: " + e.getMessage());
            }
        }
    }
}
Also used : BloodTest(com.eveningoutpost.dexdrip.Models.BloodTest) JSONArray(org.json.JSONArray) BgReading(com.eveningoutpost.dexdrip.Models.BgReading) Calibration(com.eveningoutpost.dexdrip.Models.Calibration) URISyntaxException(java.net.URISyntaxException) JSONException(org.json.JSONException) IOException(java.io.IOException) RequestBody(com.squareup.okhttp.RequestBody) ResponseBody(com.squareup.okhttp.ResponseBody)
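
A minimal sketch of the de-duplication rule in doRESTUploadTo above: a calibration is only added as a meter record when no blood test sits within roughly a 60-second window of its timestamp (the window given to BloodTest.getForPreciseTimestamp()). The class and names below are hypothetical stand-ins, not the xDrip model classes.

import java.util.ArrayList;
import java.util.List;

// Hypothetical standalone illustration of the 60-second duplicate window used above.
// SimpleReading stands in for both the Calibration and BloodTest model objects.
public class MeterDedupSketch {

    static class SimpleReading {
        final long timestamp;
        final double mgdl;
        SimpleReading(long timestamp, double mgdl) {
            this.timestamp = timestamp;
            this.mgdl = mgdl;
        }
    }

    static final long WINDOW_MS = 60_000; // assumed to match the 60000 passed above

    // Returns the calibrations that should also be uploaded as meter ("mbg") entries.
    static List<SimpleReading> withoutMatchingBloodTest(List<SimpleReading> calibrations,
                                                        List<SimpleReading> bloodTests) {
        final List<SimpleReading> result = new ArrayList<>();
        for (SimpleReading cal : calibrations) {
            boolean duplicate = false;
            for (SimpleReading bt : bloodTests) {
                if (Math.abs(bt.timestamp - cal.timestamp) <= WINDOW_MS) {
                    duplicate = true; // a blood test already covers this calibration
                    break;
                }
            }
            if (!duplicate) result.add(cal);
        }
        return result;
    }

    public static void main(String[] args) {
        final List<SimpleReading> cals = new ArrayList<>();
        cals.add(new SimpleReading(1_000_000L, 110));
        cals.add(new SimpleReading(2_000_000L, 95));
        final List<SimpleReading> bts = new ArrayList<>();
        bts.add(new SimpleReading(1_030_000L, 112)); // within 60 s of the first calibration
        System.out.println(withoutMatchingBloodTest(cals, bts).size() + " calibration(s) kept as meter entries");
    }
}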

Example 38 with Calibration

Use of com.eveningoutpost.dexdrip.Models.Calibration in project xDrip by NightscoutFoundation.

The class NightscoutUploader, method doMongoUpload.

private boolean doMongoUpload(SharedPreferences prefs, List<BgReading> glucoseDataSets, List<Calibration> meterRecords, List<Calibration> calRecords) {
    final SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ", Locale.US);
    format.setTimeZone(TimeZone.getDefault());
    final String dbURI = prefs.getString("cloud_storage_mongodb_uri", null);
    if (dbURI != null) {
        try {
            final URI uri = new URI(dbURI.trim());
            if ((uri.getHost().startsWith("192.168.")) && prefs.getBoolean("skip_lan_uploads_when_no_lan", true) && (!JoH.isLANConnected())) {
                Log.d(TAG, "Skipping mongo upload to: " + dbURI + " due to no LAN connection");
                return false;
            }
        } catch (URISyntaxException e) {
            UserError.Log.e(TAG, "Invalid mongo URI: " + e);
        }
    }
    final String collectionName = prefs.getString("cloud_storage_mongodb_collection", null);
    final String dsCollectionName = prefs.getString("cloud_storage_mongodb_device_status_collection", "devicestatus");
    if (dbURI != null && collectionName != null) {
        try {
            // connect to db
            MongoClientURI uri = new MongoClientURI(dbURI.trim() + "?socketTimeoutMS=180000");
            MongoClient client = new MongoClient(uri);
            // get db
            DB db = client.getDB(uri.getDatabase());
            // get collection
            DBCollection dexcomData = db.getCollection(collectionName.trim());
            try {
                Log.i(TAG, "The number of EGV records being sent to MongoDB is " + glucoseDataSets.size());
                for (BgReading record : glucoseDataSets) {
                    // make db object
                    BasicDBObject testData = new BasicDBObject();
                    testData.put("device", "xDrip-" + prefs.getString("dex_collection_method", "BluetoothWixel"));
                    if (record != null) {
                        // KS
                        testData.put("date", record.timestamp);
                        testData.put("dateString", format.format(record.timestamp));
                        testData.put("sgv", Math.round(record.calculated_value));
                        testData.put("direction", record.slopeName());
                        testData.put("type", "sgv");
                        testData.put("filtered", record.ageAdjustedFiltered() * 1000);
                        testData.put("unfiltered", record.usedRaw() * 1000);
                        testData.put("rssi", 100);
                        testData.put("noise", record.noiseValue());
                        dexcomData.insert(testData, WriteConcern.UNACKNOWLEDGED);
                    } else
                        Log.e(TAG, "MongoDB BG record is null.");
                }
                Log.i(TAG, "The number of MBG records being sent to MongoDB is " + meterRecords.size());
                for (Calibration meterRecord : meterRecords) {
                    // make db object
                    BasicDBObject testData = new BasicDBObject();
                    testData.put("device", "xDrip-" + prefs.getString("dex_collection_method", "BluetoothWixel"));
                    testData.put("type", "mbg");
                    testData.put("date", meterRecord.timestamp);
                    testData.put("dateString", format.format(meterRecord.timestamp));
                    testData.put("mbg", meterRecord.bg);
                    dexcomData.insert(testData, WriteConcern.UNACKNOWLEDGED);
                }
                for (Calibration calRecord : calRecords) {
                    // do not upload undefined slopes
                    if (calRecord.slope == 0d)
                        break;
                    // make db object
                    BasicDBObject testData = new BasicDBObject();
                    testData.put("device", "xDrip-" + prefs.getString("dex_collection_method", "BluetoothWixel"));
                    testData.put("date", calRecord.timestamp);
                    testData.put("dateString", format.format(calRecord.timestamp));
                    if (calRecord.check_in) {
                        testData.put("slope", (calRecord.first_slope));
                        testData.put("intercept", ((calRecord.first_intercept)));
                        testData.put("scale", calRecord.first_scale);
                    } else {
                        testData.put("slope", (1000 / calRecord.slope));
                        testData.put("intercept", ((calRecord.intercept * -1000) / (calRecord.slope)));
                        testData.put("scale", 1);
                    }
                    testData.put("type", "cal");
                    dexcomData.insert(testData, WriteConcern.UNACKNOWLEDGED);
                }
                // TODO: quick port from original code, revisit before release
                DBCollection dsCollection = db.getCollection(dsCollectionName);
                BasicDBObject devicestatus = new BasicDBObject();
                devicestatus.put("uploaderBattery", getBatteryLevel());
                devicestatus.put("created_at", format.format(System.currentTimeMillis()));
                dsCollection.insert(devicestatus, WriteConcern.UNACKNOWLEDGED);
                // treatments mongo sync using unified queue
                Log.d(TAG, "Starting treatments mongo direct");
                final long THIS_QUEUE = UploaderQueue.MONGO_DIRECT;
                final DBCollection treatmentDb = db.getCollection("treatments");
                final List<UploaderQueue> tups = UploaderQueue.getPendingbyType(Treatments.class.getSimpleName(), THIS_QUEUE);
                if (tups != null) {
                    for (UploaderQueue up : tups) {
                        if ((up.action.equals("insert") || (up.action.equals("update")))) {
                            Treatments treatment = Treatments.byid(up.reference_id);
                            if (treatment != null) {
                                BasicDBObject record = new BasicDBObject();
                                record.put("timestamp", treatment.timestamp);
                                record.put("eventType", treatment.eventType);
                                record.put("enteredBy", treatment.enteredBy);
                                if (treatment.notes != null)
                                    record.put("notes", treatment.notes);
                                record.put("uuid", treatment.uuid);
                                record.put("carbs", treatment.carbs);
                                record.put("insulin", treatment.insulin);
                                record.put("created_at", treatment.created_at);
                                final BasicDBObject searchQuery = new BasicDBObject().append("uuid", treatment.uuid);
                                // treatmentDb.insert(record, WriteConcern.UNACKNOWLEDGED);
                                Log.d(TAG, "Sending upsert for: " + treatment.toJSON());
                                treatmentDb.update(searchQuery, record, true, false);
                            } else {
                                Log.d(TAG, "Got null for treatment id: " + up.reference_id);
                            }
                            up.completed(THIS_QUEUE);
                        } else if (up.action.equals("delete")) {
                            if (up.reference_uuid != null) {
                                Log.d(TAG, "Processing treatment delete mongo sync for: " + up.reference_uuid);
                                final BasicDBObject searchQuery = new BasicDBObject().append("uuid", up.reference_uuid);
                                Log.d(TAG, treatmentDb.remove(searchQuery, WriteConcern.UNACKNOWLEDGED).toString());
                            }
                            up.completed(THIS_QUEUE);
                        } else {
                            Log.e(TAG, "Unsupported operation type for treatment: " + up.action);
                        }
                    }
                    Log.d(TAG, "Processed " + tups.size() + " Treatment mongo direct upload records");
                }
                client.close();
                failurecount = 0;
                return true;
            } catch (Exception e) {
                Log.e(TAG, "Unable to upload data to mongo " + e.getMessage());
                failurecount++;
                if (failurecount > 4) {
                    Home.toaststaticnext("Mongo " + failurecount + " up fails: " + e.getMessage().substring(0, 51));
                }
            } finally {
                if (client != null) {
                    client.close();
                }
            }
        } catch (Exception e) {
            Log.e(TAG, "Unable to upload data to mongo " + e.getMessage());
        }
    }
    return false;
}
Also used : MongoClientURI(com.mongodb.MongoClientURI) URISyntaxException(java.net.URISyntaxException) Calibration(com.eveningoutpost.dexdrip.Models.Calibration) URI(java.net.URI) MongoClientURI(com.mongodb.MongoClientURI) URISyntaxException(java.net.URISyntaxException) JSONException(org.json.JSONException) IOException(java.io.IOException) MongoClient(com.mongodb.MongoClient) DBCollection(com.mongodb.DBCollection) BasicDBObject(com.mongodb.BasicDBObject) BgReading(com.eveningoutpost.dexdrip.Models.BgReading) Treatments(com.eveningoutpost.dexdrip.Models.Treatments) SimpleDateFormat(java.text.SimpleDateFormat) DB(com.mongodb.DB)
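
The non-check_in branch above rescales the stored calibration before upload: slope becomes 1000 / slope, intercept becomes (intercept * -1000) / slope, and scale is fixed at 1. A tiny hypothetical helper showing just that arithmetic (the values in main are made up):

// Hypothetical helper mirroring the arithmetic used for the uploaded "cal" record above.
public class CalRecordMathSketch {

    // Returns {slope, intercept, scale} as they would appear in the uploaded record.
    static double[] toUploadedCal(double slope, double intercept) {
        if (slope == 0d) {
            throw new IllegalArgumentException("undefined slope; the uploader above skips these records");
        }
        final double uploadedSlope = 1000d / slope;
        final double uploadedIntercept = (intercept * -1000d) / slope;
        return new double[] { uploadedSlope, uploadedIntercept, 1d };
    }

    public static void main(String[] args) {
        final double[] cal = toUploadedCal(1.1, 30.0);
        // Prints roughly: slope=909.09 intercept=-27272.73 scale=1
        System.out.printf("slope=%.2f intercept=%.2f scale=%.0f%n", cal[0], cal[1], cal[2]);
    }
}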

Example 39 with Calibration

Use of com.eveningoutpost.dexdrip.Models.Calibration in project xDrip by NightscoutFoundation.

The class WixelReader, method readData.

private void readData() {
    Long LastReportedTime = 0L;
    TransmitterData lastTransmitterData = TransmitterData.last();
    if (lastTransmitterData != null) {
        LastReportedTime = lastTransmitterData.timestamp;
        // jamorham fix to avoid going twice to network when we just got a packet
        if ((new Date().getTime() - LastReportedTime) < DEXCOM_PERIOD - 2000) {
            Log.d(TAG, "Already have a recent packet - returning");
            if (JoH.ratelimit("deferred-msg", 60)) {
                statusLog(" Deferred", "Already have recent reading");
            }
            return;
        } else {
            statusLog(" Deferred", "");
        }
    }
    Long startReadTime = LastReportedTime;
    TransmitterRawData LastReportedReading = null;
    Log.d(TAG, "Starting... LastReportedReading " + LastReportedReading);
    // try to read one object...
    TransmitterRawData[] LastReadingArr = null;
    String recieversIpAddresses;
    if (!WixelReader.IsConfigured()) {
        return;
    }
    if ((DexCollectionType.getDexCollectionType() == DexCollectionType.Mock) && Home.get_engineering_mode()) {
        recieversIpAddresses = "fake://FAKE_DATA";
    } else {
        recieversIpAddresses = Pref.getString("wifi_recievers_addresses", "");
    }
    // How many packets should we read? We look at the later of the last calibration and the last reading time
    // and calculate how many are needed.
    final Calibration lastCalibration = Calibration.lastValid();
    if (lastCalibration != null) {
        startReadTime = Math.max(startReadTime, lastCalibration.timestamp);
    }
    Long gapTime = new Date().getTime() - startReadTime + 120000;
    int packetsToRead = (int) (gapTime / (5 * 60000));
    // don't read too much, but always read 1.
    packetsToRead = Math.min(packetsToRead, 200);
    packetsToRead = Math.max(packetsToRead, 1);
    Log.d(TAG, "reading " + packetsToRead + " packets");
    LastReadingArr = Read(recieversIpAddresses, packetsToRead);
    if (LastReadingArr == null || LastReadingArr.length == 0) {
        return;
    }
    for (TransmitterRawData LastReading : LastReadingArr) {
        // Make sure we do not report packets from the far future...
        if ((LastReading.CaptureDateTime > LastReportedTime + 120000) && (!almostEquals(LastReading, LastReportedReading)) && LastReading.CaptureDateTime < new Date().getTime() + 120000) {
            // We have a real new reading...
            Log.d(TAG, "calling setSerialDataToTransmitterRawData " + LastReading.RawValue + " LastReading.CaptureDateTime " + LastReading.CaptureDateTime + " " + LastReading.TransmissionId);
            setSerialDataToTransmitterRawData(LastReading.RawValue, LastReading.FilteredValue, LastReading.BatteryLife, LastReading.CaptureDateTime);
            LastReportedReading = LastReading;
            LastReportedTime = LastReading.CaptureDateTime;
            if (LastReading.UploaderBatteryLife > 0) {
                Pref.setInt("parakeet_battery", LastReading.UploaderBatteryLife);
                CheckBridgeBattery.checkParakeetBattery();
                if (Home.get_master()) {
                    GcmActivity.sendParakeetBattery(LastReading.UploaderBatteryLife);
                }
            }
        }
    }
}
Also used : TransmitterData(com.eveningoutpost.dexdrip.Models.TransmitterData) Calibration(com.eveningoutpost.dexdrip.Models.Calibration) Date(java.util.Date)
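
The packet-count logic in readData above reduces to: take the gap since the newer of the last stored reading and the last valid calibration, add a two-minute margin, divide by the five-minute Dexcom period, and clamp the result to between 1 and 200 packets. A small standalone sketch of that calculation, with hypothetical names:

// Hypothetical standalone version of the packet-count calculation in readData above.
public class PacketCountSketch {

    static final long FIVE_MINUTES_MS = 5 * 60_000L;
    static final long MARGIN_MS = 120_000L; // two-minute safety margin, as in the code above

    static int packetsToRead(long nowMs, long lastReadingMs, long lastCalibrationMs) {
        final long startReadTime = Math.max(lastReadingMs, lastCalibrationMs);
        final long gap = nowMs - startReadTime + MARGIN_MS;
        int packets = (int) (gap / FIVE_MINUTES_MS);
        packets = Math.min(packets, 200); // don't read too much...
        return Math.max(packets, 1);      // ...but always read at least one packet
    }

    public static void main(String[] args) {
        final long now = System.currentTimeMillis();
        // Last reading 23 minutes ago, last calibration an hour ago: expect 5 packets.
        System.out.println(packetsToRead(now, now - 23 * 60_000L, now - 60 * 60_000L));
    }
}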

Example 40 with Calibration

Use of com.eveningoutpost.dexdrip.Models.Calibration in project xDrip by NightscoutFoundation.

The class CalibrationGraph, method overWriteIntercept.

private void overWriteIntercept() {
    final EditText editText = new EditText(this);
    editText.setInputType(InputType.TYPE_CLASS_NUMBER | InputType.TYPE_NUMBER_FLAG_SIGNED | InputType.TYPE_NUMBER_FLAG_DECIMAL);
    new AlertDialog.Builder(this).setTitle("Overwrite Intercept").setMessage("Overwrite Intercept").setView(editText).setPositiveButton("OK", new DialogInterface.OnClickListener() {

        public void onClick(DialogInterface dialog, int which) {
            String text = editText.getText().toString();
            if (!TextUtils.isEmpty(text)) {
                double doubleValue = JoH.tolerantParseDouble(text);
                Calibration calibration = Calibration.lastValid();
                calibration.intercept = doubleValue;
                calibration.save();
                CalibrationSendQueue.addToQueue(calibration, getApplicationContext());
                recreate();
            } else {
                JoH.static_toast_long("Input not found! Cancelled!");
            }
        }
    }).setNegativeButton("Cancel", new DialogInterface.OnClickListener() {

        @Override
        public void onClick(DialogInterface dialog, int which) {
            JoH.static_toast_long("Cancelled!");
        }
    }).show();
}
Also used : EditText(android.widget.EditText) DialogInterface(android.content.DialogInterface) Calibration(com.eveningoutpost.dexdrip.Models.Calibration)

Aggregations

Calibration (com.eveningoutpost.dexdrip.Models.Calibration) 54
BgReading (com.eveningoutpost.dexdrip.Models.BgReading) 20
ArrayList (java.util.ArrayList) 16
Date (java.util.Date) 12
EditText (android.widget.EditText) 8
Sensor (com.eveningoutpost.dexdrip.Models.Sensor) 8
DataMap (com.google.android.gms.wearable.DataMap) 8
DialogInterface (android.content.DialogInterface) 6
Intent (android.content.Intent) 6
NonNull (android.support.annotation.NonNull) 6
BloodTest (com.eveningoutpost.dexdrip.Models.BloodTest) 6
PolyTrendLine (com.eveningoutpost.dexdrip.Models.Forecast.PolyTrendLine) 6
TrendLine (com.eveningoutpost.dexdrip.Models.Forecast.TrendLine) 6
Treatments (com.eveningoutpost.dexdrip.Models.Treatments) 6
CalibrationAbstract (com.eveningoutpost.dexdrip.calibrations.CalibrationAbstract) 6
PluggableCalibration (com.eveningoutpost.dexdrip.calibrations.PluggableCalibration) 6
IOException (java.io.IOException) 6
PointValue (lecho.lib.hellocharts.model.PointValue) 6
JSONException (org.json.JSONException) 6
View (android.view.View) 4