Banister Modelling
.. Banister model fitting using LM (Levenberg-Marquardt)

.. can plot Banister curves on trends plots;
   - Performance curve (NTE+PTE)
   - Predicted CP curve (Performance curve scaled)
   - Negative Training Effect
   - Positive Training Effect

.. the code is sub-optimal and needs to be refactored to cache and refresh less frequently (using the same pattern as PMC most likely).

.. the model fitting can fail and needs to be made a lot more robust, along with ensuring the samples we fit to are appropriate.
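For reference, the impulse-response form being fitted (a sketch using the parameter names from the new Banister.cpp below, not the exact discrete implementation) is approximately

    p(t) = p0 + k1 * sum_{s<=t} w(s) * exp(-(t-s)/t1) - k2 * sum_{s<=t} w(s) * exp(-(t-s)/t2)

where w(s) is the daily training impulse from the user-selected load metric (e.g. BikeScore or TSS), k1/t1 drive the Positive Training Effect, k2/t2 the Negative Training Effect, and the five parameters k1, k2, t1, t2 and the baseline p0 are estimated per season window with Levenberg-Marquardt (lmcurve). The default priors in the code are k1=k2=0.2, t1=31, t2=15.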
@@ -449,9 +449,10 @@ LTMPlot::setData(LTMSettings *set)
: new QwtPlotCurve(metricDetail.uname);
current->setVisible(!metricDetail.hidden);
settings->metrics[m].curve = current;
if (metricDetail.type == METRIC_BEST || metricDetail.type == METRIC_STRESS)
if (metricDetail.type == METRIC_BEST || metricDetail.type == METRIC_STRESS || metricDetail.type == METRIC_BANISTER) {
//fprintf(stderr, "insert curve %s, %s\n", metricDetail.uname.toStdString().c_str(), metricDetail.bestSymbol.toStdString().c_str()); fflush(stderr);
curves.insert(metricDetail.bestSymbol, current);
else
} else
curves.insert(metricDetail.symbol, current);
stacks.insert(current, stackcounter+1);
if (appsettings->value(this, GC_ANTIALIAS, true).toBool() == true)
@@ -603,9 +604,10 @@ LTMPlot::setData(LTMSettings *set)
: new QwtPlotCurve(metricDetail.uname);
current->setVisible(!metricDetail.hidden);
settings->metrics[m].curve = current;
if (metricDetail.type == METRIC_BEST || metricDetail.type == METRIC_STRESS)
if (metricDetail.type == METRIC_BEST || metricDetail.type == METRIC_STRESS || metricDetail.type == METRIC_BANISTER) {
//fprintf(stderr, "insert curve %s, %s\n", metricDetail.uname.toStdString().c_str(), metricDetail.bestSymbol.toStdString().c_str()); fflush(stderr);
curves.insert(metricDetail.bestSymbol, current);
else
} else
curves.insert(metricDetail.symbol, current);
if (appsettings->value(this, GC_ANTIALIAS, true).toBool() == true)
current->setRenderHint(QwtPlotItem::RenderAntialiased);
@@ -3546,110 +3548,11 @@ void
LTMPlot::createBanisterData(Context *context, LTMSettings *settings, MetricDetail metricDetail,
QVector<double>&x,QVector<double>&y,int&n, bool)
{
n=0;
#if 0
QString scoreType;
int stressType = STRESS_LTS;
// banister model
Banister f(context, metricDetail.symbol, 0,0,0,0);

// create a custom set of summary metric data!
if (metricDetail.type == METRIC_PM) {
int valuesType = VALUES_CALCULATED;

QString symbol = metricDetail.symbol;
if (symbol.startsWith("planned_")) {
valuesType = VALUES_PLANNED;
symbol = symbol.right(symbol.length()-8);
} else if (symbol.startsWith("expected_")) {
valuesType = VALUES_EXPECTED;
symbol = symbol.right(symbol.length()-9);
}

if (symbol.startsWith("skiba")) {
scoreType = "skiba_bike_score";
} else if (symbol.startsWith("antiss")) {
scoreType = "antiss_score";
} else if (symbol.startsWith("atiss")) {
scoreType = "atiss_score";
} else if (symbol.startsWith("coggan")) {
scoreType = "coggan_tss";
} else if (symbol.startsWith("daniels")) {
scoreType = "daniels_points";
} else if (symbol.startsWith("trimp")) {
scoreType = "trimp_points";
} else if (symbol.startsWith("work")) {
scoreType = "total_work";
} else if (symbol.startsWith("cp_")) {
scoreType = "skiba_cp_exp";
} else if (symbol.startsWith("wprime")) {
scoreType = "skiba_wprime_exp";
} else if (symbol.startsWith("distance")) {
scoreType = "total_distance";
} else if (symbol.startsWith("triscore")) {
scoreType = "triscore";
}

stressType = STRESS_LTS; // if in doubt
if (valuesType == VALUES_CALCULATED) {
if (metricDetail.symbol.endsWith("lts") || metricDetail.symbol.endsWith("ctl"))
stressType = STRESS_LTS;
else if (metricDetail.symbol.endsWith("sts") || metricDetail.symbol.endsWith("atl"))
stressType = STRESS_STS;
else if (metricDetail.symbol.endsWith("sb"))
stressType = STRESS_SB;
else if (metricDetail.symbol.endsWith("lr"))
stressType = STRESS_RR;
}
else if (valuesType == VALUES_PLANNED) {
if (metricDetail.symbol.endsWith("lts") || metricDetail.symbol.endsWith("ctl"))
stressType = STRESS_PLANNED_LTS;
else if (metricDetail.symbol.endsWith("sts") || metricDetail.symbol.endsWith("atl"))
stressType = STRESS_PLANNED_STS;
else if (metricDetail.symbol.endsWith("sb"))
stressType = STRESS_PLANNED_SB;
else if (metricDetail.symbol.endsWith("lr"))
stressType = STRESS_PLANNED_RR;
}
else if (valuesType == VALUES_EXPECTED) {
if (metricDetail.symbol.endsWith("lts") || metricDetail.symbol.endsWith("ctl"))
stressType = STRESS_EXPECTED_LTS;
else if (metricDetail.symbol.endsWith("sts") || metricDetail.symbol.endsWith("atl"))
stressType = STRESS_EXPECTED_STS;
else if (metricDetail.symbol.endsWith("sb"))
stressType = STRESS_EXPECTED_SB;
else if (metricDetail.symbol.endsWith("lr"))
stressType = STRESS_EXPECTED_RR;
}
} else {

scoreType = metricDetail.symbol; // just use the selected metric
stressType = metricDetail.stressType;
}

// initial state
PMCData *athletePMC = NULL;
PMCData *localPMC = NULL;
n = 0;

// create local PMC if filtered
if (!SearchFilterBox::isNull(metricDetail.datafilter) || settings->specification.isFiltered()) {

// don't filter for date range!!
Specification allDates = settings->specification;

// curve specific filter
if (!SearchFilterBox::isNull(metricDetail.datafilter))
allDates.addMatches(SearchFilterBox::matches(context, metricDetail.datafilter));

allDates.setDateRange(DateRange(QDate(),QDate()));
localPMC = new PMCData(context, allDates, scoreType);
}

// use global one if not filtered
if (!localPMC) athletePMC = context->athlete->getPMCFor(scoreType);

// point to the right one
PMCData *pmcData = localPMC ? localPMC : athletePMC;
// perform fit (date range todo)
f.fit();

int maxdays = groupForDate(settings->end.date(), settings->groupBy)
- groupForDate(settings->start.date(), settings->groupBy);
@@ -3676,57 +3579,7 @@ LTMPlot::createBanisterData(Context *context, LTMSettings *settings, MetricDetai
int currentDay = groupForDate(date, settings->groupBy);

// value for day
double value = 0.0f;

switch (stressType) {
case STRESS_LTS:
value = pmcData->lts(date);
break;
case STRESS_STS:
value = pmcData->sts(date);
break;
case STRESS_SB:
value = pmcData->sb(date);
break;
case STRESS_RR:
value = pmcData->rr(date);
break;
case STRESS_PLANNED_LTS:
value = pmcData->plannedLts(date);
break;
case STRESS_PLANNED_STS:
value = pmcData->plannedSts(date);
break;
case STRESS_PLANNED_SB:
value = pmcData->plannedSb(date);
break;
case STRESS_PLANNED_RR:
value = pmcData->plannedRr(date);
break;
case STRESS_EXPECTED_LTS:
value = pmcData->expectedLts(date);
if (past)
plotData = false;
break;
case STRESS_EXPECTED_STS:
value = pmcData->expectedSts(date);
if (past)
plotData = false;
break;
case STRESS_EXPECTED_SB:
value = pmcData->expectedSb(date);
if (past)
plotData = false;
break;
case STRESS_EXPECTED_RR:
value = pmcData->expectedRr(date);
if (past)
plotData = false;
break;
default:
value = 0;
break;
}
double value = f.value(date, metricDetail.stressType);

if (plotData && (value || wantZero)) {
unsigned long seconds = 1;
@@ -3785,11 +3638,8 @@ LTMPlot::createBanisterData(Context *context, LTMSettings *settings, MetricDetai
lastDay = currentDay;
}
}

// wipe away local
if (localPMC) delete localPMC;
#endif
}

void
LTMPlot::createMeasureData(Context *context, LTMSettings *settings, MetricDetail metricDetail, QVector<double>&x,QVector<double>&y,int&n, bool)
{
@@ -104,6 +104,7 @@ class RideBest;
#define BANISTER_NTE 0
#define BANISTER_PTE 1
#define BANISTER_PERFORMANCE 2
#define BANISTER_CP 3

// type of values
#define VALUES_CALCULATED 0
@@ -1946,7 +1946,8 @@ EditMetricDetailDialog::EditMetricDetailDialog(Context *context, LTMTool *ltmToo
banisterTypeSelect->addItem(tr("Negative Training Effect (NTE)"), BANISTER_NTE);
banisterTypeSelect->addItem(tr("Positive Training Effect (PTE)"), BANISTER_PTE);
banisterTypeSelect->addItem(tr("Performance (Power Index)"), BANISTER_PERFORMANCE);
banisterTypeSelect->setCurrentIndex(metricDetail->stressType < 3 ? metricDetail->stressType : 2);
banisterTypeSelect->addItem(tr("Predicted CP (Watts)"), BANISTER_CP);
banisterTypeSelect->setCurrentIndex(metricDetail->stressType < 4 ? metricDetail->stressType : 2);

banisterWidget = new QWidget(this);
banisterWidget->setContentsMargins(0,0,0,0);
@@ -2209,6 +2210,7 @@ EditMetricDetailDialog::EditMetricDetailDialog(Context *context, LTMTool *ltmToo
connect(chooseMeasure, SIGNAL(toggled(bool)), this, SLOT(measureName()));
connect(choosePerformance, SIGNAL(toggled(bool)), this, SLOT(performanceName()));
connect(stressTypeSelect, SIGNAL(currentIndexChanged(int)), this, SLOT(stressName()));
connect(banisterTypeSelect, SIGNAL(currentIndexChanged(int)), this, SLOT(banisterName()));
connect(chooseMetric, SIGNAL(toggled(bool)), this, SLOT(metricSelected()));
connect(duration, SIGNAL(valueChanged(double)), this, SLOT(bestName()));
connect(durationUnits, SIGNAL(currentIndexChanged(int)), this, SLOT(bestName()));
@@ -2366,10 +2368,11 @@ EditMetricDetailDialog::banisterName()
metricDetail->bestSymbol = metricDetail->symbol;

// append type
switch(stressTypeSelect->currentIndex()) {
switch(banisterTypeSelect->currentIndex()) {
case 0: metricDetail->bestSymbol += "_nte"; break;
case 1: metricDetail->bestSymbol += "_pte"; break;
case 2: metricDetail->bestSymbol += "_perf"; break;
case 3: metricDetail->bestSymbol += "_cp"; break;
}

}
@@ -2431,7 +2434,7 @@ void
EditMetricDetailDialog::metricSelected()
{
// only in metric mode
if (!chooseMetric->isChecked() && !chooseStress->isChecked()) return;
if (!chooseMetric->isChecked() && !chooseStress->isChecked() && !chooseBanister->isChecked()) return;

// user selected a different metric
// so update accordingly
@@ -2440,73 +2443,76 @@ EditMetricDetailDialog::metricSelected()
// out of bounds !
if (index < 0 || index >= ltmTool->metrics.count()) return;

userName->setText(ltmTool->metrics[index].uname);
userUnits->setText(ltmTool->metrics[index].uunits);
curveSmooth->setChecked(ltmTool->metrics[index].smooth);
fillCurve->setChecked(ltmTool->metrics[index].fillCurve);
labels->setChecked(ltmTool->metrics[index].labels);
stack->setChecked(ltmTool->metrics[index].stack);
showBest->setValue(ltmTool->metrics[index].topN);
showOut->setValue(ltmTool->metrics[index].topOut);
baseLine->setValue(ltmTool->metrics[index].baseline);
penColor = ltmTool->metrics[index].penColor;
trendType->setCurrentIndex(ltmTool->metrics[index].trendtype);
setButtonIcon(penColor);
if (!chooseBanister->isChecked()) {

// curve style
switch (ltmTool->metrics[index].curveStyle) {
userName->setText(ltmTool->metrics[index].uname);
userUnits->setText(ltmTool->metrics[index].uunits);
curveSmooth->setChecked(ltmTool->metrics[index].smooth);
fillCurve->setChecked(ltmTool->metrics[index].fillCurve);
labels->setChecked(ltmTool->metrics[index].labels);
stack->setChecked(ltmTool->metrics[index].stack);
showBest->setValue(ltmTool->metrics[index].topN);
showOut->setValue(ltmTool->metrics[index].topOut);
baseLine->setValue(ltmTool->metrics[index].baseline);
penColor = ltmTool->metrics[index].penColor;
trendType->setCurrentIndex(ltmTool->metrics[index].trendtype);
setButtonIcon(penColor);

// curve style
switch (ltmTool->metrics[index].curveStyle) {

case QwtPlotCurve::Steps:
curveStyle->setCurrentIndex(0);
break;
case QwtPlotCurve::Lines:
curveStyle->setCurrentIndex(1);
break;
case QwtPlotCurve::Sticks:
curveStyle->setCurrentIndex(2);
break;
case QwtPlotCurve::Dots:
default:
curveStyle->setCurrentIndex(3);
break;
case QwtPlotCurve::Steps:
curveStyle->setCurrentIndex(0);
break;
case QwtPlotCurve::Lines:
curveStyle->setCurrentIndex(1);
break;
case QwtPlotCurve::Sticks:
curveStyle->setCurrentIndex(2);
break;
case QwtPlotCurve::Dots:
default:
curveStyle->setCurrentIndex(3);
break;

}
}

// curveSymbol
switch (ltmTool->metrics[index].symbolStyle) {
// curveSymbol
switch (ltmTool->metrics[index].symbolStyle) {

case QwtSymbol::NoSymbol:
curveSymbol->setCurrentIndex(0);
break;
case QwtSymbol::Ellipse:
curveSymbol->setCurrentIndex(1);
break;
case QwtSymbol::Rect:
curveSymbol->setCurrentIndex(2);
break;
case QwtSymbol::Diamond:
curveSymbol->setCurrentIndex(3);
break;
case QwtSymbol::Triangle:
curveSymbol->setCurrentIndex(4);
break;
case QwtSymbol::XCross:
curveSymbol->setCurrentIndex(5);
break;
case QwtSymbol::Hexagon:
curveSymbol->setCurrentIndex(6);
break;
case QwtSymbol::Star1:
default:
curveSymbol->setCurrentIndex(7);
break;
case QwtSymbol::NoSymbol:
curveSymbol->setCurrentIndex(0);
break;
case QwtSymbol::Ellipse:
curveSymbol->setCurrentIndex(1);
break;
case QwtSymbol::Rect:
curveSymbol->setCurrentIndex(2);
break;
case QwtSymbol::Diamond:
curveSymbol->setCurrentIndex(3);
break;
case QwtSymbol::Triangle:
curveSymbol->setCurrentIndex(4);
break;
case QwtSymbol::XCross:
curveSymbol->setCurrentIndex(5);
break;
case QwtSymbol::Hexagon:
curveSymbol->setCurrentIndex(6);
break;
case QwtSymbol::Star1:
default:
curveSymbol->setCurrentIndex(7);
break;

}
}

(*metricDetail) = ltmTool->metrics[index]; // overwrite!

// make the banister name
if (chooseStress->isChecked()) banisterName();
if (chooseBanister->isChecked()) banisterName();
// make the stress name
if (chooseStress->isChecked()) stressName();
}
@@ -43,6 +43,7 @@ class Specification;
class AthleteBest;
class RideCacheModel;
class Estimator;
class Banister;

class RideCache : public QObject
{
@@ -136,6 +137,7 @@ class RideCache : public QObject
friend class ::MainWindow; // save dialog
friend class ::RideCacheBackgroundRefresh;
friend class ::LTMPlot; // get weekly performances
friend class ::Banister; // get weekly performances

Context *context;
QDir directory, plannedDirectory;
@@ -22,8 +22,10 @@
#include "Athlete.h"
#include "Context.h"
#include "Settings.h"
#include "RideCache.h"
#include "RideItem.h"
#include "IntervalItem.h"
#include "IntervalItem.h"
#include "LTMOutliers.h"
#include "Units.h"
#include "Zones.h"
@@ -31,13 +33,381 @@
#include <assert.h>
#include <algorithm>
#include <QVector>
#include <QMutex>
#include <QApplication>
#include "lmcurve.h"

// the mean athlete from opendata analysis
const double typical_CP = 261,
typical_WPrime = 15500,
typical_Pmax = 1100;

// minimum number of days that is gap in seasons
const int typical_SeasonBreak = 42;

// for debugging
#ifndef BANISTER_DEBUG
#define BANISTER_DEBUG false
#endif
#ifdef Q_CC_MSVC
#define printd(fmt, ...) do { \
if (BANISTER_DEBUG) { \
printf("[%s:%d %s] " fmt , __FILE__, __LINE__, \
__FUNCTION__, __VA_ARGS__); \
fflush(stdout); \
} \
} while(0)
#else
#define printd(fmt, args...) \
do { \
if (BANISTER_DEBUG) { \
printf("[%s:%d %s] " fmt , __FILE__, __LINE__, \
__FUNCTION__, ##args); \
fflush(stdout); \
} \
} while(0)
#endif
//
// Banister IR model
//
// Translates training impulse (I) for e.g. BikeScore into performance response (R) e.g. CP
//
// Two curves Negative Training Effect (NTE) and Positive Training Effect (PTE)
// are computed and added together give a Performance Response P(t)
//
// Each curve has two parameters; a coefficient to map I to R (k) and a decay (t)
//
// This class collects data and performs a model fit to estimate k1,k2,t1,t2.
//
// At present the Response is fixed (Power Index) and the Impulse is user definable
// we are likely to expand this to allow multiple performance measures to allow
// the model to look at IR between specific type of impulse and a specific response
// e.g. supramaximal work to pmax, above threshold to v02max etc.
//
//
Banister::Banister(Context *context, QString symbol, double t1, double t2, double k1, double k2) :
symbol(symbol), k1(k1), k2(k2), t1(t1), t2(t2), days(0), context(context)
{
refresh();
}

// get at the computed value
double
Banister::value(QDate date, int type)
{
// check in range
if (date > stop || date < start) return 0;
int index=date.toJulianDay()-start.toJulianDay();

switch(type) {
case BANISTER_NTE: return data[index].nte;
case BANISTER_PTE: return data[index].pte;
case BANISTER_CP: return data[index].perf * typical_CP / 100;
default:
case BANISTER_PERFORMANCE: return data[index].perf;
}
}
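As a rough usage sketch (condensed from the LTMPlot::createBanisterData code above, which this commit leaves #if 0'd; the metric symbol and date are illustrative only, not part of the diff), the class is driven like this:

    // sketch only: assumes a valid Context* and the BANISTER_* constants from LTMPlot.h
    Banister banister(context, "coggan_tss", 0, 0, 0, 0); // constructor calls refresh() to collect scores and tests
    banister.fit();                                       // Levenberg-Marquardt fit of k1,k2,t1,t2,p0 per season window
    double perf = banister.value(QDate::currentDate(), BANISTER_PERFORMANCE); // power index (% of the typical athlete)
    double cp   = banister.value(QDate::currentDate(), BANISTER_CP);          // the same, scaled to watts via typical_CP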

void
Banister::init()
{
// ok we can initialise properly
start = QDate();
stop = QDate();
rides = 0;
meanscore = 0;
days = 0;
data.resize(0);
windows.clear();

// default values
k1=0.2;
k2=0.2;
t1=31;
t2=15;
}

void
Banister::refresh()
{
// clear
init();

// all data
QDate f, l;
if (context->athlete->rideCache->rides().count()) {

// set date range - extend to a year after last ride
f= context->athlete->rideCache->rides().first()->dateTime.date();
l= context->athlete->rideCache->rides().last()->dateTime.date().addYears(1);

} else
return;

// resize the data
if (l.toJulianDay() - f.toJulianDay() <= 0) return;

// ok we can initialise properly
start = f;
stop = l;
days = stop.toJulianDay() - start.toJulianDay();
data.resize(days);

// now initialise with zero values
data.fill(banisterData());

// guess we will have only half as performances
performanceDay.resize(days/2);
performanceScore.resize(days/2);
performances=0;

//
// 1. COLLECT IMPULSE AND RESPONSE DATA
//

// we can now fill with ride values
foreach(RideItem *item, context->athlete->rideCache->rides()) {

// load measure
double score = item->getForSymbol(symbol);
long day = item->dateTime.date().toJulianDay() - start.toJulianDay();
data[day].score = score;

// average out measures
if (score>0) {
rides++;
meanscore += score; // averaged at end
}

// get best scoring performance *test* in the ride
double todaybest=0;
foreach(IntervalItem *i, item->intervals()) {
if (i->istest()) {
double pix=i->getForSymbol("power_index");
if (pix > todaybest) todaybest = pix;
}
}

// is there a performance test already there for today?
if (todaybest > 0) {
if (performances > 0 && performanceDay[performances-1] == day) {
if (performanceScore[performances-1] < todaybest)
performanceScore[performances-1] = todaybest;
} else {
performanceDay[performances] = day;
performanceScore[performances] = todaybest;
performances++;
}
}

// if we didn't find a performance test in the ride lets see if there
// is a weekly performance already identified
if (!(todaybest > 0)) {
Performance p = context->athlete->rideCache->estimator->getPerformanceForDate(item->dateTime.date());
if (!p.submaximal && p.powerIndex > 0) {

// its not submax
todaybest = p.powerIndex;
if (performances > 0 && performanceDay[performances-1] == day) {
if (performanceScore[performances-1] < todaybest) // validating with '<=' not '<' is vital for 2-a-days
performanceScore[performances-1] = todaybest;
} else {
performanceDay[performances] = day;
performanceScore[performances] = todaybest;
performances++;
}
}
}

// add more space
if (performances == performanceDay.size()) {
performanceDay.resize(performanceDay.size() + (days/2));
performanceScore.resize(performanceDay.size() + (days/2));
}
}

// average out meanscore after accumulated above
meanscore /= double(rides);

// add performances to the banister data, might be useful later
// the performanceDay and performanceScore vectors are just there
// for model fitting calls to lmcurve.
for(int i=0; i<performances; i++) {
printd("Performance %d on day=%g score=%g\n", i, performanceDay[i], performanceScore[i]);
data[performanceDay[i]].test = performanceScore[i];
}

//for (int i=0; i < data.length(); i++) printd("%d, %g, %g\n", i, data[i].score, data[i].test);
printd("%ld non-zero rides, average score %g, %d performance tests\n", rides, meanscore, performances);

//
// 2. FIND FITTING WINDOWS
//
bool inwindow=false;
long lastworkoutday=0;
banisterFit adding(this);
int testoffset=0;
for(int i=0; i<data.length(); i++) {

if (data[i].score>0) lastworkoutday=i;

// found the beginning of a season
if (!inwindow && data[i].score > 0) {
inwindow=true;
lastworkoutday=i;
adding=banisterFit(this);
adding.startIndex=i;
adding.startDate=start.addDays(i);
}

// found a test in this season
if (inwindow && data[i].test >0) {
if (adding.testoffset==-1) adding.testoffset=testoffset;
adding.tests++;
}

// move testoffset on
if (data[i].test >0) testoffset++;

// found the end of a season
if (inwindow && data[i].score <= 0 && i-lastworkoutday > typical_SeasonBreak) {

// only useful if we have 2 or more tests to fit to
if (adding.tests >= 2) {
adding.stopIndex=i;
adding.stopDate=start.addDays(i);
windows<<adding;
}
inwindow=false;
}
}

foreach(banisterFit f, windows) {
printd("window: %s to %s, %d tests (from %d) season %ld days\n",
f.startDate.toString().toStdString().c_str(),
f.stopDate.toString().toStdString().c_str(),
f.tests, f.testoffset,
f.stopIndex-f.startIndex);
}

//
// EXPAND A FITTING WINDOW WHERE <5 TESTS
//
int i=0;
while(i < windows.length() && windows.count() > 1) {

// combine and remove prior
if (windows[i].tests < 5 && i>0) {
windows[i].combine(windows[i-1]);
windows.removeAt(--i);
} else i++;
}

foreach(banisterFit f, windows) {
printd("post combined window: %s to %s, %d tests (from %d) season %ld days\n",
f.startDate.toString().toStdString().c_str(),
f.stopDate.toString().toStdString().c_str(),
f.tests, f.testoffset,
f.stopIndex-f.startIndex);
}
}

double
banisterFit::f(double d, const double *parms)
{
if (k1 > parms[0] || k1 < parms[0] ||
k2 > parms[1] || k2 < parms[1] ||
t1 > parms[2] || t1 < parms[2] ||
t2 > parms[3] || t2 < parms[3] ||
p0 > parms[4] || p0 < parms[4]) {

// we'll keep the parameters passed
k1 = parms[0];
k2 = parms[1];
t1 = parms[2];
t2 = parms[3];
p0 = parms[4];

//printd("fit iter %s to %s [k1=%g k2=%g t1=%g t2=%g p0=%g]\n", startDate.toString().toStdString().c_str(), stopDate.toString().toStdString().c_str(), k1,k2,t1,t2,p0); // too much info even in debug, unless you want it

// ack, we need to recompute our window using the parameters supplied
bool first = true;
for (int index=startIndex; index < stopIndex; index++) {

// g and h are just accumulated training load with different decay parameters
if (first) {
parent->data[index].g = parent->data[index].h = 0;
first = false;
} else {
parent->data[index].g = (parent->data[index-1].g * exp (-1/t1)) + parent->data[index].score;
parent->data[index].h = (parent->data[index-1].h * exp (-1/t2)) + parent->data[index].score;
}

// apply coefficients
parent->data[index].pte = parent->data[index].g * k1;
parent->data[index].nte = parent->data[index].h * k2;
parent->data[index].perf = p0 + parent->data[index].pte - parent->data[index].nte;
}
}

// return previously computed
//printd("result perf(%s)=%g vs test=%g\n", parent->start.addDays(d).toString().toStdString().c_str(),parent->data[int(d)].perf, parent->data[int(d)].test);
return parent->data[int(d)].perf;
}
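In other words, each lmcurve iteration that changes any parameter triggers a full recompute of the window using the recurrences below (a restatement of the loop above, not additional code), after which f() simply returns the cached performance for the requested day:

    g(i) = g(i-1) * exp(-1/t1) + score(i)   // positive (PTE) accumulator
    h(i) = h(i-1) * exp(-1/t2) + score(i)   // negative (NTE) accumulator
    perf(i) = p0 + k1*g(i) - k2*h(i)        // modelled performance for day i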

void
banisterFit::combine(banisterFit other)
{
if (other.startIndex < startIndex) { // before
// after
startIndex = other.startIndex;
startDate = other.startDate;
tests += other.tests;
testoffset = other.testoffset;

} else if (other.startIndex > startIndex){ // after
// before
}
}

// used to wrap a function call when deriving parameters
static QMutex calllmfit;
static banisterFit *calllmfitmodel = NULL;
static double calllmfitf(double t, const double *p) {
return static_cast<banisterFit*>(calllmfitmodel)->f(t, p);

}
void Banister::fit()
{
for(int i=0; i<windows.length(); i++) {

double prior[5]={ k1, k2, t1, t2, performanceScore[windows[i].testoffset] };

lm_control_struct control = lm_control_double;
control.patience = 1000; // more than this and there really is a problem
lm_status_struct status;

printd("fitting window %d start=%s [k1=%g k2=%g t1=%g t2=%g p0=%g]\n", i, windows[i].startDate.toString().toStdString().c_str(), prior[0], prior[1], prior[2], prior[3], prior[4]);

// use forwarder via global variable, so mutex around this !
calllmfit.lock();
calllmfitmodel=&windows[i];

//fprintf(stderr, "Fitting ...\n" ); fflush(stderr);
lmcurve(5, prior, windows[i].tests, performanceDay.constData()+windows[i].testoffset, performanceScore.constData()+windows[i].testoffset,
calllmfitf, &control, &status);

// release for others
calllmfit.unlock();

if (status.outcome >= 0) {
printd("window %d %s [k1=%g k2=%g t1=%g t2=%g p0=%g]\n", i, lm_infmsg[status.outcome], prior[0], prior[1], prior[2], prior[3], prior[4]);
}
}
}

//
// Convert power-duration of test, interval, ride to a percentage comparison
// to the mean athlete from opendata (100% would be a mean effort, whilst a
@@ -49,25 +419,6 @@ const double typical_CP = 261,
// performance measurement.
//

Banister::Banister(Context *context, QString symbol, double t1, double t2, double k1, double k2)
{
// todo
}

Banister::~Banister()
{
// todo
}
void Banister::refit()
{
// todo
}

void Banister::refresh()
{
// todo
}

// power index metric
double powerIndex(double averagepower, double duration)
{
@@ -18,6 +18,7 @@

#include "RideMetric.h"
#include "Context.h"
#include "Estimator.h"
#include <QObject>
#include <QDate>
#include <QVector>
@@ -33,12 +34,39 @@ extern const double typical_CP, typical_WPrime, typical_Pmax;
// calculate power index, used to model in cp plot too
extern double powerIndex(double averagepower, double duration);

// gap with no training that constitutes break in seasons
extern const int typical_SeasonBreak;

struct banisterData{
class banisterData{
public:
banisterData() : score(0), nte(0), pte(0), perf(0), test(0) {}
double score, // TRIMP, BikeScore etc for day
g, // accumulated load with t1 decay for pte
h, // accumulated load with t2 decay for nte
nte, // Negative Training Effect
pte, // Positive Training Effect
perf; // Performance (nte+pte)
perf, // Performance (nte+pte)
test; // test value
};

class Banister;
class banisterFit {
public:
banisterFit(Banister *parent) : p0(0),k1(0),k2(0),t1(0),t2(0),tests(0),testoffset(-1),parent(parent) {}

double f(double t, const double *p);
void combine(banisterFit other);

long startIndex, stopIndex;
QDate startDate, stopDate;

double p0,k1,k2,t1,t2;

// for indexing into banister::performanceDay and banister::performanceScore
int tests;
int testoffset;

Banister *parent;
};

class Banister : public QObject {
@@ -48,22 +76,39 @@ class Banister : public QObject {
public:

Banister(Context *context, QString symbol, double t1, double t2, double k1=0, double k2=0);
~Banister();
double value(QDate date, int type);

// model parameters
double k1, k2;
double t1, t2;
// model parameters - initial 'priors' to use
QString symbol; // load metric
double k1, k2; // nte/pte coefficients
double t1, t2; // nte/pte decay constants

// metric
RideMetric *metric;
QDate start, stop;
int days;

QDate start, stop;
long long days;

// curves
double meanscore; // whats the average non-zero ride score (used for scaling)
long rides; // how many rides are there with a non-zero score

// time series data
QVector<banisterData> data;

// fitting windows with parameter estimates
// window effectively a season
QList<banisterFit> windows;

// performances as double
int performances;
QVector<double> performanceDay, performanceScore;

public slots:
void refresh();
void refit();

void init(); // reset previous fits
void refresh(); // collect data from rides etc
void fit(); // perform fits along windows

private:
Context *context;
@@ -44,6 +44,7 @@ class Performance {
double x; // different units, but basically when as a julian day
};

class Banister;
class Estimator : public QThread {

Q_OBJECT
@@ -76,6 +77,7 @@ class Estimator : public QThread {
protected:

friend class ::Athlete;
friend class ::Banister;

Context *context;
QMutex lock;