Private
Public Access
1
0

First bit of plotting extra metrics.

Need to look at the data duplication — where does it come from?
This commit is contained in:
Sander Roosendaal
2017-11-28 23:12:32 +01:00
parent 79d20bb860
commit 4e5b661923
2 changed files with 65 additions and 14 deletions

View File

@@ -1547,8 +1547,27 @@ def getrowdata_db(id=0, doclean=False, convertnewtons=True):
def getsmallrowdata_db(columns, ids=[], doclean=True, workstrokesonly=True):
prepmultipledata(ids)
data = read_cols_df_sql(ids, columns)
data,extracols = read_cols_df_sql(ids, columns)
if extracols and len(ids)==1:
w = Workout.objects.get(id=ids[0])
row = rdata(w.csvfilename)
f = row.df['TimeStamp (sec)'].diff().mean()
if f != 0 and not np.isnan(f):
windowsize = 2 * (int(10. / (f))) + 1
else:
windowsize = 1
for c in extracols:
try:
cdata = row.df[c]
cdata.fillna(inplace=True,method='bfill')
# This doesn't work because sometimes data are duplicated at save
cdata2 = savgol_filter(cdata.values,windowsize,3)
print len(cdata),len(cdata2),'mies'
data[c] = cdata
except KeyError:
data[c] = 0
# convert newtons
if doclean:
@@ -1622,9 +1641,12 @@ def read_cols_df_sql(ids, columns, convertnewtons=True):
# axx = [ax[0] for ax in axes]
axx = [f.name for f in StrokeData._meta.get_fields()]
extracols = []
for c in columns:
if not c in axx:
columns.remove(c)
extracols.append(c)
columns = list(columns) + ['distance', 'spm', 'workoutid']
columns = [x for x in columns if x != 'None']
@@ -1673,7 +1695,7 @@ def read_cols_df_sql(ids, columns, convertnewtons=True):
'averageforce'] * lbstoN
engine.dispose()
return df
return df,extracols
# Read stroke data from the DB for a Workout ID. Returns a pandas dataframe
@@ -1973,6 +1995,7 @@ def dataprep(rowdatadf, id=0, bands=True, barchart=True, otwpower=True,
)
)
if bands:
# HR bands
data['hr_ut2'] = rowdatadf.ix[:, 'hr_ut2']
@@ -2133,6 +2156,7 @@ def dataprep(rowdatadf, id=0, bands=True, barchart=True, otwpower=True,
# write data if id given
if id != 0:
data['workoutid'] = id
engine = create_engine(database_url, echo=False)
with engine.connect() as conn, conn.begin():
data.to_sql('strokedata', engine, if_exists='append', index=False)