Private
Public Access
1
0

Algorithm to downsample data files whose time resolution is too high

This commit is contained in:
Sander Roosendaal
2017-06-12 20:51:09 +02:00
parent a4c241275b
commit 988a9361d2

View File

@@ -204,6 +204,15 @@ def getcp(dfgrouped,logarr):
cpvalue = pd.Series(cpvalue,name='CP')
return delta,cpvalue,avgpower
def df_resample(datadf):
    """Downsample a dataframe to one row per whole second.

    Rows are grouped by the integer part of the 'TimeStamp (sec)'
    column and each group is collapsed to its column-wise mean, so
    sub-second samples are averaged into a single per-second sample.

    Parameters
    ----------
    datadf : pandas.DataFrame
        Must contain a numeric 'TimeStamp (sec)' column (seconds).
        All other columns are assumed numeric so they can be averaged
        -- TODO confirm against callers.

    Returns
    -------
    pandas.DataFrame
        Mean of each column per whole second, indexed by the integer
        second (index name 'timestamps').
    """
    # time stamps must be in seconds; truncate to the whole second
    timestamps = datadf['TimeStamp (sec)'].astype('int')
    # Group by an external Series grouper instead of writing a helper
    # column into datadf -- the original mutated the caller's dataframe
    # as a side effect. The result is identical: same index name, same
    # columns, same per-second means.
    newdf = datadf.groupby(timestamps.rename('timestamps')).mean()
    return newdf
def clean_df_stats(datadf,workstrokesonly=True,ignorehr=True,
ignoreadvanced=False):
# clean data remove zeros and negative values
@@ -479,6 +488,13 @@ def save_workout_database(f2,r,dosmooth=True,workouttype='rower',
powerperc=powerperc,powerzones=r.powerzones)
row = rdata(f2,rower=rr)
dtavg = row.df['TimeStamp (sec)'].diff().mean()
if dtavg < 1:
newdf = df_resample(row.df)
return new_workout_from_df(r,newdf,
title=title)
checks = row.check_consistency()
allchecks = 1
for key,value in checks.iteritems():