Private
Public Access
1
0
This commit is contained in:
2024-03-13 18:57:18 +01:00
parent 4b83a251db
commit 5149f08de7
9 changed files with 97 additions and 228 deletions

View File

@@ -502,8 +502,28 @@ def calculate_goldmedalstandard(rower, workout, recurrance=True):
# NOTE(review): this span is a commit-diff rendering with the +/- markers and
# all indentation stripped — an "@@ ... @@" hunk header appears mid-body below,
# and pre-change / post-change code are interleaved. It is not runnable Python
# as-is; the comments below only annotate what the visible lines show.
# Purpose (from the visible code): compute critical-power (CP) data for a
# workout, using a per-workout parquet cache, and persist the result.
def setcp(workout, background=False, recurrance=True):
# Per-workout CP cache file.
filename = 'media/cpdata_{id}.parquet.gz'.format(id=workout.id)
try:
# Duplicate assignment — presumably one side of the diff; verify which
# version keeps it.
filename = 'media/cpdata_{id}.parquet.gz'.format(id=workout.id)
df = pd.read_parquet(filename)
if not df.empty:
# check dts
# Cache is considered valid only if its first 'delta' value lies on the
# log-spaced time grid produced by datautils.getlogarr(4000).
tarr = datautils.getlogarr(4000)
if df['delta'][0] in tarr:
return(df, df['delta'], df['cp'])
except:
# NOTE(review): bare except silently swallows all cache-read failures
# (including KeyboardInterrupt); narrowing it would be safer — but this
# is a diff fragment, so no code change is made here.
pass
# Cache miss: pull per-stroke power/time rows for this workout from the DB.
strokesdf = getsmallrowdata_db(
['power', 'workoutid', 'time'], ids=[workout.id])
if strokesdf.empty:
# No stroke data — return the empty-result triple used throughout.
return pd.DataFrame({'delta': [], 'cp': []}), pd.Series(dtype='float'), pd.Series(dtype='float')
# 'time' is in milliseconds here (divided by 1000 below) — TODO confirm.
totaltime = strokesdf['time'].max()
maxt = totaltime/1000.
logarr = datautils.getlogarr(maxt)
csvfilename = workout.csvfilename
# check what the real file name is
if os.path.exists(csvfilename):
# NOTE(review): embedded diff hunk header — the lines above and below
# this point come from two different regions of the original file.
@@ -514,80 +534,53 @@ def setcp(workout, background=False, recurrance=True):
csvfile = csvfilename+'.gz'
else: # pragma: no cover
return pd.DataFrame({'delta': [], 'cp': []}), pd.Series(dtype='float'), pd.Series(dtype='float')
csvfile = os.path.abspath(csvfile)
# Duplicate of the DB fetch above — presumably the removed side of the diff.
strokesdf = getsmallrowdata_db(
['power', 'workoutid', 'time'], ids=[workout.id])
# NOTE(review): two alternative gRPC invocation styles appear next —
# grpc.protos()/grpc.services() one-shot call (immediately below) vs an
# explicit insecure_channel + generated stub (further down). These are
# almost certainly the before/after of this commit; only one belongs in
# the real file.
protos = grpc.protos("rowing_workout_metrics.proto")
services = grpc.services("rowing_workout_metrics.proto")
req = protos.CPRequest(
filename = csvfile,
filetype = "CSV",
# Time grid padded 5% past the longest stroke time.
tarr = datautils.getlogarr(1.05*strokesdf['time'].max())
)
response = services.GetCP(req, "localhost:50052", insecure=True)
delta = response.delta
cpvalues = response.power
# Alternative (explicit-channel) gRPC path:
with grpc.insecure_channel(
target='localhost:50052',
options=[('grpc.lb_policy_name', 'pick_first'),
('grpc.enable_retries', 0), ('grpc.keepalive_timeout_ms',
10000)]
) as channel:
try:
# Wait up to 10s for the metrics service to become reachable.
grpc.channel_ready_future(channel).result(timeout=10)
except grpc.FutureTimeoutError: # pragma: no cover
dologging('metrics.log','grpc channel time out in setcp')
return pd.DataFrame({'delta': [], 'cp': []}), pd.Series(dtype='float'), pd.Series(dtype='float')
stub = metrics_pb2_grpc.MetricsStub(channel)
req = metrics_pb2.CPRequest(filename = csvfile, filetype = "CSV", tarr = logarr)
try:
response = stub.GetCP(req, timeout=60)
except Exception as e:
# Any RPC failure is logged with full traceback and degrades to the
# empty-result triple rather than raising to the caller.
dologging('metrics.log', traceback.format_exc())
return pd.DataFrame({'delta': [], 'cp': []}), pd.Series(dtype='float'), pd.Series(dtype='float')
delta = pd.Series(np.array(response.delta))
cpvalues = pd.Series(np.array(response.power))
powermean = response.avgpower
df = pd.DataFrame({
'delta': delta,
'cp': cpvalues,
'id': workout.id,
})
# Persist the freshly computed CP curve into the cache file read above.
df.to_parquet(filename, engine='fastparquet', compression='GZIP')
if recurrance:
# Recompute and store the workout's gold-medal-standard metrics.
goldmedalstandard, goldmedalduration = calculate_goldmedalstandard(
workout.user, workout)
workout.goldmedalstandard = goldmedalstandard
workout.goldmedalduration = goldmedalduration
workout.save()
return df, delta, cpvalues
# NOTE(review): everything below duplicates the flow above using a local
# datautils.getcp computation instead of the gRPC service — presumably the
# removed (pre-commit) implementation. Annotated for completeness only.
try:
# Constant power means no meaningful CP curve can be derived.
if strokesdf['power'].std() == 0:
return pd.DataFrame(), pd.Series(dtype='float'), pd.Series(dtype='float')
except (KeyError, TypeError):
return pd.DataFrame(), pd.Series(dtype='float'), pd.Series(dtype='float')
if background: # pragma: no cover
# Defer the computation to a low-priority queue worker.
_ = myqueue(queuelow, handle_setcp, strokesdf, filename, workout.id)
return pd.DataFrame({'delta': [], 'cp': []}), pd.Series(dtype='float'), pd.Series(dtype='float')
if not strokesdf.empty:
totaltime = strokesdf['time'].max()
try:
powermean = strokesdf['power'].mean()
except KeyError: # pragma: no cover
powermean = 0
if powermean != 0:
thesecs = totaltime
# Same 5% padding of the time grid as in the gRPC request above.
maxt = 1.05 * thesecs
if maxt > 0:
logarr = datautils.getlogarr(maxt)
dfgrouped = strokesdf.groupby(['workoutid'])
# Local CP computation (replaced by the gRPC call in this commit).
delta, cpvalues, avgpower = datautils.getcp(dfgrouped, logarr)
df = pd.DataFrame({
'delta': delta,
'cp': cpvalues,
'id': workout.id,
})
df.to_parquet(filename, engine='fastparquet',
compression='GZIP')
if recurrance:
goldmedalstandard, goldmedalduration = calculate_goldmedalstandard(
workout.user, workout)
workout.goldmedalstandard = goldmedalstandard
workout.goldmedalduration = goldmedalduration
workout.save()
return df, delta, cpvalues
# Fallback: empty-result triple when no CP could be computed.
return pd.DataFrame({'delta': [], 'cp': []}), pd.Series(dtype='float'), pd.Series(dtype='float')
def update_wps(r, types, mode='water', asynchron=True):
@@ -723,6 +716,7 @@ def join_workouts(r, ids, title='Joined Workout',
# NOTE(review): diff fragment with indentation stripped — an "@@ ... @@"
# hunk header interrupts the body below, so the except clause matching the
# try: is not visible here. Comments annotate only what the lines show.
# Purpose (from the visible code): merge the cached CP curves of several
# workouts into one best-effort envelope (max cp per delta).
def fetchcp_new(rower, workouts):
data = []
for workout in workouts:
# Same per-workout parquet cache naming as setcp.
cpfile = 'media/cpdata_{id}.parquet.gz'.format(id=workout.id)
try:
# NOTE(review): embedded diff hunk header — the loop body that fills
# `data` is not visible between here and the aggregation below.
@@ -743,12 +737,15 @@ def fetchcp_new(rower, workouts):
if len(data) > 1:
df = pd.concat(data, axis=0)
try:
# Keep, for each delta, only the row(s) achieving the maximum cp —
# i.e. the upper envelope across all workouts' CP curves.
df = df[df['cp'] == df.groupby(['delta'])['cp'].transform('max')]
except KeyError: # pragma: no cover
return pd.Series(dtype='float'), pd.Series(dtype='float'), 0, pd.Series(dtype='float'), pd.Series(dtype='float')
df = df.sort_values(['delta']).reset_index()
# Drop implausibly low CP values (<= 20 W, presumably) — TODO confirm units.
df = df[df['cp']>20]
# Returns (delta, cp, 0, workout, url); the constant 0 presumably fills a
# slot (avg power?) used by callers — verify against call sites.
return df['delta'], df['cp'], 0, df['workout'], df['url']