Private
Public Access
1
0

attempt 1

This commit is contained in:
Sander Roosendaal
2022-07-08 18:03:53 +02:00
parent 7a9e63b7e4
commit b8f7e76d97
5 changed files with 2517 additions and 2328 deletions

File diff suppressed because it is too large Load Diff

View File

@@ -160,161 +160,10 @@ def rdata(file, rower=rrower()): # pragma: no cover
return res return res
# Creates C2 stroke data
def create_c2_stroke_data_db(
        distance, duration, workouttype,
        workoutid, starttimeunix, csvfilename, debug=False):  # pragma: no cover
    """Create synthetic per-stroke data from a Concept2 workout summary.

    Builds an evenly spaced stroke series (one stroke per 10 m) from the
    workout totals, writes it as a gzipped CSV in the painsled column
    layout, and hands the frame to ``dataprep``.

    Parameters
    ----------
    distance : float
        Total workout distance in meters.
    duration : datetime.time
        Total workout duration (hours/minutes/seconds/microseconds).
    workouttype : str
        C2 equipment type; 'rower', 'slides' and 'dynamic' get a power
        estimate from boat speed, everything else gets power 0.
    workoutid : int
        Workout id forwarded to ``dataprep``.
    starttimeunix : float
        Workout start as a Unix timestamp.
    csvfilename : str
        Destination path for the gzipped CSV.
    debug : bool
        Forwarded to ``dataprep``.

    Returns
    -------
    Result of ``dataprep``, or 0 when fewer than two strokes can be derived.
    """
    nr_strokes = int(distance / 10.)
    totalseconds = duration.hour * 3600.
    totalseconds += duration.minute * 60.
    totalseconds += duration.second
    totalseconds += duration.microsecond / 1.e6
    try:
        spm = 60. * nr_strokes / totalseconds
    except ZeroDivisionError:
        # NOTE(review): 20*np.zeros(...) is all zeros; a default cadence of
        # 20 spm (np.full) may have been intended -- behavior kept as-is.
        spm = 20 * np.zeros(nr_strokes)
    # BUG FIX: the series below divides by nr_strokes - 1, so both 0 and 1
    # strokes must bail out (the original only guarded nr_strokes == 0 and
    # crashed with ZeroDivisionError for distances of 10-19 m).
    if nr_strokes < 2:
        return 0
    elapsed = np.arange(nr_strokes) * totalseconds / float(nr_strokes - 1)
    d = np.arange(nr_strokes) * distance / float(nr_strokes - 1)
    unixtime = starttimeunix + elapsed
    pace = 500. * totalseconds / distance
    if workouttype in ['rower', 'slides', 'dynamic']:
        try:
            velo = distance / totalseconds
        except ZeroDivisionError:
            velo = 0
        # Standard ergometer power model: P = 2.8 * v^3.
        power = 2.8 * velo ** 3
    else:
        power = 0
    df = pd.DataFrame({
        'TimeStamp (sec)': unixtime,
        ' Horizontal (meters)': d,
        ' Cadence (stokes/min)': spm,
        ' Stroke500mPace (sec/500m)': pace,
        ' ElapsedTime (sec)': elapsed,
        ' Power (watts)': power,
        ' HRCur (bpm)': np.zeros(nr_strokes),
        ' longitude': np.zeros(nr_strokes),
        ' latitude': np.zeros(nr_strokes),
        ' DragFactor': np.zeros(nr_strokes),
        ' DriveLength (meters)': np.zeros(nr_strokes),
        ' StrokeDistance (meters)': np.zeros(nr_strokes),
        ' DriveTime (ms)': np.zeros(nr_strokes),
        ' StrokeRecoveryTime (ms)': np.zeros(nr_strokes),
        ' AverageDriveForce (lbs)': np.zeros(nr_strokes),
        ' PeakDriveForce (lbs)': np.zeros(nr_strokes),
        ' lapIdx': np.zeros(nr_strokes),
        'cum_dist': d
    })
    # NOTE(review): this overwrites elapsed time with the absolute Unix
    # timestamp; presumably downstream expects it this way -- confirm.
    df[' ElapsedTime (sec)'] = df['TimeStamp (sec)']
    _ = df.to_csv(csvfilename, index_label='index', compression='gzip')
    data = dataprep(df, id=workoutid, bands=False, debug=debug)
    return data
# Saves C2 stroke data to CSV and database # Saves C2 stroke data to CSV and database
def add_c2_stroke_data_db(strokedata, workoutid, starttimeunix, csvfilename,
                          debug=False, workouttype='rower'):
    """Convert Concept2 per-stroke data to a gzipped CSV and prep it for the DB.

    Parameters
    ----------
    strokedata : pandas.DataFrame
        Stroke frame with columns 't' (tenths of a second), 'd' (tenths of
        a meter), 'p' (tenths of sec/500m), and optionally 'spm', 'hr',
        'lat'/'lon' and 'strokelength'.
    workoutid : int
        Workout id forwarded to ``dataprep``.
    starttimeunix : float
        Workout start as a Unix timestamp.
    csvfilename : str
        Destination path for the gzipped CSV.
    debug : bool
        Forwarded to ``dataprep``.
    workouttype : str
        'bike' computes velocity per km instead of per 500 m.

    Returns
    -------
    Result of ``dataprep``, or 0 when ``dataprep`` fails.
    """
    # Cumulative elapsed time / lap index; 't' is in tenths of a second.
    res = make_cumvalues(0.1 * strokedata['t'])
    cum_time = res[0]
    lapidx = res[1]
    unixtime = cum_time + starttimeunix
    seconds = 0.1 * strokedata.loc[:, 't']
    nr_rows = len(unixtime)
    # Optional columns fall back to zeros when absent.
    try:  # pragma: no cover
        latcoord = strokedata.loc[:, 'lat']
        loncoord = strokedata.loc[:, 'lon']
    except KeyError:
        latcoord = np.zeros(nr_rows)
        loncoord = np.zeros(nr_rows)
    try:
        strokelength = strokedata.loc[:, 'strokelength']
    except KeyError:
        strokelength = np.zeros(nr_rows)
    dist2 = 0.1 * strokedata.loc[:, 'd']
    try:
        spm = strokedata.loc[:, 'spm']
    except KeyError:  # pragma: no cover
        spm = 0 * dist2
    try:
        hr = strokedata.loc[:, 'hr']
    except KeyError:  # pragma: no cover
        hr = 0 * spm
    # Pace arrives in tenths of sec/500m; clamp outliers and replace 0 with
    # a slow 300 sec/500m so the velocity division below never divides by 0.
    pace = strokedata.loc[:, 'p'] / 10.
    pace = np.clip(pace, 0, 1e4)
    pace = pace.replace(0, 300)
    velo = 500. / pace
    # Standard ergometer power model: P = 2.8 * v^3.
    power = 2.8 * velo ** 3
    if workouttype == 'bike':  # pragma: no cover
        velo = 1000. / pace
    # Data frame in the painsled CSV column layout.
    df = pd.DataFrame({'TimeStamp (sec)': unixtime,
                       ' Horizontal (meters)': dist2,
                       ' Cadence (stokes/min)': spm,
                       ' HRCur (bpm)': hr,
                       ' longitude': loncoord,
                       ' latitude': latcoord,
                       ' Stroke500mPace (sec/500m)': pace,
                       ' Power (watts)': power,
                       ' DragFactor': np.zeros(nr_rows),
                       ' DriveLength (meters)': np.zeros(nr_rows),
                       ' StrokeDistance (meters)': strokelength,
                       ' DriveTime (ms)': np.zeros(nr_rows),
                       ' StrokeRecoveryTime (ms)': np.zeros(nr_rows),
                       ' AverageDriveForce (lbs)': np.zeros(nr_rows),
                       ' PeakDriveForce (lbs)': np.zeros(nr_rows),
                       ' lapIdx': lapidx,
                       ' WorkoutState': 4,
                       ' ElapsedTime (sec)': seconds,
                       'cum_dist': dist2
                       })
    # BUG FIX: sort_values returns a new frame; the original discarded the
    # result, so the intended time ordering never happened.
    df = df.sort_values(by='TimeStamp (sec)', ascending=True)
    # Write the gzipped CSV alongside the DB prep.
    res = df.to_csv(csvfilename, index_label='index',
                    compression='gzip')
    try:
        data = dataprep(df, id=workoutid, bands=False, debug=debug)
    except Exception:  # pragma: no cover
        # Best-effort: dataprep failure is reported as 0, not raised.
        return 0
    return data
def handle_nonpainsled(f2, fileformat, summary=''): # pragma: no cover def handle_nonpainsled(f2, fileformat, summary=''): # pragma: no cover
oarlength = 2.89 oarlength = 2.89
inboard = 0.88 inboard = 0.88
@@ -413,47 +262,6 @@ def update_strokedata(id, df, debug=False):
return rowdata return rowdata
def update_empower(id, inboard, oarlength, boattype, df, f1, debug=False):  # pragma: no cover
    """Re-apply the Empower-oar power correction to a workout and persist it.

    Scales ' Power (watts)' and 'driveenergy' by the bug-correction factor
    (stashing the uncorrected values in *-old columns), refreshes the stored
    stroke data, reruns ``dataprep`` and rewrites the gzipped CSV.

    Returns
    -------
    bool
        True when the power columns were present and rescaled.
    """
    # Sweep boats (type contains 'x') use different blade constants
    # than sculling boats; 'b' happens to match for both.
    if 'x' in boattype:
        blade_a, blade_b = 0.06, 0.275
    else:
        blade_a, blade_b = 0.15, 0.275
    corr_factor = empower_bug_correction(oarlength, inboard, blade_a, blade_b)
    success = False
    try:
        df['power empower old'] = df[' Power (watts)']
        df[' Power (watts)'] = df[' Power (watts)'] * corr_factor
        df['driveenergy empower old'] = df['driveenergy']
        df['driveenergy'] = df['driveenergy'] * corr_factor
        success = True
    except KeyError:
        # Missing power columns: leave the frame untouched.
        pass
    if success:
        delete_strokedata(id, debug=debug)
        if debug:  # pragma: no cover
            print("updated ", id)
            print("correction ", corr_factor)
    elif debug:  # pragma: no cover
        print("not updated ", id)
    _ = dataprep(df, id=id, bands=True, barchart=True, otwpower=True, debug=debug)
    rrdata(df=df).write_csv(f1, gzip=True)
    return success
def testdata(time, distance, pace, spm): # pragma: no cover def testdata(time, distance, pace, spm): # pragma: no cover
t1 = np.issubdtype(time, np.number) t1 = np.issubdtype(time, np.number)
@@ -633,42 +441,11 @@ def delete_agegroup_db(age, sex, weightcategory, debug=False):
engine.dispose() engine.dispose()
def update_agegroup_db(age, sex, weightcategory, wcdurations, wcpower,
                       debug=False):
    """Replace the stored age-group records for one (age, sex, weight) bucket.

    Deletes the existing rows, then appends a cleaned duration/power table
    to the 'calcagegrouprecords' table.

    Parameters
    ----------
    age, sex, weightcategory
        Bucket identifiers written onto every row.
    wcdurations, wcpower : sequence
        Parallel lists of record durations and power values; float NaN
        entries are treated as missing.
    debug : bool
        Selects the debug database instead of production.
    """
    delete_agegroup_db(age, sex, weightcategory, debug=debug)

    def _nan_to_none(values):
        # SQL cannot take float NaN; map it to None (NULL) up front.
        return [None if type(v) is float and np.isnan(v) else v
                for v in values]

    df = pd.DataFrame(
        {
            'duration': _nan_to_none(wcdurations),
            'power': _nan_to_none(wcpower),
        }
    )
    df['sex'] = sex
    df['age'] = age
    df['weightcategory'] = weightcategory
    # Discard any rows that still carry infinities or missing values.
    df.replace([np.inf, -np.inf], np.nan, inplace=True)
    df.dropna(axis=0, inplace=True)
    db_url = database_url_debug if debug else database_url  # pragma: no cover
    engine = create_engine(db_url, echo=False)
    with engine.connect() as conn, conn.begin():
        df.to_sql('calcagegrouprecords', engine, if_exists='append', index=False)
        conn.close()
    engine.dispose()
def updatecpdata_sql(rower_id, delta, cp, table='cpdata', distance=pd.Series([], dtype='float'), debug=False):
def updatecpdata_sql(rower_id, delta, cp, table='cpdata',
distance=pd.Series([], dtype='float'), debug=False):
deletecpdata_sql(rower_id, table=table, debug=debug) deletecpdata_sql(rower_id, table=table, debug=debug)
df = pd.DataFrame( df = pd.DataFrame(
{ {

2050
rowers/dataroutines.py Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -2,7 +2,7 @@ from django.shortcuts import redirect
from django.http import HttpResponse from django.http import HttpResponse
from django.contrib import messages from django.contrib import messages
from rowers.mytypes import otwtypes from rowers.mytypes import otwtypes
from rowers.tasks import handle_sendemail_expired #from rowers.tasks import handle_sendemail_expired
from django.utils import timezone from django.utils import timezone
from rowers.models import Workout, PowerTimeFitnessMetric, Rower, PaidPlan from rowers.models import Workout, PowerTimeFitnessMetric, Rower, PaidPlan
import datetime import datetime
@@ -99,6 +99,7 @@ class RowerPlanMiddleWare(object):
# remove from Free Coach groups # remove from Free Coach groups
# send email # send email
from rowers.tasks import handle_sendemail_expired
_ = myqueue(queue, _ = myqueue(queue,
handle_sendemail_expired, handle_sendemail_expired,
r.user.email, r.user.email,

View File

@@ -1,3 +1,18 @@
import os
os.environ["DJANGO_ALLOW_ASYNC_UNSAFE"] = "true"
from YamJam import yamjam
CFG = yamjam()['rowsandallapp']
try:
os.environ.setdefault("DJANGO_SETTINGS_MODULE",CFG['settings_name'])
except KeyError:
os.environ.setdefault("DJANGO_SETTINGS_MODULE","rowsandall_app.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
from rowers.models import *
import math import math
from rowers.courseutils import ( from rowers.courseutils import (
coursetime_paths, coursetime_first, time_in_path, coursetime_paths, coursetime_first, time_in_path,
@@ -16,8 +31,7 @@ import rowers.longtask as longtask
import requests import requests
import rowers.datautils as datautils import rowers.datautils as datautils
""" Background tasks done by Celery (develop) or QR (production) """ """ Background tasks done by QR (production) """
import os
import time import time
import gc import gc
import gzip import gzip
@@ -61,13 +75,15 @@ import rowers.otw_power_calculator_pb2_grpc as calculator_pb2_grpc
import rowers.rowing_workout_metrics_pb2 as metrics_pb2 import rowers.rowing_workout_metrics_pb2 as metrics_pb2
import rowers.rowing_workout_metrics_pb2_grpc as metrics_pb2_grpc import rowers.rowing_workout_metrics_pb2_grpc as metrics_pb2_grpc
from rowsandall_app.settings import SITE_URL from django.conf import settings
from rowsandall_app.settings_dev import SITE_URL as SITE_URL_DEV SITE_URL = settings.SITE_URL
from rowsandall_app.settings import PROGRESS_CACHE_SECRET SITE_URL_DEV = settings.SITE_URL
from rowsandall_app.settings import SETTINGS_NAME PROGRESS_CACHE_SECRET = settings.PROGRESS_CACHE_SECRET
from rowsandall_app.settings import workoutemailbox SETTINGS_NAME = settings.SETTINGS_NAME
from rowsandall_app.settings import UPLOAD_SERVICE_SECRET, UPLOAD_SERVICE_URL
from rowsandall_app.settings import NK_API_LOCATION UPLOAD_SERVICE_URL = settings.UPLOAD_SERVICE_URL
UPLOAD_SERVICE_SECRET = settings.UPLOAD_SERVICE_SECRET
NK_API_LOCATION = settings.NK_API_LOCATION
from requests_oauthlib import OAuth1, OAuth1Session from requests_oauthlib import OAuth1, OAuth1Session
@@ -83,16 +99,19 @@ from rowers.emails import htmlstrip
from rowers import mytypes from rowers import mytypes
from rowers.dataprepnodjango import ( from rowers.dataroutines import (
getsmallrowdata_db, updatecpdata_sql, update_c2id_sql, getsmallrowdata_db, updatecpdata_sql, update_c2id_sql,
update_workout_field_sql, #update_workout_field_sql,
update_agegroup_db, update_strokedata, update_agegroup_db, update_strokedata,
add_c2_stroke_data_db, totaltime_sec_to_string, add_c2_stroke_data_db, totaltime_sec_to_string,
create_c2_stroke_data_db, update_empower, create_c2_stroke_data_db, update_empower,
database_url_debug, database_url, dataprep, # database_url_debug,
database_url, dataprep,
# create_strava_stroke_data_db # create_strava_stroke_data_db
) )
database_url_debug = database_url
from rowers.opaque import encoder from rowers.opaque import encoder
@@ -297,10 +316,11 @@ def handle_c2_sync(workoutid, url, headers, data, debug=False, **kwargs):
s = response.json() s = response.json()
c2id = s['data']['id'] c2id = s['data']['id']
res = update_workout_field_sql( workout = Workout.objects.get(id=workoutid)
workoutid, 'uploadedtoc2', c2id, debug=debug) workout.uploadedtoc2 = c2id
workout.save()
return res return 1
@app.task @app.task
@@ -316,8 +336,10 @@ def handle_sporttracks_sync(workoutid, url, headers, data, debug=False, **kwargs
id = int(m) id = int(m)
_ = update_workout_field_sql( workout = Workout.objects.get(id=workoutid)
workoutid, 'uploadedtosporttracks', id, debug=debug) workout.uploadedtosporttracks = id
workout.save()
return 1 return 1
@@ -378,8 +400,9 @@ def handle_strava_sync(stravatoken, workoutid, filename, name, activity_type, de
failed = True failed = True
if not failed: if not failed:
_ = update_workout_field_sql( workout = Workout.objects.get(id=workoutid)
workoutid, 'uploadedtostrava', res.id, debug=debug) workout.uploadedtostrava = res.id
workout.save()
try: try:
act = client.update_activity(res.id, activity_type=activity_type, act = client.update_activity(res.id, activity_type=activity_type,
description=description, device_name='Rowsandall.com') description=description, device_name='Rowsandall.com')