Private
Public Access
1
0
This commit is contained in:
Sander Roosendaal
2022-02-17 12:41:12 +01:00
parent 31109da378
commit e9c13d3fe7
3 changed files with 64 additions and 101 deletions

View File

@@ -74,11 +74,12 @@ def getagegrouprecord(age, sex='male', weightcategory='hwt',
ages = df['age']
powers = df['power']
#poly_coefficients = np.polyfit(ages,powers,6)
def fitfunc(pars, x): return np.abs(pars[0])*(1-x/max(120, pars[1]))-np.abs(
pars[2])*np.exp(-x/np.abs(pars[3]))+np.abs(pars[4])*(np.sin(np.pi*x/max(50, pars[5])))
def fitfunc(pars, x):
return np.abs(pars[0])*(1-x/max(120, pars[1]))-np.abs(
pars[2])*np.exp(-x/np.abs(pars[3]))+np.abs(pars[4])*(np.sin(np.pi*x/max(50, pars[5])))
def errfunc(pars, x, y): return fitfunc(pars, x)-y
def errfunc(pars, x, y):
return fitfunc(pars, x)-y
p0 = [700, 120, 700, 10, 100, 100]
@@ -92,8 +93,6 @@ def getagegrouprecord(age, sex='male', weightcategory='hwt',
if success:
power = fitfunc(p1, float(age))
#power = np.polyval(poly_coefficients,age)
power = 0.5*(np.abs(power)+power)
else: # pragma: no cover
power = 0
@@ -123,14 +122,13 @@ oauth_data = {
def c2_open(user):
r = Rower.objects.get(user=user)
if (r.c2token == '') or (r.c2token is None):
s = "Token doesn't exist. Need to authorize"
raise NoTokenError("User has no token")
else:
if (timezone.now() > r.tokenexpirydate):
res = rower_c2_token_refresh(user)
if res == None: # pragma: no cover
if res is None: # pragma: no cover
raise NoTokenError("User has no token")
if res[0] != None:
if res[0] is not None:
thetoken = res[0]
else: # pragma: no cover
raise NoTokenError("User has no token")
@@ -142,7 +140,7 @@ def c2_open(user):
def get_c2_workouts(rower, page=1, do_async=True):
try:
thetoken = c2_open(rower.user)
_ = c2_open(rower.user)
except NoTokenError: # pragma: no cover
return 0
@@ -175,7 +173,7 @@ def get_c2_workouts(rower, page=1, do_async=True):
knownc2ids = uniqify(knownc2ids+tombstones+parkedids)
newids = [c2id for c2id in c2ids if not c2id in knownc2ids]
newids = [c2id for c2id in c2ids if c2id not in knownc2ids]
if settings.TESTING:
newids = c2ids
@@ -188,20 +186,18 @@ def get_c2_workouts(rower, page=1, do_async=True):
counter = 0
for c2id in newids:
if do_async: # pragma: no cover
res = myqueue(queuehigh,
handle_c2_async_workout,
alldata,
rower.user.id,
rower.c2token,
c2id,
counter,
rower.defaulttimezone
)
#res = handle_c2_async_workout(alldata,rower.user.id,rower.c2token,c2id,counter)
_ = myqueue(queuehigh,
handle_c2_async_workout,
alldata,
rower.user.id,
rower.c2token,
c2id,
counter,
rower.defaulttimezone)
counter = counter+1
else:
workoutid = create_async_workout(alldata,
rower.user, c2id)
_ = create_async_workout(alldata, rower.user, c2id)
return 1
@@ -212,12 +208,9 @@ def create_async_workout(alldata, user, c2id):
data = alldata[c2id]
splitdata = None
distance = data['distance']
c2id = data['id']
workouttype = data['type']
verified = data['verified']
startdatetime = iso8601.parse_date(data['date'])
weightclass = data['weight_class']
try:
title = data['name']
@@ -229,43 +222,16 @@ def create_async_workout(alldata, user, c2id):
except:
title = ''
weightcategory = 'hwt'
if weightclass == "L":
weightcategory = 'lwt'
# Create CSV file name and save data to CSV file
csvfilename = 'media/Import_'+str(c2id)+'.csv.gz'
totaltime = data['time']/10.
duration = dataprep.totaltime_sec_to_string(totaltime)
try:
timezone_str = data['timezone']
except: # pragma: no cover
timezone_str = 'UTC'
workoutdate = startdatetime.astimezone(
pytz.timezone(timezone_str)
).strftime('%Y-%m-%d')
starttime = startdatetime.astimezone(
pytz.timezone(timezone_str)
).strftime('%H:%M:%S')
try:
notes = data['comments']
name = notes[:40]
except (KeyError, TypeError):
notes = 'C2 Import Workout from {startdatetime}'.format(
startdatetime=startdatetime)
name = notes
r = Rower.objects.get(user=user)
authorizationstring = str('Bearer ' + r.c2token)
headers = {'Authorization': authorizationstring,
'user-agent': 'sanderroosendaal',
'Content-Type': 'application/json'}
url2 = "https://log.concept2.com/api/users/me/results"+str(c2id)
# url2 = "https://log.concept2.com/api/users/me/results"+str(c2id)
url = "https://log.concept2.com/api/users/me/results/"+str(c2id)+"/strokes"
try:
s = requests.get(url, headers=headers)
@@ -638,7 +604,7 @@ def createc2workoutdata(w):
spmav = int(row.df[' Cadence (stokes/min)'][mask].mean())
hrav = int(row.df[' HRCur (bpm)'][mask].mean())
except ValueError:
smpav = 0
spmav = 0
try:
hrav = int(row.df[' HRCur (bpm)'][mask].mean())
    except ValueError:
@@ -729,7 +695,6 @@ def createc2workoutdata(w):
"time": int(10*makeseconds(durationstr)),
"weight_class": c2wc(w.weightcategory),
"comments": w.notes,
"stroke_count": int(row.stroke_count),
'stroke_rate': int(row.df[' Cadence (stokes/min)'].mean()),
'drag_factor': int(row.dragfactor),
"heart_rate": {
@@ -749,7 +714,7 @@ def createc2workoutdata(w):
def do_refresh_token(refreshtoken):
scope = "results:write,user:read"
client_auth = requests.auth.HTTPBasicAuth(C2_CLIENT_ID, C2_CLIENT_SECRET)
# client_auth = requests.auth.HTTPBasicAuth(C2_CLIENT_ID, C2_CLIENT_SECRET)
post_data = {"grant_type": "refresh_token",
"client_secret": C2_CLIENT_SECRET,
"client_id": C2_CLIENT_ID,
@@ -793,7 +758,7 @@ def do_refresh_token(refreshtoken):
def get_token(code):
messg = ''
scope = "user:read,results:write"
client_auth = requests.auth.HTTPBasicAuth(C2_CLIENT_ID, C2_CLIENT_SECRET)
# client_auth = requests.auth.HTTPBasicAuth(C2_CLIENT_ID, C2_CLIENT_SECRET)
post_data = {"grant_type": "authorization_code",
"code": code,
"redirect_uri": C2_REDIRECT_URI,
@@ -840,7 +805,7 @@ def make_authorization_url(request): # pragma: no cover
# Generate a random string for the state parameter
# Save it for use later to prevent xsrf attacks
from uuid import uuid4
state = str(uuid4())
# state = str(uuid4())
scope = "user:read,results:write"
params = {"client_id": C2_CLIENT_ID,
@@ -857,14 +822,14 @@ def make_authorization_url(request): # pragma: no cover
def get_workout(user, c2id, do_async=True):
r = Rower.objects.get(user=user)
thetoken = c2_open(user)
_ = c2_open(user)
job = myqueue(queuehigh,
handle_c2_getworkout,
user.id,
r.c2token,
c2id,
r.defaulttimezone)
_ = myqueue(queuehigh,
handle_c2_getworkout,
user.id,
r.c2token,
c2id,
r.defaulttimezone)
return 1
@@ -909,7 +874,7 @@ def get_username(access_token): # pragma: no cover
try:
res = me_json['data']['username']
id = me_json['data']['id']
_ = me_json['data']['id']
except KeyError:
res = None
@@ -974,7 +939,7 @@ def workout_c2_upload(user, w, asynchron=False):
except KeyError: # pragma: no cover
return "This workout type cannot be uploaded to Concept2", 0
thetoken = c2_open(user)
_ = c2_open(user)
r = Rower.objects.get(user=user)
@@ -1017,16 +982,17 @@ def workout_c2_upload(user, w, asynchron=False):
w.save()
message = "Upload to Concept2 was successful"
else: # pragma: no cover
message = "Something went wrong in workout_c2_upload_view. Response code 200/201 but C2 sync failed: "+response.text
message = "Something went wrong in workout_c2_upload_view." \
" Response code 200/201 but C2 sync failed: "+response.text
c2id = 0
else: # pragma: no cover
job = myqueue(queue,
handle_c2_sync,
w.id,
url,
headers,
json.dumps(data, default=default))
_ = myqueue(queue,
handle_c2_sync,
w.id,
url,
headers,
json.dumps(data, default=default))
c2id = 0
return message, c2id

View File

@@ -2,6 +2,7 @@ from rowers.utils import totaltime_sec_to_string
from rowers.metrics import dtypes
import datetime
from scipy.signal import savgol_filter
import os
# This is Data prep used for testing purposes (no Django environment)
# Uses the debug SQLite database for stroke data
@@ -35,6 +36,12 @@ from rowers.utils import lbstoN
import pytz
from timezonefinder import TimezoneFinder
from rowingdata import (
RowProParser, TCXParser, MysteryParser, RowPerfectParser,
ErgDataParser, CoxMateParser, BoatCoachAdvancedParser, BoatCoachOTWParser,
BoatCoachParser, painsledDesktopParser, SpeedCoach2Parser, speedcoachParser,
ErgStickParser, FITParser, fitsummarydata
)
try:
user = DATABASES['default']['USER']
@@ -171,14 +178,12 @@ def create_c2_stroke_data_db(
spm = 20*np.zeros(nr_strokes)
try:
step = totalseconds/float(nr_strokes)
_ = totalseconds/float(nr_strokes)
except ZeroDivisionError:
return 0
elapsed = np.arange(nr_strokes)*totalseconds/(float(nr_strokes-1))
dstep = distance/float(nr_strokes)
d = np.arange(nr_strokes)*distance/(float(nr_strokes-1))
unixtime = starttimeunix + elapsed
@@ -215,12 +220,9 @@ def create_c2_stroke_data_db(
'cum_dist': d
})
timestr = strftime("%Y%m%d-%H%M%S")
df[' ElapsedTime (sec)'] = df['TimeStamp (sec)']
res = df.to_csv(csvfilename, index_label='index',
compression='gzip')
_ = df.to_csv(csvfilename, index_label='index', compression='gzip')
data = dataprep(df, id=workoutid, bands=False, debug=debug)
@@ -300,8 +302,6 @@ def add_c2_stroke_data_db(strokedata, workoutid, starttimeunix, csvfilename,
df.sort_values(by='TimeStamp (sec)', ascending=True)
timestr = strftime("%Y%m%d-%H%M%S")
# Create CSV file name and save data to CSV file
res = df.to_csv(csvfilename, index_label='index',
@@ -447,8 +447,7 @@ def update_empower(id, inboard, oarlength, boattype, df, f1, debug=False): # pr
if debug: # pragma: no cover
print("not updated ", id)
rowdata = dataprep(df, id=id, bands=True, barchart=True, otwpower=True,
debug=debug)
_ = dataprep(df, id=id, bands=True, barchart=True, otwpower=True, debug=debug)
row = rrdata(df=df)
row.write_csv(f1, gzip=True)
@@ -511,7 +510,7 @@ def update_workout_field_sql(workoutid, fieldname, value, debug=False):
table, fieldname, value, workoutid)
with engine.connect() as conn, conn.begin():
result = conn.execute(query)
_ = conn.execute(query)
conn.close()
engine.dispose()
@@ -527,7 +526,7 @@ def update_c2id_sql(id, c2id): # pragma: no cover
table, c2id, id)
with engine.connect() as conn, conn.begin():
result = conn.execute(query)
_ = conn.execute(query)
conn.close()
engine.dispose()
@@ -588,7 +587,7 @@ def getcpdata_sql(rower_id, table='cpdata', debug=False): # pragma: no cover
rower_id=rower_id,
table=table,
))
connection = engine.raw_connection()
_ = engine.raw_connection()
df = pd.read_sql_query(query, engine)
return df
@@ -606,7 +605,7 @@ def deletecpdata_sql(rower_id, table='cpdata', debug=False): # pragma: no cover
))
with engine.connect() as conn, conn.begin():
try:
result = conn.execute(query)
_ = conn.execute(query)
except: # pragma: no cover
print("Database locked")
conn.close()
@@ -627,7 +626,7 @@ def delete_agegroup_db(age, sex, weightcategory, debug=False):
))
with engine.connect() as conn, conn.begin():
try:
result = conn.execute(query)
_ = conn.execute(query)
except: # pragma: no cover
print("Database locked")
conn.close()
@@ -687,7 +686,7 @@ def updatecpdata_sql(rower_id, delta, cp, table='cpdata', distance=pd.Series([],
else:
engine = create_engine(database_url, echo=False)
with engine.connect() as conn, conn.begin():
with engine.connect() as conn, conn.begin():
df.to_sql(table, engine, if_exists='append', index=False)
conn.close()
engine.dispose()
@@ -818,7 +817,7 @@ def dataprep(rowdatadf, id=0, bands=True, barchart=True, otwpower=True,
if forceunit == 'lbs':
driveenergy = drivelength*averageforce*lbstoN
else: # pragma: no cover
drivenergy = drivelength*averageforce
driveenergy = drivelength*averageforce
distance = rowdatadf.loc[:, 'cum_dist']
@@ -865,7 +864,7 @@ def dataprep(rowdatadf, id=0, bands=True, barchart=True, otwpower=True,
data['hr_bottom'] = 0.0*data['hr']
try:
tel = rowdatadf.loc[:, ' ElapsedTime (sec)']
_ = rowdatadf.loc[:, ' ElapsedTime (sec)']
except KeyError: # pragma: no cover
rowdatadf[' ElapsedTime (sec)'] = rowdatadf['TimeStamp (sec)']

View File

@@ -4,7 +4,7 @@ import gzip
import shutil
import hashlib
from math import isinf, isnan
import uuid
@@ -60,7 +60,7 @@ def validate_image_extension(value):
ext = os.path.splitext(value.name)[1].lower()
valid_extension = ['.jpg', '.jpeg', '.png', '.gif']
if not ext in valid_extension: # pragma: no cover
if ext not in valid_extension: # pragma: no cover
raise ValidationError(u'File not supported')
@@ -71,7 +71,7 @@ def validate_file_extension(value):
'.CSV', '.fit', '.FIT', '.zip', '.ZIP',
'.gz', '.GZ', '.xls',
'.jpg', '.jpeg', '.tiff', '.png', '.gif', '.bmp']
if not ext in valid_extensions: # pragma: no cover
if ext not in valid_extensions: # pragma: no cover
raise ValidationError(u'File not supported!')
@@ -79,7 +79,7 @@ def must_be_csv(value):
import os
ext = os.path.splitext(value.name)[1]
valid_extensions = ['.csv', '.CSV']
if not ext in valid_extensions: # pragma: no cover
if ext not in valid_extensions: # pragma: no cover
raise ValidationError(u'File not supported!')
@@ -87,7 +87,7 @@ def validate_kml(value):
import os
ext = os.path.splitext(value.name)[1]
valid_extensions = ['.kml', '.KML']
if not ext in valid_extensions: # pragma: no cover
if ext not in valid_extensions: # pragma: no cover
raise ValidationError(u'File not supported!')
@@ -144,8 +144,6 @@ def handle_uploaded_file(f):
fname = f.name
ext = fname.split('.')[-1]
fname = '%s.%s' % (uuid.uuid4(), ext)
#timestr = uuid.uuid4().hex[:10]+'-'+time.strftime("%Y%m%d-%H%M%S")
#fname = timestr+'-'+fname
fname2 = 'media/'+fname
with open(fname2, 'wb+') as destination:
for chunk in f.chunks():