Merge branch 'release/v16.3.2'
This commit is contained in:
189
rowers/tasks.py
189
rowers/tasks.py
@@ -2804,7 +2804,10 @@ def handle_update_wps(rid,types,ids,mode,debug=False,**kwargs):
|
|||||||
mask = df['driveenergy'] > 100
|
mask = df['driveenergy'] > 100
|
||||||
except (KeyError, TypeError): # pragma: no cover
|
except (KeyError, TypeError): # pragma: no cover
|
||||||
return 0
|
return 0
|
||||||
wps_median = int(df.loc[mask,'driveenergy'].median())
|
try:
|
||||||
|
wps_median = int(df.loc[mask,'driveenergy'].median())
|
||||||
|
except ValueError:
|
||||||
|
return 0
|
||||||
|
|
||||||
if mode == 'water':
|
if mode == 'water':
|
||||||
query = "UPDATE `rowers_rower` SET `median_wps` = '%s' WHERE `id` = '%s'" % (wps_median,rid)
|
query = "UPDATE `rowers_rower` SET `median_wps` = '%s' WHERE `id` = '%s'" % (wps_median,rid)
|
||||||
@@ -2854,7 +2857,7 @@ def handle_rp3_async_workout(userid,rp3token,rp3id,startdatetime,max_attempts,de
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
workout_download_details = pd.json_normalize(response.json()['data']['download'])
|
workout_download_details = pd.json_normalize(response.json()['data']['download'])
|
||||||
except JSONDecodeError: # pragma: no cover
|
except: # pragma: no cover
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
if workout_download_details.iat[0,1] == 'ready':
|
if workout_download_details.iat[0,1] == 'ready':
|
||||||
@@ -3018,6 +3021,57 @@ def handle_c2_getworkout(userid,c2token,c2id,defaulttimezone,debug=False,**kwarg
|
|||||||
|
|
||||||
return handle_c2_async_workout(alldata,userid,c2token,c2id,0,defaulttimezone)
|
return handle_c2_async_workout(alldata,userid,c2token,c2id,0,defaulttimezone)
|
||||||
|
|
||||||
|
def df_from_summary(data):
    """Build a coarse rowing DataFrame from a Concept2 result summary.

    Used when no per-stroke data is available for a workout: reconstructs
    cumulative time / distance / heart-rate / stroke-rate series from the
    per-split summaries instead.

    Args:
        data: decoded Concept2 API result JSON (dict). Must contain
            'distance', 'id', 'type', 'verified', 'weight_class' and
            'workout' -> 'splits'; 'name' and 'comments' are optional.

    Returns:
        pandas.DataFrame with columns 'TimeStamp (sec)',
        ' Horizontal (meters)', ' HRCur (bpm)', ' Cadence (stokes/min)'
        and derived ' ElapsedTime (sec)'.

    Raises:
        KeyError: if a required summary field or 'workout'/'splits' is
            missing from *data*.
    """
    # These reads double as presence checks on the summary payload; the
    # bound values are not otherwise used in this function.
    distance = data['distance']
    c2id = data['id']
    workouttype = data['type']
    verified = data['verified']
    weightclass = data['weight_class']

    try:
        title = data['name']
    except KeyError:
        title = ""
    try:
        # Append the first line of the comments, truncated to 40 chars.
        t = data['comments'].split('\n', 1)[0]
        title += t[:40]
    except (KeyError, AttributeError, TypeError):  # pragma: no cover
        # Was a bare `except:` — narrowed to "comments missing or not a
        # string". NOTE(review): on failure this also discards a title
        # taken from data['name'] above; preserved as-is.
        title = ''

    # Concept2 reports "L" for lightweight; everything else is heavyweight.
    weightcategory = 'hwt'
    if weightclass == "L":
        weightcategory = 'lwt'

    startdatetime,starttime,workoutdate,duration,starttimeunix,timezone = utils.get_startdatetime_from_c2data(data)

    splits = data['workout']['splits']

    # Running absolute timestamp; renamed from `time` which shadowed the
    # `time` module used elsewhere in this file.
    timestamp = starttimeunix
    elapsed_distance = 0
    # NOTE(review): the series start at 0 while subsequent entries are
    # absolute unix timestamps — ' ElapsedTime (sec)' for the first row
    # comes out as -starttimeunix. Looks suspicious; confirm intent
    # before changing.
    times = [0]
    distances = [0]
    # Seed the first sample with the first split's rates.
    spms = [splits[0]['stroke_rate']]
    hrs = [splits[0]['heart_rate']['average']]

    for split in splits:
        timestamp += split['time']/10.  # split time is in tenths of a second
        elapsed_distance += split['distance']
        times.append(timestamp)
        distances.append(elapsed_distance)
        spms.append(split['stroke_rate'])
        hrs.append(split['heart_rate']['average'])

    # Column names (including leading spaces and the 'stokes' spelling)
    # match the CSV schema used by the rest of the pipeline — do not "fix".
    df = pd.DataFrame({
        'TimeStamp (sec)': times,
        ' Horizontal (meters)': distances,
        ' HRCur (bpm)': hrs,
        ' Cadence (stokes/min)': spms,
    })

    df[' ElapsedTime (sec)'] = df['TimeStamp (sec)']-starttimeunix

    return df
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
@app.task
|
||||||
def handle_c2_async_workout(alldata,userid,c2token,c2id,delaysec,defaulttimezone,debug=False,**kwargs):
|
def handle_c2_async_workout(alldata,userid,c2token,c2id,delaysec,defaulttimezone,debug=False,**kwargs):
|
||||||
time.sleep(delaysec)
|
time.sleep(delaysec)
|
||||||
@@ -3034,6 +3088,13 @@ def handle_c2_async_workout(alldata,userid,c2token,c2id,delaysec,defaulttimezone
|
|||||||
|
|
||||||
weightclass = data['weight_class']
|
weightclass = data['weight_class']
|
||||||
|
|
||||||
|
try:
|
||||||
|
has_strokedata = data['stroke_data']
|
||||||
|
except KeyError:
|
||||||
|
has_strokedata = True
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
s = 'User {userid}, C2 ID {c2id}'.format(userid=userid,c2id=c2id)
|
s = 'User {userid}, C2 ID {c2id}'.format(userid=userid,c2id=c2id)
|
||||||
dologging('debuglog.log',s)
|
dologging('debuglog.log',s)
|
||||||
dologging('debuglog.log',json.dumps(data))
|
dologging('debuglog.log',json.dumps(data))
|
||||||
@@ -3058,7 +3119,7 @@ def handle_c2_async_workout(alldata,userid,c2token,c2id,delaysec,defaulttimezone
|
|||||||
startdatetime,starttime,workoutdate,duration,starttimeunix,timezone = utils.get_startdatetime_from_c2data(data)
|
startdatetime,starttime,workoutdate,duration,starttimeunix,timezone = utils.get_startdatetime_from_c2data(data)
|
||||||
|
|
||||||
|
|
||||||
s = 'Time zone {timezone}, stardatetime {startdatetime}, duration {duration}'.format(
|
s = 'Time zone {timezone}, startdatetime {startdatetime}, duration {duration}'.format(
|
||||||
timezone=timezone,startdatetime=startdatetime,
|
timezone=timezone,startdatetime=startdatetime,
|
||||||
duration=duration)
|
duration=duration)
|
||||||
dologging('debuglog.log',s)
|
dologging('debuglog.log',s)
|
||||||
@@ -3081,83 +3142,91 @@ def handle_c2_async_workout(alldata,userid,c2token,c2id,delaysec,defaulttimezone
|
|||||||
return 0
|
return 0
|
||||||
|
|
||||||
if s.status_code != 200: # pragma: no cover
|
if s.status_code != 200: # pragma: no cover
|
||||||
return 0
|
dologging('debuglog.log','No Stroke Data. Status Code {code}'.format(code=s.status_code))
|
||||||
|
dologging('debuglog.log',s.text)
|
||||||
|
has_strokedata = False
|
||||||
|
|
||||||
strokedata = pd.DataFrame.from_dict(s.json()['data'])
|
if not has_strokedata:
|
||||||
|
df = df_from_summary(data)
|
||||||
|
else:
|
||||||
|
dologging('debuglog.log',json.dumps(s.json()))
|
||||||
|
|
||||||
res = make_cumvalues(0.1*strokedata['t'])
|
strokedata = pd.DataFrame.from_dict(s.json()['data'])
|
||||||
cum_time = res[0]
|
|
||||||
lapidx = res[1]
|
res = make_cumvalues(0.1*strokedata['t'])
|
||||||
|
cum_time = res[0]
|
||||||
|
lapidx = res[1]
|
||||||
|
|
||||||
|
|
||||||
unixtime = cum_time+starttimeunix
|
unixtime = cum_time+starttimeunix
|
||||||
# unixtime[0] = starttimeunix
|
# unixtime[0] = starttimeunix
|
||||||
seconds = 0.1*strokedata.loc[:,'t']
|
seconds = 0.1*strokedata.loc[:,'t']
|
||||||
|
|
||||||
nr_rows = len(unixtime)
|
nr_rows = len(unixtime)
|
||||||
|
|
||||||
try: # pragma: no cover
|
try: # pragma: no cover
|
||||||
latcoord = strokedata.loc[:,'lat']
|
latcoord = strokedata.loc[:,'lat']
|
||||||
loncoord = strokedata.loc[:,'lon']
|
loncoord = strokedata.loc[:,'lon']
|
||||||
except:
|
except:
|
||||||
latcoord = np.zeros(nr_rows)
|
latcoord = np.zeros(nr_rows)
|
||||||
loncoord = np.zeros(nr_rows)
|
loncoord = np.zeros(nr_rows)
|
||||||
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
strokelength = strokedata.loc[:,'strokelength']
|
strokelength = strokedata.loc[:,'strokelength']
|
||||||
except: # pragma: no cover
|
except: # pragma: no cover
|
||||||
strokelength = np.zeros(nr_rows)
|
strokelength = np.zeros(nr_rows)
|
||||||
|
|
||||||
dist2 = 0.1*strokedata.loc[:,'d']
|
dist2 = 0.1*strokedata.loc[:,'d']
|
||||||
|
|
||||||
try:
|
try:
|
||||||
spm = strokedata.loc[:,'spm']
|
spm = strokedata.loc[:,'spm']
|
||||||
except KeyError: # pragma: no cover
|
except KeyError: # pragma: no cover
|
||||||
spm = 0*dist2
|
spm = 0*dist2
|
||||||
|
|
||||||
try:
|
try:
|
||||||
hr = strokedata.loc[:,'hr']
|
hr = strokedata.loc[:,'hr']
|
||||||
except KeyError: # pragma: no cover
|
except KeyError: # pragma: no cover
|
||||||
hr = 0*spm
|
hr = 0*spm
|
||||||
|
|
||||||
pace = strokedata.loc[:,'p']/10.
|
pace = strokedata.loc[:,'p']/10.
|
||||||
pace = np.clip(pace,0,1e4)
|
pace = np.clip(pace,0,1e4)
|
||||||
pace = pace.replace(0,300)
|
pace = pace.replace(0,300)
|
||||||
|
|
||||||
velo = 500./pace
|
velo = 500./pace
|
||||||
power = 2.8*velo**3
|
power = 2.8*velo**3
|
||||||
if workouttype == 'bike': # pragma: no cover
|
if workouttype == 'bike': # pragma: no cover
|
||||||
velo = 1000./pace
|
velo = 1000./pace
|
||||||
|
|
||||||
dologging('debuglog.log','Unix Time Stamp {s}'.format(s=unixtime[0]))
|
dologging('debuglog.log','Unix Time Stamp {s}'.format(s=unixtime[0]))
|
||||||
|
|
||||||
df = pd.DataFrame({'TimeStamp (sec)':unixtime,
|
df = pd.DataFrame({'TimeStamp (sec)':unixtime,
|
||||||
' Horizontal (meters)': dist2,
|
' Horizontal (meters)': dist2,
|
||||||
' Cadence (stokes/min)':spm,
|
' Cadence (stokes/min)':spm,
|
||||||
' HRCur (bpm)':hr,
|
' HRCur (bpm)':hr,
|
||||||
' longitude':loncoord,
|
' longitude':loncoord,
|
||||||
' latitude':latcoord,
|
' latitude':latcoord,
|
||||||
' Stroke500mPace (sec/500m)':pace,
|
' Stroke500mPace (sec/500m)':pace,
|
||||||
' Power (watts)':power,
|
' Power (watts)':power,
|
||||||
' DragFactor':np.zeros(nr_rows),
|
' DragFactor':np.zeros(nr_rows),
|
||||||
' DriveLength (meters)':np.zeros(nr_rows),
|
' DriveLength (meters)':np.zeros(nr_rows),
|
||||||
' StrokeDistance (meters)':strokelength,
|
' StrokeDistance (meters)':strokelength,
|
||||||
' DriveTime (ms)':np.zeros(nr_rows),
|
' DriveTime (ms)':np.zeros(nr_rows),
|
||||||
' StrokeRecoveryTime (ms)':np.zeros(nr_rows),
|
' StrokeRecoveryTime (ms)':np.zeros(nr_rows),
|
||||||
' AverageDriveForce (lbs)':np.zeros(nr_rows),
|
' AverageDriveForce (lbs)':np.zeros(nr_rows),
|
||||||
' PeakDriveForce (lbs)':np.zeros(nr_rows),
|
' PeakDriveForce (lbs)':np.zeros(nr_rows),
|
||||||
' lapIdx':lapidx,
|
' lapIdx':lapidx,
|
||||||
' WorkoutState': 4,
|
' WorkoutState': 4,
|
||||||
' ElapsedTime (sec)':seconds,
|
' ElapsedTime (sec)':seconds,
|
||||||
'cum_dist': dist2
|
'cum_dist': dist2
|
||||||
})
|
})
|
||||||
|
|
||||||
|
|
||||||
df.sort_values(by='TimeStamp (sec)',ascending=True)
|
df.sort_values(by='TimeStamp (sec)',ascending=True)
|
||||||
|
|
||||||
res = df.to_csv(csvfilename,index_label='index',
|
res = df.to_csv(csvfilename,index_label='index',
|
||||||
compression='gzip')
|
compression='gzip'
|
||||||
|
)
|
||||||
|
|
||||||
uploadoptions = {
|
uploadoptions = {
|
||||||
'secret':UPLOAD_SERVICE_SECRET,
|
'secret':UPLOAD_SERVICE_SECRET,
|
||||||
|
|||||||
@@ -348,6 +348,27 @@ class C2Objects(DjangoTestCase):
|
|||||||
self.assertEqual(got, want)
|
self.assertEqual(got, want)
|
||||||
self.assertEqual(workoutdate,'2021-05-23')
|
self.assertEqual(workoutdate,'2021-05-23')
|
||||||
|
|
||||||
|
def test_c2_import_54583351(self):
    # Regression check: Concept2 workout 54583351 must parse to the
    # expected UTC start datetime and workout date.
    with open('rowers/tests/testdata/c2_54583351.json','r') as infile:
        data = json.load(infile)

    (startdatetime, starttime, workoutdate, duration,
     starttimeunix, timezone) = utils.get_startdatetime_from_c2data(data)

    self.assertEqual(str(timezone),'UTC')

    # Compare via ISO-8601 strings to sidestep datetime object equality.
    got = arrow.get(startdatetime).isoformat()
    want = arrow.get('2021-05-26 08:59:34.000000+00:00').isoformat()

    self.assertEqual(got, want)
    self.assertEqual(workoutdate,'2021-05-26')
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@patch('rowers.c2stuff.requests.get', side_effect=mocked_requests)
|
@patch('rowers.c2stuff.requests.get', side_effect=mocked_requests)
|
||||||
|
|||||||
Reference in New Issue
Block a user