3629 lines
102 KiB
Python
3629 lines
102 KiB
Python
from __future__ import absolute_import
|
|
from __future__ import division
|
|
from __future__ import print_function
|
|
from __future__ import unicode_literals
|
|
|
|
""" Background tasks done by Celery (develop) or QR (production) """
|
|
import os
|
|
import time
|
|
import gc
|
|
import gzip
|
|
import shutil
|
|
import numpy as np
|
|
import re
|
|
import sys
|
|
import json
|
|
|
|
from scipy import optimize
|
|
from scipy.signal import savgol_filter
|
|
from scipy.interpolate import griddata
|
|
|
|
import rowingdata
|
|
from rowingdata import make_cumvalues
|
|
from uuid import uuid4
|
|
from rowingdata import rowingdata as rdata
|
|
from datetime import timedelta
|
|
from sqlalchemy import create_engine
|
|
|
|
|
|
#from celery import app
|
|
from rowers.celery import app
|
|
from celery import shared_task
|
|
|
|
import datetime
|
|
import pytz
|
|
import iso8601
|
|
from iso8601 import ParseError
|
|
|
|
from json.decoder import JSONDecodeError
|
|
from pytz.exceptions import UnknownTimeZoneError
|
|
|
|
from matplotlib.backends.backend_agg import FigureCanvas
|
|
#from matplotlib.backends.backend_cairo import FigureCanvasCairo as FigureCanvas
|
|
import matplotlib.pyplot as plt
|
|
from matplotlib import path
|
|
|
|
import grpc
|
|
import rowers.otw_power_calculator_pb2 as calculator_pb2
|
|
import rowers.otw_power_calculator_pb2_grpc as calculator_pb2_grpc
|
|
import rowers.rowing_workout_metrics_pb2 as metrics_pb2
|
|
import rowers.rowing_workout_metrics_pb2_grpc as metrics_pb2_grpc
|
|
|
|
from rowsandall_app.settings import SITE_URL
|
|
from rowsandall_app.settings_dev import SITE_URL as SITE_URL_DEV
|
|
from rowsandall_app.settings import PROGRESS_CACHE_SECRET
|
|
from rowsandall_app.settings import SETTINGS_NAME
|
|
from rowsandall_app.settings import workoutemailbox
|
|
from rowsandall_app.settings import UPLOAD_SERVICE_SECRET, UPLOAD_SERVICE_URL
|
|
from rowsandall_app.settings import NK_API_LOCATION
|
|
|
|
from requests_oauthlib import OAuth1, OAuth1Session
|
|
|
|
import pandas as pd
|
|
|
|
from django_rq import job
|
|
from django.utils import timezone
|
|
from django.utils.html import strip_tags
|
|
|
|
from rowers.utils import deserialize_list,ewmovingaverage,wavg,dologging
|
|
import rowers.utils as utils
|
|
from rowers.emails import htmlstrip
|
|
from rowers import mytypes
|
|
|
|
|
|
|
|
from rowers.dataprepnodjango import (
|
|
update_strokedata,
|
|
getsmallrowdata_db, updatecpdata_sql,update_c2id_sql,
|
|
update_workout_field_sql,
|
|
update_agegroup_db,
|
|
add_c2_stroke_data_db,totaltime_sec_to_string,
|
|
create_c2_stroke_data_db,update_empower,
|
|
database_url_debug,database_url,dataprep,
|
|
# create_strava_stroke_data_db
|
|
)
|
|
|
|
|
|
from rowers.opaque import encoder
|
|
|
|
from django.core.mail import (
|
|
send_mail,
|
|
EmailMessage,EmailMultiAlternatives,
|
|
)
|
|
|
|
from django.template import Context
|
|
from django.db.utils import OperationalError
|
|
from jinja2 import Template,Environment,FileSystemLoader
|
|
env = Environment(loader = FileSystemLoader(["rowers/templates"]))
|
|
|
|
import rowers.datautils as datautils
|
|
import rowers.utils as utils
|
|
import requests
|
|
import rowers.longtask as longtask
|
|
import arrow
|
|
import stravalib
|
|
from stravalib.exc import ActivityUploadFailed
|
|
|
|
from rowers.utils import get_strava_stream
|
|
|
|
def safetimedelta(x):
    """Convert *x* seconds to a ``timedelta``, falling back to zero.

    NaN raises ValueError and out-of-range magnitudes raise
    OverflowError inside ``timedelta``; both are mapped to a zero delta
    so callers (e.g. GPS resampling) never crash on bad samples.
    """
    try:
        return timedelta(seconds=x)
    except (ValueError, OverflowError):
        # Bug fix: very large values raise OverflowError, which used to
        # escape this "safe" helper.
        return timedelta(seconds=0)
|
|
|
|
siteurl = SITE_URL
|
|
|
|
from rowers.nkimportutils import (
|
|
get_nk_summary, get_nk_allstats, get_nk_intervalstats,getdict,strokeDataToDf,
|
|
add_workout_from_data
|
|
)
|
|
|
|
# testing task
|
|
|
|
from rowers.emails import send_template_email
|
|
from rowers.courseutils import (
|
|
coursetime_paths, coursetime_first, time_in_path,
|
|
InvalidTrajectoryError
|
|
)
|
|
|
|
# Concept2 logbook sends over split data for each interval
|
|
# We use it here to generate a custom summary
|
|
# Some users complained about small differences
|
|
def summaryfromsplitdata(splitdata,data,filename,sep='|',workouttype='rower'):
    """Build a text workout summary from Concept2 logbook split data.

    Parameters
    ----------
    splitdata : list of dict
        Per-interval dictionaries from the C2 logbook API.
    data : dict
        Workout-level C2 result; times arrive in tenths of seconds.
    filename : str
        Stroke file name echoed into the summary header.
    sep : str
        Column separator for the summary table.
    workouttype : str
        'rower', 'bike'/'bikeerg', ... BikeErg power is computed at
        half the displayed velocity.

    Returns
    -------
    (summary, structure, results) : (str, list, list)
        The formatted summary text, plus flat interval-structure and
        per-interval result lists.
    """
    workouttype = workouttype.lower()

    totaldist = data['distance']
    totaltime = data['time']/10.
    try:
        spm = data['stroke_rate']
    except KeyError:
        spm = 0
    try:
        resttime = data['rest_time']/10.
    except KeyError: # pragma: no cover
        resttime = 0
    try:
        restdistance = data['rest_distance']
    except KeyError: # pragma: no cover
        restdistance = 0
    try:
        avghr = data['heart_rate']['average']
    except KeyError: # pragma: no cover
        avghr = 0
    try:
        maxhr = data['heart_rate']['max']
    except KeyError: # pragma: no cover
        maxhr = 0

    try:
        avgpace = 500.*totaltime/totaldist
    except (ZeroDivisionError,OverflowError): # pragma: no cover
        avgpace = 0.

    try:
        restpace = 500.*resttime/restdistance
    except (ZeroDivisionError,OverflowError): # pragma: no cover
        restpace = 0.

    # Concept2 power model: P = 2.8 * v**3.  On the BikeErg, power is
    # computed from half the displayed velocity.
    velo = totaldist/totaltime
    avgpower = 2.8*velo**(3.0)
    if workouttype in ['bike','bikeerg']: # pragma: no cover
        velo = velo/2.
        avgpower = 2.8*velo**(3.0)
        velo = velo*2

    try:
        restvelo = restdistance/resttime
    except (ZeroDivisionError,OverflowError): # pragma: no cover
        restvelo = 0

    restpower = 2.8*restvelo**(3.0)
    if workouttype in ['bike','bikeerg']: # pragma: no cover
        restvelo = restvelo/2.
        restpower = 2.8*restvelo**(3.0)
        restvelo = restvelo*2

    try:
        avgdps = totaldist/data['stroke_count']
    except (ZeroDivisionError,OverflowError,KeyError):
        avgdps = 0

    from rowingdata import summarystring,workstring,interval_string

    # Header + whole-workout work and rest lines.
    sums = summarystring(totaldist,totaltime,avgpace,spm,avghr,maxhr,
                         avgdps,avgpower,readFile=filename,
                         separator=sep)

    sums += workstring(totaldist,totaltime,avgpace,spm,avghr,maxhr,
                       avgdps,avgpower,separator=sep,symbol='W')

    sums += workstring(restdistance,resttime,restpace,0,0,0,0,restpower,
                       separator=sep,
                       symbol='R')

    sums += '\nWorkout Details\n'
    sums += '#-{sep}SDist{sep}-Split-{sep}-SPace-{sep}-Pwr-{sep}SPM-{sep}AvgHR{sep}MaxHR{sep}DPS-\n'.format(
        sep=sep
    )

    intervalnr=0
    sa = []
    results = []

    try:
        timebased = data['workout_type'] in ['FixedTimeSplits','FixedTimeInterval']
    except KeyError: # pragma: no cover
        timebased = False

    for interval in splitdata:
        try:
            idist = interval['distance']
        except KeyError: # pragma: no cover
            idist = 0

        try:
            itime = interval['time']/10.
        except KeyError: # pragma: no cover
            itime = 0
        try:
            ipace = 500.*itime/idist
        except (ZeroDivisionError,OverflowError): # pragma: no cover
            ipace = 180.

        try:
            ispm = interval['stroke_rate']
        except KeyError: # pragma: no cover
            ispm = 0
        try:
            irest_time = interval['rest_time']/10.
        except KeyError: # pragma: no cover
            irest_time = 0
        try:
            iavghr = interval['heart_rate']['average']
        except KeyError: # pragma: no cover
            iavghr = 0
        try:
            # Bug fix: interval max HR was read from the 'average'
            # field; read 'max' as the workout-level code above does.
            imaxhr = interval['heart_rate']['max']
        except KeyError: # pragma: no cover
            imaxhr = 0

        # create interval values
        iarr = [idist,'meters','work']
        resarr = [itime]
        if timebased: # pragma: no cover
            iarr = [itime,'seconds','work']
            resarr = [idist]

        if irest_time > 0:
            iarr += [irest_time,'seconds','rest']
            try:
                resarr += [interval['rest_distance']]
            except KeyError:
                resarr += [np.nan]

        sa += iarr
        results += resarr

        if itime != 0:
            ivelo = idist/itime
            ipower = 2.8*ivelo**(3.0)
            if workouttype in ['bike','bikeerg']: # pragma: no cover
                ipower = 2.8*(ivelo/2.)**(3.0)
        else: # pragma: no cover
            ivelo = 0
            ipower = 0

        sums += interval_string(intervalnr,idist,itime,ipace,ispm,
                                iavghr,imaxhr,0,ipower,separator=sep)
        intervalnr+=1

    return sums,sa,results
|
|
|
|
|
|
@app.task
def add(x, y): # pragma: no cover
    """Trivial smoke-test task: return the sum of *x* and *y*."""
    total = x + y
    return total
|
|
|
|
|
|
@app.task
def handle_c2_sync(workoutid,url,headers,data,debug=False,**kwargs):
    """POST a workout to the Concept2 logbook and store the C2 id.

    Returns the database-update result, or 0 when the logbook API
    rejected the upload.
    """
    response = requests.post(url,headers=headers,data=data)
    if response.status_code not in [200,201]: # pragma: no cover
        return 0

    payload = response.json()
    c2id = payload['data']['id']

    return update_workout_field_sql(workoutid,'uploadedtoc2',c2id,debug=debug)
|
|
|
|
@app.task
def handle_sporttracks_sync(workoutid,url,headers,data,debug=False,**kwargs):
    """Upload a workout to SportTracks and store the activity id.

    The activity id is parsed from the fitnessActivities URI returned
    by the API.  Returns 1 on success, 0 when the upload failed or the
    returned URI did not have the expected shape.
    """
    response = requests.post(url,headers=headers,data=data)
    if response.status_code not in [200,201]: # pragma: no cover
        return 0

    t = response.json()
    uri = t['uris'][0]
    # Bug fix: raw string -- the pattern contains regex escapes
    # (\. and \d) that are invalid escapes in a plain string literal.
    regex = r'.*?sporttracks\.mobi\/api\/v2\/fitnessActivities/(\d+)\.json$'
    m = re.compile(regex).match(uri)
    if m is None: # pragma: no cover
        # Bug fix: an unexpected URI used to raise AttributeError here.
        return 0

    id = int(m.group(1))

    res = update_workout_field_sql(workoutid,'uploadedtosporttracks',id,debug=debug)

    return 1
|
|
|
|
|
|
@app.task
def handle_strava_sync(stravatoken,workoutid,filename,name,activity_type,description,debug=False,**kwargs):
    """Upload a gzipped TCX file to Strava, tag it, store the Strava id.

    Returns 1 on success, 0 when the upload did not yield an activity.
    The temporary file is removed (best effort) in either case.
    """
    client = stravalib.Client(access_token=stravatoken)
    failed = False
    res = None
    with open(filename,'rb') as f:
        try:
            act = client.upload_activity(f,'tcx.gz',name=name)
            try:
                res = act.wait(poll_interval=5.0, timeout=60)
            except stravalib.exc.ActivityUploadFailed: # pragma: no cover
                failed = True
            except JSONDecodeError: # pragma: no cover
                failed = True
            except stravalib.exc.ObjectNotFound: # pragma: no cover
                failed = True
        except ActivityUploadFailed: # pragma: no cover
            failed = True

    if failed or res is None: # pragma: no cover
        # Bug fix: on a failed upload the code used to fall through and
        # crash on an undefined upload result; clean up and report 0.
        try:
            os.remove(filename)
        except OSError:
            pass
        return 0

    try:
        act = client.update_activity(res.id,activity_type=activity_type,
                                     description=description,device_name='Rowsandall.com')
    except TypeError: # pragma: no cover
        # Older stravalib versions do not accept device_name.
        act = client.update_activity(res.id,activity_type=activity_type,
                                     description=description)
    except Exception: # pragma: no cover
        # Best-effort logging of unexpected update failures.
        e = sys.exc_info()[0]
        t = time.localtime()
        timestamp = bytes('{t}'.format(t=time.strftime('%b-%d-%Y_%H%M', t)),'utf-8')
        with open('stravalog.log','ab') as f:
            f.write(b'\n')
            f.write(timestamp)
            # Bug fix: encode before writing to a binary file (writing
            # str to an 'ab' handle raises TypeError).
            f.write(bytes(str(e),'utf-8'))

    result = update_workout_field_sql(workoutid,'uploadedtostrava',res.id,debug=debug)
    try:
        os.remove(filename)
    except OSError: # pragma: no cover
        pass

    return 1
|
|
|
|
@app.task
def handle_c2_import_stroke_data(c2token,
                                 c2id,workoutid,
                                 starttimeunix,
                                 csvfilename,debug=True,**kwargs):
    """Fetch per-stroke data for a Concept2 logbook result and store it.

    Tries the /strokes endpoint first; when that is unavailable, falls
    back to the result summary and synthesizes stroke data from it.
    Returns 1 on success, 0 when nothing could be fetched.
    """
    if 'workouttype' in kwargs: # pragma: no cover
        workouttype = kwargs['workouttype']
    else:
        workouttype = 'rower'

    authorizationstring = str('Bearer ' + c2token)
    headers = {'Authorization': authorizationstring,
               'user-agent': 'sanderroosendaal',
               'Content-Type': 'application/json'}
    url = "https://log.concept2.com/api/users/me/results/"+str(c2id)+"/strokes"
    s = requests.get(url,headers=headers)
    if s.status_code == 200:
        strokedata = pd.DataFrame.from_dict(s.json()['data'])
        result = add_c2_stroke_data_db(
            strokedata,workoutid,starttimeunix,
            csvfilename,debug=debug,workouttype=workouttype
        )

        return 1

    else: # pragma: no cover
        # No stroke data -- fall back to the result summary.
        url = "https://log.concept2.com/api/users/me/results/{id}".format(id=c2id)

        s = requests.get(url,headers=headers)

        if s.status_code == 200:
            workoutdata = s.json()['data']
            distance = workoutdata['distance']
            c2id = workoutdata['id']
            workouttype = workoutdata['type']
            verified = workoutdata['verified']
            startdatetime = iso8601.parse_date(workoutdata['date'])
            weightclass = workoutdata['weight_class']
            weightcategory = 'hwt'
            if weightclass == "L":
                weightcategory = 'lwt'
            # C2 reports time in tenths of seconds.
            totaltime = workoutdata['time']/10.
            duration = totaltime_sec_to_string(totaltime)
            duration = datetime.datetime.strptime(duration,'%H:%M:%S.%f').time()

            try:
                timezone_str = workoutdata['timezone']
            except KeyError:
                # Bug fix: was a bare except; only a missing key should
                # fall back to UTC.
                timezone_str = 'UTC'

            workoutdate = startdatetime.astimezone(
                pytz.timezone(timezone_str)
            ).strftime('%Y-%m-%d')
            starttime = startdatetime.astimezone(
                pytz.timezone(timezone_str)
            ).strftime('%H:%M:%S')

            result = create_c2_stroke_data_db(
                distance,duration,workouttype,
                workoutid,starttimeunix,
                csvfilename,debug=debug,
            )

            return 1

        return 0
|
|
|
|
def getagegrouprecord(age,sex='male',weightcategory='hwt',
|
|
distance=2000,duration=None,indf=pd.DataFrame()):
|
|
|
|
power = 0
|
|
if not duration:
|
|
try:
|
|
df = indf[indf['distance'] == distance]
|
|
except KeyError: # pragma: no cover
|
|
df = pd.DataFrame()
|
|
else:
|
|
duration = 60*int(duration)
|
|
try:
|
|
df = indf[indf['duration'] == duration]
|
|
except KeyError: # pragma: no cover
|
|
df = pd.DataFrame()
|
|
|
|
if not df.empty:
|
|
ages = df['age']
|
|
powers = df['power']
|
|
|
|
#poly_coefficients = np.polyfit(ages,powers,6)
|
|
fitfunc = lambda pars, x: np.abs(pars[0])*(1-x/max(120,pars[1]))-np.abs(pars[2])*np.exp(-x/np.abs(pars[3]))+np.abs(pars[4])*(np.sin(np.pi*x/max(50,pars[5])))
|
|
errfunc = lambda pars, x,y: fitfunc(pars,x)-y
|
|
|
|
p0 = [700,120,700,10,100,100]
|
|
|
|
p1, success = optimize.leastsq(errfunc,p0[:],
|
|
args = (ages,powers))
|
|
|
|
if success and age is not None:
|
|
power = fitfunc(p1, float(age))
|
|
|
|
#power = np.polyval(poly_coefficients,age)
|
|
|
|
power = 0.5*(np.abs(power)+power)
|
|
elif age is not None: # pragma: no cover
|
|
new_age = np.range([age])
|
|
ww = griddata(ages.values,
|
|
powers.values,
|
|
new_age,method='linear',rescale=True)
|
|
power = 0.5*(np.abs(power)+power)
|
|
else: # pragma: no cover
|
|
power = 0
|
|
|
|
return power
|
|
|
|
def polygon_to_path(polygon,debug=True):
    """Build a matplotlib Path from the geo points of a polygon row.

    *polygon* is a DB row whose first element is the polygon id; its
    points are read from rowers_geopoint in polygon order.  The final
    (closing) point is dropped before constructing the Path.
    """
    query = "SELECT id, latitude, longitude FROM rowers_geopoint WHERE polygon_id = {pid} ORDER BY order_in_poly ASC".format(
        pid=polygon[0]
    )
    if debug:
        engine = create_engine(database_url_debug, echo=False)
    else: # pragma: no cover
        engine = create_engine(database_url, echo=False)

    with engine.connect() as conn, conn.begin():
        points = conn.execute(query).fetchall()

    conn.close()
    engine.dispose()

    # Rows are (id, latitude, longitude); keep only the coordinates.
    vertices = [[point[1], point[2]] for point in points]

    return path.Path(vertices[:-1])
|
|
|
|
@app.task(bind=True)
def handle_check_race_course(self,
                             f1,workoutid,courseid,
                             recordid,useremail,userfirstname,
                             **kwargs): # pragma: no cover
    """Check whether GPS workout file *f1* completed course *courseid*.

    Resamples the workout's GPS track to 10 Hz, finds every pass
    through the course's ordered gate polygons, takes the fastest
    complete traversal and writes duration/distance/points into the
    rowers_virtualraceresult (or rowers_coursetestresult, depending on
    the 'mode' kwarg) row *recordid*.  On failure, a per-gate log is
    emailed to the user.  Returns 1 (completed), 2 (course not
    completed) or 0 (unreadable input).
    """

    # Per-run log file; removed again before returning.
    logfile = 'courselog_{workoutid}_{courseid}.log'.format(workoutid=workoutid,courseid=courseid)

    if 'debug' in kwargs: # pragma: no cover
        debug = kwargs['debug']
    else:
        debug = False

    # Skip the first 'splitsecond' seconds of the recording.
    if 'splitsecond' in kwargs: # pragma: no cover
        splitsecond = kwargs['splitsecond']
    else:
        splitsecond = 0

    # Reference speed (m/s) used for the points formula below.
    if 'referencespeed' in kwargs: # pragma: no cover
        referencespeed = kwargs['referencespeed']
    else:
        referencespeed = 5.0

    # Official course length; 0 means "use the measured distance".
    if 'coursedistance' in kwargs: # pragma: no cover
        coursedistance = kwargs['coursedistance']
    else:
        coursedistance = 0

    mode = 'race'
    if 'mode' in kwargs: # pragma: no cover
        mode = kwargs['mode']

    # NOTE(review): 'columns' is currently unused.
    columns = ['time',' latitude',' longitude','cum_dist']

    # The stroke file may be stored bare, as .csv, or gzipped.
    try:
        row = rdata(csvfile=f1)
    except IOError: # pragma: no cover
        try:
            row = rdata(csvfile=f1 + '.csv')
        except IOError: # pragma: no cover
            try:
                row = rdata(csvfile=f1 + '.gz')
            except IOError: # pragma: no cover
                return 0

    row.extend_data()

    #row.df.interpolate(inplace=True)

    # Recompute cumulative distance from GPS so distances are
    # consistent with the gate geometry.
    row.calc_dist_from_gps()
    rowdata = row.df
    rowdata['cum_dist'] = rowdata['gps_dist_calculated']

    # Without GPS coordinates there is nothing to check.
    try:
        s = rowdata[' latitude']
    except KeyError: # pragma: no cover
        return 0

    rowdata.rename(columns = {
        ' latitude':'latitude',
        ' longitude':'longitude',
        'TimeStamp (sec)': 'time',
    }, inplace=True)

    rowdata.fillna(method='backfill',inplace=True)

    # Work in seconds since the start of the recording.
    rowdata['time'] = rowdata['time']-rowdata.loc[0,'time']
    rowdata = rowdata[rowdata['time']>splitsecond]
    # we may want to expand the time (interpolate)
    rowdata['dt'] = rowdata['time'].apply(
        lambda x: safetimedelta(x)
    )

    # Resample onto a regular 10 Hz grid so gate crossings are located
    # with sub-second resolution.
    rowdata = rowdata.resample('100ms',on='dt').mean()
    rowdata = rowdata.interpolate()

    # initiate database engine

    if debug: # pragma: no cover
        engine = create_engine(database_url_debug, echo=False)
    else:
        engine = create_engine(database_url, echo=False)

    # get polygons (course gates, in on-course order)
    query = "SELECT id,name FROM rowers_geopolygon WHERE course_id = {courseid} ORDER BY order_in_course ASC".format(
        courseid=courseid
    )

    with engine.connect() as conn, conn.begin():
        result = conn.execute(query)
        polygons = result.fetchall()

    conn.close()

    engine.dispose()

    # NOTE(review): the loop variable shadows the module-level
    # matplotlib 'path' import; harmless here, as the module name is
    # not used again in this function.
    paths = []
    for polygon in polygons:
        path = polygon_to_path(polygon,debug=debug)
        paths.append(path)

    startsecond = 0
    endsecond = rowdata['time'].max()

    # check how many times went through start polygon
    try:
        try:
            entrytimes,entrydistances = time_in_path(rowdata,paths[0],maxmin='max',getall=True,
                                                     name=polygons[0].name,logfile=logfile)
        except AttributeError: # for testing
            entrytimes, entrydistances = time_in_path(rowdata,paths[0],maxmin='max',getall=True,
                                                      name='Start',logfile=logfile)
        with open(logfile,'ab') as f:
            t = time.localtime()
            timestamp = bytes('{t}'.format(t=time.strftime('%b-%d-%Y_%H%M', t)),'utf-8')
            f.write(b'\n')
            f.write(bytes('Course id {n}, Record id {m}'.format(n=courseid,m=recordid),'utf-8'))
            f.write(b'\n')
            f.write(timestamp)
            f.write(b' ')
            f.write(bytes('Found {n} entrytimes'.format(n=len(entrytimes)),'utf-8'))

    except InvalidTrajectoryError: # pragma: no cover
        entrytimes = []
        entrydistances = []
        coursecompleted = False
        coursemeters = 0
        coursetimeseconds = 0

    # Per-start-pass results: net seconds, meters, completion flag, and
    # the raw first/last gate times.
    cseconds = []
    cmeters = []
    ccomplete = []
    startseconds = []
    endseconds = []

    # For every pass through the start gate, time the whole course and
    # subtract the time/distance to the first gate to get a net result.
    for startt in entrytimes:
        with open(logfile,'ab') as f:
            t = time.localtime()
            timestamp = bytes('{t}'.format(t=time.strftime('%b-%d-%Y_%H%M', t)),'utf-8')
            f.write(b'\n')
            f.write(timestamp)
            f.write(b' ')
            f.write(bytes('Path starting at {t}'.format(t=startt),'utf-8'))
        # Include a 10 s run-in before the start-gate entry.
        rowdata2 = rowdata[rowdata['time']>(startt-10.)]

        (
            coursetimeseconds,
            coursemeters,
            coursecompleted,

        ) = coursetime_paths(rowdata2,paths,polygons=polygons,logfile=logfile)
        (
            coursetimefirst,
            coursemetersfirst,
            firstcompleted
        ) = coursetime_first(
            rowdata2,paths,polygons=polygons,logfile=logfile)

        coursetimesecondsnet = coursetimeseconds-coursetimefirst
        coursemeters = coursemeters-coursemetersfirst

        cseconds.append(coursetimesecondsnet)
        cmeters.append(coursemeters)
        ccomplete.append(coursecompleted)
        endseconds.append(coursetimeseconds)
        startseconds.append(coursetimefirst)

    records = pd.DataFrame({
        'coursetimeseconds':cseconds,
        'coursecompleted': ccomplete,
        'coursemeters': cmeters,
        'startsecond':startseconds,
        'endsecond':endseconds,
    })

    # Keep only complete traversals; the fastest one counts.
    records = records[records['coursecompleted'] == True]

    if len(records):
        coursecompleted = True
        mintime = records['coursetimeseconds'].min()
        coursetimeseconds = records[records['coursetimeseconds'] == mintime]['coursetimeseconds'].min()
        coursemeters = records[records['coursetimeseconds'] == mintime]['coursemeters'].min()
        startsecond = records[records['coursetimeseconds'] == mintime]['startsecond'].min()
        endsecond = records[records['coursetimeseconds'] == mintime]['endsecond'].min()
    else: # pragma: no cover
        coursecompleted = False

    points = 0
    if coursecompleted:
        if coursedistance == 0:
            coursedistance = coursemeters
        # Points scale with average speed relative to the reference
        # speed: 100 points at reference speed, more when faster.
        velo = coursedistance/coursetimeseconds
        points = 100*(2.-referencespeed/velo)
        query = 'UPDATE rowers_virtualraceresult SET coursecompleted = 1, duration = "{duration}", distance = {distance}, workoutid = {workoutid}, startsecond = {startsecond}, endsecond = {endsecond}, points={points} WHERE id={recordid}'.format(
            recordid=recordid,
            duration=totaltime_sec_to_string(coursetimeseconds),
            distance=int(coursemeters),
            points=points,
            workoutid=workoutid,
            startsecond=startsecond,
            endsecond=endsecond,
        )

        # Course tests live in a different table than race results.
        if mode == 'coursetest':
            query = 'UPDATE rowers_coursetestresult SET coursecompleted = 1, duration = "{duration}", distance = {distance}, workoutid = {workoutid}, startsecond = {startsecond}, endsecond = {endsecond}, points={points} WHERE id={recordid}'.format(
                recordid=recordid,
                duration=totaltime_sec_to_string(coursetimeseconds),
                distance=int(coursemeters),
                points=points,
                workoutid=workoutid,
                startsecond=startsecond,
                endsecond=endsecond,
            )

        # NOTE(review): the engine was disposed above; SQLAlchemy
        # recreates the pool on connect, so this still works.
        with engine.connect() as conn, conn.begin():
            result = conn.execute(query)

        conn.close()
        engine.dispose()

        os.remove(logfile)

        return 1

    else: # pragma: no cover
        # No complete traversal: store a zero result and mail the
        # per-gate log to the user for diagnosis.
        query = 'UPDATE rowers_virtualraceresult SET coursecompleted = 0, duration = "{duration}", distance = {distance}, workoutid = {workoutid}, startsecond = {startsecond}, endsecond = {endsecond}, points={points} WHERE id={recordid}'.format(
            recordid=recordid,
            duration=totaltime_sec_to_string(0),
            distance=0,
            points=0.0,
            workoutid=workoutid,
            startsecond=startsecond,
            endsecond=endsecond,
        )

        if mode == 'coursetest':
            query = 'UPDATE rowers_coursetestresult SET coursecompleted = 0, duration = "{duration}", distance = {distance}, workoutid = {workoutid}, startsecond = {startsecond}, endsecond = {endsecond}, points={points} WHERE id={recordid}'.format(
                recordid=recordid,
                duration=totaltime_sec_to_string(0),
                distance=0,
                points=0,
                workoutid=workoutid,
                startsecond=startsecond,
                endsecond=endsecond,
            )

        with engine.connect() as conn, conn.begin():
            result = conn.execute(query)

        conn.close()
        engine.dispose()

        # add times for all gates to log file
        with open(logfile,'ab') as f:
            t = time.localtime()
            f.write(b'\n')
            f.write(b' ')
            f.write(b'--- LOG of all gate times---')

        for path,polygon in zip(paths,polygons):
            ( secs,meters,completed) = coursetime_paths(rowdata,
                                                        [path],polygons=[polygon],logfile=logfile)
            with open(logfile,'ab') as f:
                line = " time: {t} seconds, distance: {m} meters".format(t=secs,m=meters)
                f.write(bytes(line,'utf-8'))

        # send email
        handle_sendemail_coursefail(
            useremail,userfirstname,logfile
        )
        os.remove(logfile)

        return 2

    return 0 # pragma: no cover
|
|
|
|
|
|
@app.task(bind=True)
def handle_getagegrouprecords(self,
                              df,
                              distances,durations,
                              age,sex,weightcategory,
                              **kwargs):
    """Compute age-group record powers for the given distances and
    durations and store the resulting (duration, power) curve.

    *df* is a JSON-serialized records DataFrame; *durations* are in
    minutes.  Returns 1 when the table was updated, 0 when *sex* is
    unspecified.
    """
    debug = kwargs.get('debug', False)  # pragma: no cover

    record_durations = []
    record_powers = []

    df = pd.read_json(df)

    if sex == 'not specified': # pragma: no cover
        return 0

    # Distance pieces: derive the record duration from record power.
    for distance in distances:
        worldclasspower = getagegrouprecord(
            age,
            sex=sex,
            distance=distance,
            weightcategory=weightcategory,indf=df,
        )
        velo = (worldclasspower/2.8)**(1./3.)
        if np.isinf(worldclasspower) or np.isnan(worldclasspower):
            continue
        try:
            record_durations.append(distance/velo)
            record_powers.append(worldclasspower)
        except ZeroDivisionError: # pragma: no cover
            pass

    # Time pieces: durations arrive in minutes.
    for duration in durations:
        worldclasspower = getagegrouprecord(
            age,
            sex=sex,
            duration=duration,
            weightcategory=weightcategory,indf=df
        )
        if np.isinf(worldclasspower) or np.isnan(worldclasspower):
            continue
        try:
            velo = (worldclasspower/2.8)**(1./3.)
            distance = int(60*duration*velo)
            record_durations.append(60.*duration)
            record_powers.append(worldclasspower)
        except ValueError: # pragma: no cover
            pass

    update_agegroup_db(age,sex,weightcategory,record_durations,record_powers,
                       debug=debug)

    return 1
|
|
|
|
@app.task
def handle_get_garmin_file(client_id,
                           client_secret,
                           garmintoken,
                           garminrefreshtoken,
                           userid,
                           url,
                           filetype,
                           *args,
                           **kwargs):
    """Download an activity file from Garmin (OAuth1) and forward it to
    the upload service.

    Returns 1 when the file was forwarded, 0 when Garmin did not serve
    the file.
    """

    # Bug fix: raw string -- the pattern contains regex escapes
    # (\? and \d) that are invalid escapes in a plain string literal.
    regex = r'.*\?id=(\d+)'
    try: # pragma: no cover
        m = re.compile(regex).match(url).group(1)
        garminid = int(m)
    except AttributeError:
        # No id in the URL; the upload service accepts an empty id.
        garminid = ''

    garmin = OAuth1Session(client_id,
                           client_secret=client_secret,
                           resource_owner_key=garmintoken,
                           resource_owner_secret=garminrefreshtoken,
                           )

    # Random local name so concurrent downloads never collide.
    filename = 'media/{code}_{id}.'.format(
        code = uuid4().hex[:16],
        id = userid
    )+filetype

    response = garmin.get(url, stream=True)
    if response.status_code != 200: # pragma: no cover
        # Bug fix: the upload request used to be sent even when the
        # download failed and no local file existed.
        del response
        return 0

    with open(filename, 'wb') as out_file:
        shutil.copyfileobj(response.raw, out_file)

    del response

    uploadoptions = {
        'secret':UPLOAD_SERVICE_SECRET,
        'user':userid,
        'file': filename,
        'title': '',
        'workouttype':'water',
        'boattype':'1x',
        'garminid': garminid,
    }
    session = requests.session()
    newHeaders = {'Content-type': 'application/json', 'Accept': 'text/plain'}
    session.headers.update(newHeaders)
    response = session.post(UPLOAD_SERVICE_URL,json=uploadoptions)

    return 1
|
|
|
|
@app.task(bind=True)
def long_test_task(self,aantal,debug=False,job=None,session_key=None): # pragma: no cover
    """Run the long-running demo task, reporting progress under this
    Celery request's task id."""
    request = self.request
    return longtask.longtask(aantal, jobid=request.id, debug=debug,
                             session_key=session_key)
|
|
|
|
@app.task(bind=True)
def long_test_task2(self,aantal,**kwargs): # pragma: no cover
    """Run demo task 2; the progress key defaults to the Celery task id
    but can be overridden with a 'jobkey' kwarg."""
    if 'jobkey' in kwargs:
        job_id = kwargs.pop('jobkey')
    else:
        job_id = self.request.id

    kwargs['jobid'] = job_id

    return longtask.longtask2(aantal,**kwargs)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# process and update workouts
|
|
|
|
@app.task(bind=True)
def handle_update_empower(self,
                          useremail,
                          workoutdicts,
                          debug=False, **kwargs): # pragma: no cover
    """Recompute Empower Oarlock power/work-per-stroke data for a batch
    of workouts, posting progress per workout and mailing the user on
    completion.

    Each entry of *workoutdicts* carries id, inboard, oarlength,
    boattype and filename.  Returns 1.
    """
    job = self.request
    job_id = job.id

    if 'jobkey' in kwargs:
        job_id = kwargs.pop('jobkey')

    aantal = len(workoutdicts)
    counter = 0

    # Loop-invariant setup (used to be recomputed for every workout).
    progressurl = SITE_URL
    siteurl = SITE_URL
    if debug:
        progressurl = SITE_URL_DEV
        siteurl = SITE_URL_DEV
    secret = PROGRESS_CACHE_SECRET
    kwargs['job_id'] = job_id
    progressurl += "/rowers/record-progress/"
    progressurl += job_id

    for workoutdict in workoutdicts:
        wid = workoutdict['id']
        inboard = workoutdict['inboard']
        oarlength = workoutdict['oarlength']
        boattype = workoutdict['boattype']
        f1 = workoutdict['filename']

        # oarlength consistency checks will be done in view

        # The stroke file may be stored bare, as .csv, or gzipped.
        havedata = 1
        try:
            rowdata = rdata(csvfile=f1)
        except IOError:
            try:
                rowdata = rdata(csvfile=f1 + '.csv')
            except IOError:
                try:
                    rowdata = rdata(csvfile=f1 + '.gz')
                except IOError:
                    havedata = 0

        if havedata:
            success = update_empower(wid, inboard, oarlength, boattype,
                                     rowdata.df, f1, debug=debug)

        counter += 1

        # Report progress so the UI can show a percentage.
        progress = 100.*float(counter)/float(aantal)

        post_data = {
            "secret":secret,
            "value":progress,
        }

        s = requests.post(progressurl, data=post_data)
        status_code = s.status_code

    subject = "Rowsandall.com Your Old Empower Oarlock data have been corrected"
    message = """
    We have updated Power and Work per Stroke data according to the instructions by Nielsen-Kellerman.
    """

    email = EmailMessage(subject, message,
                         'Rowsandall <info@rowsandall.com>',
                         [useremail])

    if 'emailbounced' in kwargs:
        emailbounced = kwargs['emailbounced']
    else:
        emailbounced = False

    if not emailbounced:
        res = email.send()
    return 1
|
|
|
|
@app.task
def handle_calctrimp(id,
                     csvfilename,
                     ftp,
                     sex,
                     hrftp,
                     hrmax,
                     hrmin,
                     debug=False, **kwargs):
    """Compute training-load metrics (TSS, TRIMP, normalized power,
    velocity, work and HR-TSS) for a workout via the local gRPC metrics
    service and store them on the rowers_workout row.

    Returns 1 on success, 0 when the stroke file or the metrics
    service is unavailable.
    """
    if debug: # pragma: no cover
        engine = create_engine(database_url_debug, echo=False)
    else:
        engine = create_engine(database_url, echo=False)

    tss = 0
    normp = 0
    trimp = 0
    hrtss = 0
    normv = 0
    normw = 0

    # Resolve the actual on-disk file name (bare, .csv or gzipped).
    if os.path.exists(csvfilename):
        csvfile = csvfilename
    elif os.path.exists(csvfilename+'.csv'): # pragma: no cover
        csvfile = csvfilename+'.csv'
    elif os.path.exists(csvfilename+'.gz'): # pragma: no cover
        csvfile = csvfilename+'.gz'
    else: # pragma: no cover
        return 0

    csvfile = os.path.abspath(csvfile)

    with grpc.insecure_channel(
            target='localhost:50052',
            options=[('grpc.lb_policy_name', 'pick_first'),
                     ('grpc.enable_retries', 0),
                     ('grpc.keepalive_timeout_ms', 10000)]
    ) as channel:
        try:
            grpc.channel_ready_future(channel).result(timeout=10)
        except grpc.FutureTimeoutError: # pragma: no cover
            return 0

        stub = metrics_pb2_grpc.MetricsStub(channel)
        req = metrics_pb2.WorkoutMetricsRequest(
            filename = csvfile,
            ftp=ftp,
            sex=sex,
            hrftp=hrftp,
            hrmax=hrmax,
            hrmin=hrmin,
        )
        try:
            response = stub.CalcMetrics(req,timeout=60)
        except: # pragma: no cover
            return 0

        def _sane(value, ceiling):
            # NaN or implausibly large service output is stored as 0.
            if np.isnan(value): # pragma: no cover
                return 0
            if value > ceiling: # pragma: no cover
                return 0
            return value

        tss = _sane(response.tss, 1000)
        normp = _sane(response.normp, 2000)
        trimp = _sane(response.trimp, 1000)
        normv = _sane(response.normv, 2000)
        normw = _sane(response.normw, 10000)
        hrtss = _sane(response.hrtss, 1000)

    query = 'UPDATE rowers_workout SET rscore = {tss}, normp = {normp}, trimp={trimp}, hrtss={hrtss}, normv={normv}, normw={normw} WHERE id={id}'.format(
        tss = int(tss),
        normp = int(normp),
        trimp = int(trimp),
        hrtss = int(hrtss),
        normv=normv,
        normw=normw,
        id = id,
    )

    with engine.connect() as conn, conn.begin():
        result = conn.execute(query)
    conn.close()
    engine.dispose()

    return 1
|
|
|
|
|
|
@app.task
def handle_updatedps(useremail, workoutids, debug=False,**kwargs):
    """Recompute distance-per-stroke for each (workout id, filename)
    pair and notify the user by email when all are done."""
    for wid, f1 in workoutids:
        # The stroke file may be stored bare, as .csv, or gzipped.
        havedata = 1
        try:
            rowdata = rdata(csvfile=f1)
        except IOError: # pragma: no cover
            try:
                rowdata = rdata(csvfile=f1 + '.csv')
            except IOError:
                try:
                    rowdata = rdata(csvfile=f1 + '.gz')
                except IOError:
                    havedata = 0

        if havedata:
            update_strokedata(wid, rowdata.df, debug=debug)

    subject = "Rowsandall.com Your Distance per Stroke metric has been updated"
    message = "All your workouts now have Distance per Stroke"

    email = EmailMessage(subject, message,
                         'Rowsandall <info@rowsandall.com>',
                         [useremail])

    emailbounced = kwargs.get('emailbounced', False)  # pragma: no cover

    if not emailbounced:
        res = email.send()

    return 1
|
|
|
|
import math
|
|
def sigdig(value, digits=3):
    """Round *value* to *digits* significant digits.

    Whole numbers and values whose magnitude cannot be taken (zero,
    non-numeric input) are returned unchanged; everything else comes
    back as a formatted string.
    """
    try:
        magnitude = math.log10(math.fabs(value))
    except (ValueError, TypeError):  # pragma: no cover
        # zero or non-numeric input: pass through untouched
        return value

    order = int(math.floor(magnitude))

    # return integers as is
    if value % 1 == 0:  # pragma: no cover
        return value

    decimals = digits - order - 1
    template = "%%.%df" % decimals if decimals > 0 else "%.0f"
    return template % round(value, decimals)
|
|
|
|
|
|
|
|
@app.task
def handle_send_email_alert(
        useremail, userfirstname, userlastname, rowerfirstname, alertname, stats, **kwargs):
    """Build an alert-report summary from *stats* and email it to the user.

    Only the keys present in ``stats`` contribute rows to the report;
    ``startdate`` and ``enddate`` are required.
    """
    debug = kwargs.get('debug', False)  # kept for parity with other tasks
    othertexts = kwargs.get('othertexts')

    # assemble only the report rows whose inputs are present
    report = {}
    if 'percentage' in stats:
        report['Percentage'] = int(stats['percentage'])
    if 'workouts' in stats:
        report['Number of workouts'] = int(stats['workouts'])
    if 'nr_strokes_qualifying' in stats and 'nr_strokes' in stats:
        report['Data set'] = "{a} strokes out of {b}".format(
            a=stats['nr_strokes_qualifying'],
            b=stats['nr_strokes']
        )
    if 'median' in stats:
        report['Median'] = sigdig(stats['median'])
    if 'median_q' in stats:
        report['Median of qualifying strokes'] = sigdig(stats['median_q'])

    subject = "Rowsandall.com: {alertname} ({startdate} to {enddate})".format(
        startdate=stats['startdate'],
        enddate=stats['enddate'],
        alertname=alertname,
    )

    context = {
        'report': report,
        'first_name': userfirstname,
        'last_name': userlastname,
        'startdate': stats['startdate'],
        'enddate': stats['enddate'],
        'siteurl': siteurl,
        'rowerfirstname': rowerfirstname,
        'alertname': alertname,
        'othertexts': othertexts,
    }

    send_template_email('Rowsandall <info@rowsandall.com>', [useremail],
                        subject, 'alertemail.html', context, **kwargs)

    return 1
|
|
|
|
@app.task
def handle_send_email_transaction(
        username, useremail, amount, **kwargs):
    """Email a payment confirmation for *amount* to *useremail*."""
    debug = kwargs.get('debug', True)  # kept for parity; unused below

    context = {
        'name': username,
        'siteurl': siteurl,
        'amount': amount
    }

    send_template_email(
        'Rowsandall <admin@rowsandall.com>',
        [useremail],
        "Rowsandall Payment Confirmation",
        'paymentconfirmationemail.html',
        context, **kwargs)

    return 1
|
|
|
|
@app.task
def handle_send_email_failed_cancel(
        name, email, username, id, **kwargs):
    """Alert support that a subscription cancellation failed for a user."""
    debug = kwargs.get('debug', True)  # kept for parity; unused below

    context = {
        'name': name,
        'siteurl': siteurl,
        'email': email,
        'username': username,
        'id': id,
    }

    send_template_email(
        'Rowsandall <admin@rowsandall.com>',
        ["support@rowsandall.com"],
        "Rowsandall Subscription Cancellation Error",
        'cancel_subscription_fail_email.html',
        context, **kwargs)

    return 1
|
|
|
|
|
|
@app.task
def handle_send_email_subscription_update(
        username, useremail, planname, recurring, price, amount,
        end_of_billing_period, method, **kwargs):
    """Confirm a subscription change to the user and notify the site owners.

    ``method == 'down'`` selects the downgrade templates; anything else is
    treated as an upgrade/payment.
    """
    debug = kwargs.get('debug', True)  # kept for parity; unused below

    context = {
        'name': username,
        'siteurl': siteurl,
        'amount': amount,
        'price': price,
        'planname': planname,
        'recurring': recurring,
        'end_of_billing_period': end_of_billing_period,
    }

    if method == 'down':
        user_template = 'subscription_downgrade_email.html'
        staff_template = 'subscription_downgrade_notification.html'
        subject = "Rowsandall Change Confirmation"
    else:
        user_template = 'subscription_update_email.html'
        staff_template = 'subscription_update_notification.html'
        subject = "Rowsandall Payment Confirmation"

    from_email = 'Rowsandall <admin@rowsandall.com>'

    send_template_email(from_email, [useremail], subject,
                        user_template, context, **kwargs)

    send_template_email(from_email, ['info@rowsandall.com'],
                        'Subscription Update Notification',
                        staff_template, context, **kwargs)

    return 1
|
|
|
|
@app.task
def handle_send_email_subscription_create(
        username, useremail, planname, recurring, price, amount,
        end_of_billing_period, **kwargs):
    """Confirm a new subscription to the user and notify the site owners."""
    debug = kwargs.get('debug', True)  # kept for parity; unused below

    context = {
        'name': username,
        'siteurl': siteurl,
        'amount': amount,
        'price': price,
        'planname': planname,
        'end_of_billing_period': end_of_billing_period,
        'recurring': recurring,
    }

    from_email = 'Rowsandall <admin@rowsandall.com>'

    send_template_email(from_email, [useremail],
                        "Rowsandall Payment Confirmation",
                        'subscription_create_email.html',
                        context, **kwargs)

    send_template_email(from_email, ['info@rowsandall.com'],
                        'Subscription Update Notification',
                        'subscription_create_notification.html',
                        context, **kwargs)
    return 1
|
|
|
|
@app.task
def handle_sendemail_raceregistration(
        useremail, username, registeredname, racename, raceid, **kwargs):
    """Tell a race owner that a new competitor registered for their challenge."""
    debug = kwargs.get('debug', True)  # kept for parity; unused below

    context = {
        'username': username,
        'registeredname': registeredname,
        'siteurl': siteurl,
        'racename': racename,
        'raceid': raceid,
    }

    send_template_email(
        'Rowsandall <info@rowsandall.com>',
        [useremail],
        "A new competitor has registered for virtual challenge {n}".format(
            n=racename
        ),
        'raceregisteredemail.html',
        context, **kwargs)

    return 1
|
|
|
|
def handle_sendemail_coursefail(
        useremail, username, logfile, **kwargs):
    """Tell the user that their course validation failed, attaching the log.

    NOTE: not registered as a Celery task (no @app.task), mirroring the
    original; it runs synchronously.
    """
    debug = kwargs.get('debug', True)  # kept for parity; unused below

    context = {
        'username': username,
    }

    send_template_email(
        'Rowsandall <info@rowsandall.com>',
        [useremail],
        "The validation of your course has failed",
        'trajectoryfailemail.html',
        context,
        cc=['info@rowsandall.com'],
        attach_file=logfile,
        **kwargs)

    return 1
|
|
|
|
@app.task
def handle_sendemail_optout(
        useremail, username, registeredname, racename, raceid, **kwargs):
    """Tell a race owner that a competitor opted out of social media posts."""
    debug = kwargs.get('debug', True)  # kept for parity; unused below

    context = {
        'username': username,
        'registeredname': registeredname,
        'siteurl': siteurl,
        'racename': racename,
        'raceid': raceid,
    }

    send_template_email(
        'Rowsandall <info@rowsandall.com>',
        [useremail],
        "{name} has opted out from social media posts around challenge {n}".format(
            n=racename,
            name=registeredname
        ),
        'raceoptoutsocialmedia.html',
        context, **kwargs)

    return 1
|
|
|
|
|
|
@app.task
def handle_sendemail_racesubmission(
        useremail, username, registeredname, racename, raceid, **kwargs):
    """Tell a race owner that a new result was submitted for their challenge."""
    debug = kwargs.get('debug', True)  # kept for parity; unused below

    context = {
        'username': username,
        'siteurl': siteurl,
        'registeredname': registeredname,
        'racename': racename,
        'raceid': raceid,
    }

    send_template_email(
        'Rowsandall <info@rowsandall.com>',
        [useremail],
        "A new result has been submitted for virtual challenge {n}".format(
            n=racename
        ),
        'racesubmissionemail.html',
        context, **kwargs)

    return 1
|
|
|
|
@app.task
def handle_send_disqualification_email(
        useremail, username, reason, message, racename, **kwargs):
    """Tell a competitor that their race result was disqualified.

    *message* is stripped of HTML before it is rendered into the template.
    """
    debug = kwargs.get('debug', True)  # kept for parity; unused below

    context = {
        'username': username,
        'reason': reason,
        'siteurl': siteurl,
        'message': htmlstrip(message),
        'racename': racename,
    }

    send_template_email(
        'Rowsandall <support@rowsandall.com>',
        [useremail],
        "Your result for {n} has been disqualified on rowsandall.com".format(
            n=racename
        ),
        'disqualificationemail.html',
        context, **kwargs)

    return 1
|
|
|
|
@app.task
def handle_send_withdraw_email(
        useremail, username, reason, message, racename, **kwargs):
    """Tell a competitor that their race result was removed (withdrawn)."""
    debug = kwargs.get('debug', True)  # kept for parity; unused below

    context = {
        'username': username,
        'reason': reason,
        'siteurl': siteurl,
        'message': htmlstrip(message),
        'racename': racename,
    }

    send_template_email(
        'Rowsandall <support@rowsandall.com>',
        [useremail],
        "Your result for {n} has been removed on rowsandall.com".format(
            n=racename
        ),
        'withdraw_email.html',
        context, **kwargs)

    return 1
|
|
|
|
|
|
@app.task
def handle_sendemail_expired(useremail, userfirstname, userlastname, expireddate,
                             **kwargs):
    """Tell a user their paid account expired; copy support on the mail."""
    debug = kwargs.get('debug', False)  # kept for parity; unused below

    context = {
        'first_name': userfirstname,
        'last_name': userlastname,
        'siteurl': siteurl,
        'expireddate': expireddate,
    }

    send_template_email(
        'Rowsandall <info@rowsandall.com>',
        [useremail],
        "Your rowsandall.com paid account has expired",
        'accountexpiredemail.html',
        context,
        cc=['support@rowsandall.com'],
        **kwargs)
    return 1
|
|
|
|
@app.task
def handle_sendemail_breakthrough(workoutid, useremail,
                                  userfirstname, userlastname,
                                  btvalues=pd.DataFrame().to_json(),
                                  **kwargs):
    """Email a 'breakthrough workout' report to the user.

    ``btvalues`` is a JSON-serialized DataFrame (the default is an empty
    frame rendered to JSON once at import time; it is a string, so the
    mutable-default pitfall does not apply). Expected columns: 'delta',
    'cpvalues', 'pwr' — TODO confirm against utils.isbreakthrough output.
    """
    if 'debug' in kwargs: # pragma: no cover
        debug = kwargs['debug']
    else:
        debug = False

    # rebuild the frame and present rows in increasing 'delta' order
    btvalues = pd.read_json(btvalues)
    btvalues.sort_values('delta', axis=0, inplace=True)

    # flatten rows into plain dicts for the email template
    tablevalues = [
        {'delta': t.delta,
         'cpvalue': t.cpvalues,
         'pwr': t.pwr
         } for t in btvalues.itertuples()
    ]

    # send email with attachment
    subject = "A breakthrough workout on rowsandall.com"
    from_email = 'Rowsandall <info@rowsandall.com>'

    d = {
        'first_name':userfirstname,
        'siteurl':siteurl,
        # workout ids are exposed externally as hex-encoded strings
        'workoutid':encoder.encode_hex(workoutid),
        'btvalues':tablevalues,
    }

    res = send_template_email(from_email,[useremail],
                              subject,'breakthroughemail.html',
                              d,**kwargs)

    return 1
|
|
|
|
# send email when a breakthrough workout is uploaded
|
|
|
|
|
|
@app.task
def handle_sendemail_hard(workoutid, useremail,
                          userfirstname, userlastname,
                          btvalues=pd.DataFrame().to_json(),
                          debug=False,**kwargs):
    """Email a 'pretty hard workout' report (near-breakthrough) to the user.

    Same data contract as handle_sendemail_breakthrough: ``btvalues`` is a
    JSON-serialized DataFrame with 'delta', 'cpvalues', 'pwr' columns.
    Note the ``debug`` parameter is overwritten by the kwargs lookup below.
    """
    if 'debug' in kwargs: # pragma: no cover
        debug = kwargs['debug']
    else:
        debug = False

    # rebuild the frame and present rows in increasing 'delta' order
    btvalues = pd.read_json(btvalues)
    btvalues.sort_values('delta', axis=0, inplace=True)

    # flatten rows into plain dicts for the email template
    tablevalues = [
        {'delta': t.delta,
         'cpvalue': t.cpvalues,
         'pwr': t.pwr
         } for t in btvalues.itertuples()
    ]

    # send email with attachment
    subject = "That was a pretty hard workout on rowsandall.com"
    from_email = 'Rowsandall <info@rowsandall.com>'

    d = {
        'first_name':userfirstname,
        'siteurl':siteurl,
        # workout ids are exposed externally as hex-encoded strings
        'workoutid':encoder.encode_hex(workoutid),
        'btvalues':tablevalues,
    }

    res = send_template_email(from_email,[useremail],
                              subject,'hardemail.html',d,**kwargs)

    return 1
|
|
|
|
|
|
# send email when user deletes account
|
|
@app.task
def handle_sendemail_userdeleted(name, email, debug=False, **kwargs):
    """Notify the site admin that a user deleted their account.

    Fix: the message object previously rebound the ``email`` parameter,
    shadowing the deleted user's address after the format call.
    """
    fullemail = 'roosendaalsander@gmail.com'
    subject = 'User account deleted'
    message = 'Sander,\n\n'
    message += 'The user {name} ({email}) has just deleted his account'.format(
        name=name,
        email=email
    )
    notification = EmailMessage(subject, message,
                                'Rowsandall <info@rowsandall.com>',
                                [fullemail])

    # skip sending when the address is known to bounce
    emailbounced = kwargs.get('emailbounced', False)

    if not emailbounced:
        res = notification.send()

    return 1
|
|
|
|
# send email to me when an unrecognized file is uploaded
|
|
@app.task
def handle_sendemail_unrecognized(unrecognizedfile, useremail,
                                  debug=False, **kwargs):
    """Forward an unrecognized upload to the maintainer, then delete it.

    Fix: the cleanup used a bare ``except:`` which also swallowed
    KeyboardInterrupt/SystemExit; narrowed to OSError (missing file,
    permissions).
    """
    # send email with attachment
    fullemail = 'roosendaalsander@gmail.com'
    subject = "Unrecognized file from Rowsandall.com"
    message = "Dear Sander,\n\n"
    message += "Please find attached a file that someone tried to upload to rowsandall.com. The file was not recognized as a valid file type.\n\n"
    message += "User Email " + useremail + "\n\n"
    message += "Best Regards, the Rowsandall Team"

    email = EmailMessage(subject, message,
                         'Rowsandall <info@rowsandall.com>',
                         [fullemail])

    try:
        email.attach_file(unrecognizedfile)
    except IOError:  # pragma: no cover
        pass

    # skip sending when the address is known to bounce
    emailbounced = kwargs.get('emailbounced', False)

    if not emailbounced:
        res = email.send()

    # best-effort removal of the stored upload
    try:
        os.remove(unrecognizedfile)
    except OSError:  # pragma: no cover
        pass

    return 1
|
|
|
|
|
|
# send email to owner when an unrecognized file is uploaded
|
|
@app.task
def handle_sendemail_unrecognizedowner(useremail, userfirstname,
                                       debug=False, **kwargs):
    """Tell the uploader that their file type was not recognized."""
    context = {
        'first_name': userfirstname,
        'siteurl': siteurl,
    }

    send_template_email(
        'Rowsandall <info@rowsandall.com>',
        [useremail],
        "Unrecognized file from Rowsandall.com",
        'unrecognizedemail.html',
        context,
        **kwargs)

    return 1
|
|
|
|
@app.task
def handle_sendemailics(first_name, last_name, email, icsfile, **kwargs):
    """Email a calendar (.ics) file to the user, then delete the temp file.

    Fix: the removal was unguarded, so a missing/locked temp file made the
    whole task fail after the mail had already been sent; now best-effort,
    consistent with handle_sendemailsummary.
    """
    # send email with attachment
    fullemail = first_name + " " + last_name + " " + "<" + email + ">"
    subject = "Calendar File from Rowsandall.com"

    d = {'first_name': first_name,
         'siteurl': siteurl,
         }

    from_email = 'Rowsandall <info@rowsandall.com>'

    res = send_template_email(from_email, [fullemail],
                              subject, 'icsemail.html', d,
                              attach_file=icsfile, **kwargs)

    try:
        os.remove(icsfile)
    except OSError:  # pragma: no cover
        pass
    return 1
|
|
|
|
|
|
@app.task
def handle_sendemailkml(first_name, last_name, email, kmlfile, **kwargs):
    """Email a KML file to the user, then delete the temp file.

    Fix: the removal was unguarded, so a missing/locked temp file made the
    whole task fail after the mail had already been sent; now best-effort,
    consistent with handle_sendemailsummary.
    """
    # send email with attachment
    fullemail = first_name + " " + last_name + " " + "<" + email + ">"
    subject = "File from Rowsandall.com"

    d = {'first_name': first_name,
         'siteurl': siteurl,
         }

    from_email = 'Rowsandall <info@rowsandall.com>'

    res = send_template_email(from_email, [fullemail],
                              subject, 'kmlemail.html', d,
                              attach_file=kmlfile, **kwargs)

    try:
        os.remove(kmlfile)
    except OSError:  # pragma: no cover
        pass
    return 1
|
|
|
|
# Send email with TCX attachment
|
|
@app.task
def handle_sendemailtcx(first_name, last_name, email, tcxfile, **kwargs):
    """Email a TCX file to the user, then delete the temp file.

    Fix: the removal was unguarded, so a missing/locked temp file made the
    whole task fail after the mail had already been sent; now best-effort,
    consistent with handle_sendemailsummary.
    """
    # send email with attachment
    fullemail = first_name + " " + last_name + " " + "<" + email + ">"
    subject = "File from Rowsandall.com"

    d = {'first_name': first_name,
         'siteurl': siteurl,
         }

    from_email = 'Rowsandall <info@rowsandall.com>'

    res = send_template_email(from_email, [fullemail],
                              subject, 'tcxemail.html', d,
                              attach_file=tcxfile, **kwargs)

    try:
        os.remove(tcxfile)
    except OSError:  # pragma: no cover
        pass
    return 1
|
|
|
|
|
|
@app.task
def handle_zip_file(emailfrom, subject, file,**kwargs): # pragma: no cover
    """Forward an uploaded zip archive to the workouts mailbox for processing.

    Note: the parameter ``file`` shadows the builtin of the same name.
    """
    message = "... zip processing ... "
    try:
        debug = kwargs['debug']
    except KeyError:
        debug = False

    if debug:
        print(message)

    email = EmailMessage(subject, message,
                         emailfrom,
                         ['workouts@rowsandall.com'])
    # no IOError guard here: a missing attachment fails the task loudly
    email.attach_file(file)
    if debug:
        print("attaching")

    res = email.send()

    if debug:
        print("sent")
    # NOTE(review): presumably paces successive zip forwards so the mailbox
    # processor keeps up — confirm before changing
    time.sleep(60)
    return 1
|
|
|
|
# Send email with CSV attachment
|
|
|
|
@app.task
def handle_sendemailsummary(first_name, last_name, email, csvfile, **kwargs):
    """Email a workout summary CSV to the user, then delete the temp file.

    Fix: cleanup used a bare ``except:`` which also swallowed
    KeyboardInterrupt/SystemExit; narrowed to OSError.
    """
    fullemail = first_name + " " + last_name + " " + "<" + email + ">"
    subject = "File from Rowsandall.com"

    d = {'first_name': first_name,
         'siteurl': siteurl,
         }

    from_email = 'Rowsandall <info@rowsandall.com>'

    res = send_template_email(from_email, [fullemail],
                              subject, 'summarymail.html', d,
                              attach_file=csvfile,
                              **kwargs)

    # best-effort removal of the temp file
    try:
        os.remove(csvfile)
    except OSError:  # pragma: no cover
        pass

    return 1
|
|
|
|
#from rowers.emails import sendemail
|
|
|
|
@app.task
def handle_sendemailcsv(first_name, last_name, email, csvfile, **kwargs):
    """Email a CSV export to the user (the file is kept on disk)."""
    recipient = first_name + " " + last_name + " " + "<" + email + ">"

    context = {'first_name': first_name,
               'siteurl': siteurl,
               }

    send_template_email(
        'Rowsandall <info@rowsandall.com>',
        [recipient],
        "File from Rowsandall.com",
        'csvemail.html',
        context,
        attach_file=csvfile, **kwargs)

    return 1
|
|
|
|
@app.task
def handle_sendemail_ical(first_name, last_name, email, url, icsfile, **kwargs):
    """Email the user a calendar (.ics) file for their sessions, then delete it.

    Fix: cleanup called ``os.remove(csvfile)`` — an undefined name (the
    attachment is ``icsfile``). The resulting NameError was hidden by the
    bare except, so the temp file was never removed. Also dropped a dead
    ``progressurl`` assignment and narrowed the except to OSError.
    """
    # send email with attachment
    fullemail = first_name + " " + last_name + " " + "<" + email + ">"
    subject = "Calendar File for your sessions from Rowsandall.com"

    debug = kwargs.get('debug', False)

    siteurl = SITE_URL
    if debug:  # pragma: no cover
        siteurl = SITE_URL_DEV

    d = {'first_name': first_name,
         'siteurl': siteurl,
         'url': url,
         }

    from_email = 'Rowsandall <info@rowsandall.com>'

    res = send_template_email(from_email, [fullemail],
                              subject, 'icsemail.html', d,
                              attach_file=icsfile, **kwargs)

    # best-effort removal of the temp file
    try:
        os.remove(icsfile)
    except OSError:  # pragma: no cover
        pass

    return 1
|
|
|
|
|
|
@app.task
def handle_sendemailfile(first_name, last_name, email, csvfile, **kwargs):
    """Email an arbitrary file to the user, optionally deleting it afterwards.

    Fix: cleanup used a bare ``except:`` which also swallowed
    KeyboardInterrupt/SystemExit; narrowed to OSError.
    """
    # send email with attachment
    fullemail = first_name + " " + last_name + " " + "<" + email + ">"
    subject = "File from Rowsandall.com"

    d = {'first_name': first_name,
         'siteurl': siteurl,
         }

    from_email = 'Rowsandall <info@rowsandall.com>'

    res = send_template_email(from_email, [fullemail],
                              subject, 'fileemail.html', d,
                              attach_file=csvfile, **kwargs)

    # only delete when the caller explicitly asked for it
    dodelete = kwargs.get('delete', False)

    if dodelete:  # pragma: no cover
        try:
            os.remove(csvfile)
        except OSError:
            pass

    return 1
|
|
|
|
# Calculate wind and stream corrections for OTW rowing
|
|
|
|
|
|
@app.task(bind=True)
def handle_otwsetpower(self,f1, boattype, boatclass, coastalbrand, weightvalue,
                       first_name, last_name, email, workoutid,
                       **kwargs):
    """Run the on-the-water power calculation for a workout via the gRPC
    calculator service, update stroke data, detect breakthroughs, and email
    the user when done.

    Returns 1 on success, 0 when the data file is missing, the gRPC channel
    never becomes ready, or the service reports failure.
    """
    job = self.request
    job_id = job.id

    # an explicit job key overrides the Celery request id (and is popped so
    # it is not forwarded via **kwargs)
    if 'jobkey' in kwargs:
        job_id = kwargs.pop('jobkey')
    # ps: four breakthrough-threshold parameters for utils.isbreakthrough
    if 'ps' in kwargs: # pragma: no cover
        ps = kwargs['ps']
    else:
        ps = [1,1,1,1]

    if 'ratio' in kwargs: # pragma: no cover
        ratio = kwargs['ratio']
    else:
        ratio = 1.0
    if 'debug' in kwargs: # pragma: no cover
        debug = kwargs['debug']
    else:
        debug = False

    kwargs['jobid'] = job_id

    weightvalue = float(weightvalue)

    # check what the real file name is
    if os.path.exists(f1):
        csvfile = f1
    elif os.path.exists(f1+'.csv'): # pragma: no cover
        csvfile = f1+'.csv'
    elif os.path.exists(f1+'.gz'): # pragma: no cover
        csvfile = f1+'.gz'
    else: # pragma: no cover
        return 0

    csvfile = os.path.abspath(csvfile)

    # do something with boat type
    # NOTE(review): all three fallbacks retry the SAME filename — looks like
    # copy-paste from the suffix-retry pattern used elsewhere; confirm intent
    try:
        rowdata = rdata(csvfile=csvfile)
    except IOError: # pragma: no cover
        try:
            rowdata = rdata(csvfile=csvfile)
        except IOError:
            rowdata = rdata(csvfile=csvfile)

    # do calculation, but do not overwrite NK Empower Power data
    # a nonzero 'wash' column is taken as evidence of measured power
    powermeasured = False
    try: # pragma: no cover
        w = rowdata.df['wash']
        if w.mean() != 0:
            powermeasured = True
    except KeyError:
        pass

    progressurl = SITE_URL
    siteurl = SITE_URL
    if debug: # pragma: no cover
        progressurl = SITE_URL_DEV
        siteurl = SITE_URL_DEV
    secret = PROGRESS_CACHE_SECRET

    # the Go service posts progress updates back to this endpoint
    progressurl += "/rowers/record-progress/"
    progressurl += job_id+'/'

    # do something (this should return from go service)
    with grpc.insecure_channel(
            target='localhost:50051',
            options=[('grpc.lb_policy_name', 'pick_first'),
                     ('grpc.enable_retries', 0), ('grpc.keepalive_timeout_ms',
                                                  10000)]
    ) as channel:
        try:
            # bail out early if the calculator service is not up
            grpc.channel_ready_future(channel).result(timeout=10)
        except grpc.FutureTimeoutError: # pragma: no cover
            return 0

        stub = calculator_pb2_grpc.PowerStub(channel)
        # the service rewrites the CSV file in place; generous 20 min timeout
        response = stub.CalcPower(calculator_pb2.WorkoutPowerRequest(
            filename = csvfile,
            boattype = boattype,
            coastalbrand = coastalbrand,
            crewmass = weightvalue,
            powermeasured = powermeasured,
            progressurl = progressurl,
            secret = secret,
            silent = False,
            boatclass = boatclass,
        ),timeout=1200)
    result = response.result
    if result == 0: # pragma: no cover
        # send failure email
        return 0
    # do something with boat type
    # re-read the CSV now that the service has updated it
    try:
        rowdata = rdata(csvfile=csvfile)
    except IOError: # pragma: no cover
        try:
            rowdata = rdata(csvfile=csvfile)
        except IOError:
            rowdata = rdata(csvfile=csvfile)

    update_strokedata(workoutid, rowdata.df, debug=debug)

    totaltime = rowdata.df['TimeStamp (sec)'].max(
    ) - rowdata.df['TimeStamp (sec)'].min()
    # some files carry a leading elapsed-time offset in the first row
    try:
        totaltime = totaltime + rowdata.df.loc[0, ' ElapsedTime (sec)']
    except KeyError: # pragma: no cover
        pass
    df = getsmallrowdata_db(
        ['power', 'workoutid', 'time'], ids=[workoutid],
        debug=debug)
    thesecs = totaltime
    # pad the CP-curve duration range by 5%
    maxt = 1.05 * thesecs
    logarr = datautils.getlogarr(maxt)
    dfgrouped = df.groupby(['workoutid'])
    delta, cpvalues, avgpower = datautils.getcp(dfgrouped, logarr)

    #delta,cpvalues,avgpower = datautils.getsinglecp(rowdata.df)
    res, btvalues, res2 = utils.isbreakthrough(
        delta, cpvalues, ps[0], ps[1], ps[2], ps[3], ratio)
    if res: # pragma: no cover
        # called synchronously (not .delay) — runs inside this task
        handle_sendemail_breakthrough(
            workoutid, email,
            first_name,
            last_name, btvalues=btvalues.to_json())

    subject = "Your OTW Physics Calculations are ready"
    from_email = 'Rowsandall <info@rowsandall.com>'
    fullemail = first_name + " " + last_name + " " + "<" + email + ">"

    d = {
        'first_name':first_name,
        'siteurl':siteurl,
        'workoutid':encoder.encode_hex(workoutid),
    }

    res = send_template_email(from_email,[fullemail],
                              subject,'otwpoweremail.html',d,
                              **kwargs)

    return 1
|
|
|
|
|
|
@app.task
def handle_updateergcp(rower_id, workoutfilenames, debug=False, **kwargs):
    """Rebuild the rower's erg critical-power table from stored CSV files.

    Files that cannot be opened under any known suffix are skipped.
    """
    therows = []
    for f1 in workoutfilenames:
        rowdata = 0
        # the stored file may carry one of several suffixes; try each in turn
        for suffix in ('', '.csv', '.gz'):
            try:
                rowdata = rdata(csvfile=f1 + suffix)
                break
            except IOError:  # pragma: no cover
                continue
        if rowdata != 0:
            therows.append(rowdata)

    cpdata = rowingdata.cumcpdata(therows)
    cpdata.columns = cpdata.columns.str.lower()

    updatecpdata_sql(rower_id, cpdata['delta'], cpdata['cp'],
                     table='ergcpdata', distance=cpdata['distance'],
                     debug=debug)

    return 1
|
|
|
|
|
|
|
|
@app.task
def handle_updatecp(rower_id, workoutids, debug=False, table='cpdata', **kwargs):
    """Recompute the rower's critical-power curve from the given workouts."""
    df = getsmallrowdata_db(['power', 'workoutid', 'time'],
                            ids=workoutids, debug=debug)

    if df.empty:  # pragma: no cover
        return 0

    # stored times are in milliseconds; pad the duration range by 5%
    logarr = datautils.getlogarr(1.05 * df['time'].max() / 1000.)

    delta, cpvalue, avgpower = datautils.getcp(df.groupby(['workoutid']), logarr)

    updatecpdata_sql(rower_id, delta, cpvalue, debug=debug, table=table)

    return 1
|
|
|
|
@app.task
def handle_makeplot(f1, f2, t, hrdata, plotnr, imagename,
                    debug=False, **kwargs):
    """Render one workout plot to static/plots/<imagename>.

    *hrdata* carries the rower's HR/power zone settings; *plotnr* selects
    the plot type; *t* is the plot title. Returns *imagename* on success
    and 0 when *plotnr* produces no figure.

    Fixes: ``fig1`` is now initialised to None so an unhandled *plotnr*
    returns 0 instead of raising NameError; the downsampling group key uses
    integer division (``//``) — true division (active via
    ``from __future__ import division``) gave every row a distinct float
    key, so no binning happened; ``bin`` no longer shadows the builtin.
    """
    hrmax = hrdata['hrmax']
    hrut2 = hrdata['hrut2']
    hrut1 = hrdata['hrut1']
    hrat = hrdata['hrat']
    hrtr = hrdata['hrtr']
    hran = hrdata['hran']
    ftp = hrdata['ftp']
    powerzones = deserialize_list(hrdata['powerzones'])
    hrzones = deserialize_list(hrdata['hrzones'])
    powerperc = np.array(deserialize_list(hrdata['powerperc'])).astype(float)

    rr = rowingdata.rower(hrmax=hrmax, hrut2=hrut2,
                          hrut1=hrut1, hrat=hrat,
                          hrtr=hrtr, hran=hran,
                          ftp=ftp, powerperc=powerperc,
                          powerzones=powerzones,
                          hrzones=hrzones)
    try:
        row = rdata(csvfile=f2, rower=rr)
    except IOError:  # pragma: no cover
        row = rdata(csvfile=f2 + '.gz', rower=rr)

    try:
        haspower = row.df[' Power (watts)'].mean() > 50
    except (TypeError, KeyError):
        haspower = False

    oterange = kwargs.pop('oterange', [85, 240])
    otwrange = kwargs.pop('otwrange', [85, 185])

    nr_rows = len(row.df)
    if (plotnr in [1, 2, 4, 5, 8, 11, 9, 12]) and (nr_rows > 1200):  # pragma: no cover
        # downsample dense recordings to roughly 1200 rows before plotting
        binsize = int(nr_rows / 1200.)
        df = row.df.groupby(lambda x: x // binsize).mean()
        row.df = df
        nr_rows = len(row.df)

    fig1 = None  # stays None for plot numbers without a branch below
    if (plotnr == 1):
        fig1 = row.get_timeplot_erg(t, pacerange=oterange, **kwargs)
    elif (plotnr == 2):
        fig1 = row.get_metersplot_erg(t, pacerange=oterange, **kwargs)
    elif (plotnr == 3):
        try:
            t += ' - Heart Rate Distribution'
        except TypeError:  # pragma: no cover
            t = 'Heart Rate Distribution'
        fig1 = row.get_piechart(t, **kwargs)
    elif (plotnr == 4):
        if haspower:  # pragma: no cover
            fig1 = row.get_timeplot_otwempower(t, pacerange=otwrange, **kwargs)
        else:
            fig1 = row.get_timeplot_otw(t, pacerange=otwrange, **kwargs)
    elif (plotnr == 5):
        if haspower:  # pragma: no cover
            fig1 = row.get_metersplot_otwempower(t, pacerange=otwrange, **kwargs)
        else:
            fig1 = row.get_metersplot_otw(t, pacerange=otwrange, **kwargs)
    elif (plotnr == 6):
        t += ' - Heart Rate Distribution'
        fig1 = row.get_piechart(t, **kwargs)
    elif (plotnr == 7) or (plotnr == 10):
        fig1 = row.get_metersplot_erg2(t, **kwargs)
    elif (plotnr == 8) or (plotnr == 11):
        fig1 = row.get_timeplot_erg2(t, **kwargs)
    elif (plotnr == 9) or (plotnr == 12):
        fig1 = row.get_time_otwpower(t, pacerange=otwrange, **kwargs)
    elif (plotnr == 13) or (plotnr == 16):
        t += ' - Power Distribution'
        fig1 = row.get_power_piechart(t, **kwargs)

    if fig1 is None:  # pragma: no cover
        return 0

    canvas = FigureCanvas(fig1)

    canvas.print_figure('static/plots/' + imagename)
    # free the figure and force a collection — plots leak memory otherwise
    plt.close(fig1)
    fig1.clf()
    gc.collect()

    return imagename
|
|
|
|
# Team related remote tasks
|
|
|
|
@app.task
def handle_sendemail_coachrequest(email, name, code, coachname,
                                  debug=False, **kwargs):
    """Invite a coach to add *name* to their athletes (with accept code).

    Fix: removed the dead ``form_email`` assignment — a typo of
    ``from_email`` that was never read.
    """
    fullemail = email
    subject = 'Invitation to add {n} to your athletes'.format(n=name)
    from_email = 'Rowsandall <info@rowsandall.com>'
    siteurl = SITE_URL
    if debug:  # pragma: no cover
        siteurl = SITE_URL_DEV

    d = {
        'name': name,
        'coach': coachname,
        'code': code,
        'siteurl': siteurl
    }

    res = send_template_email(from_email, [fullemail],
                              subject, 'coachrequestemail.html', d,
                              **kwargs)

    return 1
|
|
|
|
@app.task
def handle_sendemail_coachoffer_rejected(coachemail, coachname, name,
                                         debug=False, **kwargs):
    """Tell a coach that the athlete rejected their coaching offer."""
    site = SITE_URL_DEV if debug else SITE_URL  # pragma: no cover (debug branch)

    context = {
        'name': name,
        'coach': coachname,
        'siteurl': site,
    }

    send_template_email(
        'Rowsandall <info@rowsandall.com>',
        [coachemail],
        '{n} has rejected your offer to be his coach on rowsandall.com'.format(n=name),
        'coachofferrejectedemail.html',
        context,
        **kwargs)
    return 1
|
|
|
|
@app.task
def handle_sendemail_coachrequest_rejected(email, coachname, name,
                                           debug=False, **kwargs):
    """Tell an athlete that the coach rejected their coaching request."""
    site = SITE_URL_DEV if debug else SITE_URL  # pragma: no cover (debug branch)

    context = {
        'name': name,
        'coach': coachname,
        'siteurl': site,
    }

    send_template_email(
        'Rowsandall <info@rowsandall.com>',
        [email],
        '{n} has rejected your coaching request on rowsandall.com'.format(n=coachname),
        'coachrequestrejectedemail.html',
        context,
        **kwargs)
    return 1
|
|
|
|
@app.task
def handle_sendemail_coachrequest_accepted(email, coachname, name,
                                           debug=False, **kwargs):
    """Tell an athlete that the coach accepted their coaching request."""
    site = SITE_URL_DEV if debug else SITE_URL  # pragma: no cover (debug branch)

    context = {
        'name': name,
        'coach': coachname,
        'siteurl': site,
    }

    send_template_email(
        'Rowsandall <info@rowsandall.com>',
        [email],
        '{n} has accepted your coaching request on rowsandall.com'.format(n=coachname),
        'coachrequestacceptedemail.html',
        context,
        **kwargs)
    return 1
|
|
|
|
@app.task
def handle_sendemail_coachoffer_accepted(coachemail, coachname, name,
                                         debug=False, **kwargs):
    """Tell a coach that the athlete accepted their coaching offer."""
    site = SITE_URL_DEV if debug else SITE_URL  # pragma: no cover (debug branch)

    context = {
        'name': name,
        'coach': coachname,
        'siteurl': site,
    }

    send_template_email(
        'Rowsandall <info@rowsandall.com>',
        [coachemail],
        '{n} has accepted your coaching offer on rowsandall.com'.format(n=name),
        'coachofferacceptedemail.html',
        context,
        **kwargs)
    return 1
|
|
|
|
@app.task
def handle_sendemail_coacheerequest(email, name, code, coachname,
                                    debug=False, **kwargs):
    """Tell an athlete that a coach requested access to their data."""
    site = SITE_URL_DEV if debug else SITE_URL  # pragma: no cover (debug branch)

    context = {
        'name': name,
        'coach': coachname,
        'code': code,
        'siteurl': site
    }

    send_template_email(
        'Rowsandall <info@rowsandall.com>',
        [email],
        '{n} requests coach access to your data on rowsandall.com'.format(n=coachname),
        'coacheerequestemail.html',
        context,
        **kwargs)

    return 1
|
|
|
|
@app.task
def handle_sendemail_invite(email, name, code, teamname, manager,
                            debug=False, **kwargs):
    """Invite *email* to join team *teamname* (with join code)."""
    site = SITE_URL_DEV if debug else SITE_URL  # pragma: no cover (debug branch)

    context = {
        'name': name,
        'manager': manager,
        'code': code,
        'teamname': teamname,
        'siteurl': site
    }

    send_template_email(
        'Rowsandall <info@rowsandall.com>',
        [email],
        'Invitation to join team ' + teamname,
        'teaminviteemail.html',
        context,
        **kwargs)

    return 1
|
|
|
|
|
|
@app.task
def handle_sendemailnewresponse(first_name, last_name,
                                email,
                                commenter_first_name,
                                commenter_last_name,
                                comment,
                                workoutname, workoutid, commentid,
                                debug=False,**kwargs):
    """Notify a comment-thread participant of a new response on a session.

    'sessiontype' and 'commentlink' overrides are popped from kwargs so
    they are NOT forwarded to send_template_email via **kwargs.
    """
    fullemail = email
    from_email = 'Rowsandall <info@rowsandall.com>'
    subject = 'New comment on session ' + workoutname

    # force a text (unicode) string under Python 2 semantics
    comment = u''+comment

    siteurl = SITE_URL
    if debug: # pragma: no cover
        siteurl = SITE_URL_DEV

    sessiontype = 'workout'
    if 'sessiontype' in kwargs: # pragma: no cover
        sessiontype=kwargs.pop('sessiontype')

    # default deep link to the workout's comment page (hex-encoded id)
    commentlink = '/rowers/workout/{workoutid}/comment/'.format(
        workoutid=encoder.encode_hex(workoutid))
    if 'commentlink' in kwargs: # pragma: no cover
        commentlink = kwargs.pop('commentlink')

    d = {
        'first_name':first_name,
        'commenter_first_name':commenter_first_name,
        'commenter_last_name':commenter_last_name,
        'comment':comment,
        'workoutname':workoutname,
        'siteurl':siteurl,
        'workoutid':workoutid,
        'commentid':commentid,
        'sessiontype':sessiontype,
        'commentlink':commentlink,
    }

    res = send_template_email(from_email,
                              [fullemail],
                              subject,'teamresponseemail.html',d,**kwargs)

    return 1
|
|
|
|
|
|
@app.task
def handle_sendemailnewcomment(first_name,
                               last_name,
                               email,
                               commenter_first_name,
                               commenter_last_name,
                               comment, workoutname,
                               workoutid,
                               debug=False,**kwargs):
    """Email a user that a new comment was posted on their session.

    Unlike ``handle_sendemailnewresponse``, the template context carries the
    hex-encoded workout id. ``sessiontype``/``commentlink`` may be overridden
    via kwargs. Always returns 1.
    """
    recipient = email
    sender = 'Rowsandall <info@rowsandall.com>'
    subject = 'New comment on session ' + workoutname

    # Legacy py2-era unicode coercion of the comment text.
    comment = u''+comment

    siteurl = SITE_URL
    if debug:
        siteurl = SITE_URL_DEV

    sessiontype = kwargs.pop('sessiontype', 'workout')
    commentlink = kwargs.pop(
        'commentlink',
        '/rowers/workout/{workoutid}/comment/'.format(
            workoutid=encoder.encode_hex(workoutid)))

    context = {
        'first_name': first_name,
        'commenter_first_name': commenter_first_name,
        'commenter_last_name': commenter_last_name,
        'comment': comment,
        'workoutname': workoutname,
        'siteurl': siteurl,
        'workoutid': encoder.encode_hex(workoutid),
        'sessiontype': sessiontype,
        'commentlink': commentlink,
    }

    send_template_email(sender, [recipient], subject,
                        'teamresponseemail.html', context, **kwargs)

    return 1
|
|
|
|
@app.task
def handle_send_template_email(template,email,fromemail,rowername,
                               subject,message,debug=False,**kwargs):
    """Relay a templated message: sent to info@, cc the sender, bcc the rower.

    Always returns 1.
    """
    context = {
        'message': message,
        'rowername': rowername,
    }

    send_template_email('Rowsandall <info@rowsandall.com>',
                        ['info@rowsandall.com'], subject,
                        template, context,
                        cc=[fromemail], bcc=[email], **kwargs)

    return 1
|
|
|
|
@app.task
def handle_sendemail_message(email,fromemail,rowername,message,teamname,managername,
                             debug=False,**kwargs):
    """Email a team member a free-form message from the team manager.

    Always returns 1.
    """
    recipient = email
    sender = 'Rowsandall <info@rowsandall.com>'
    subject = 'New message from team ' + teamname

    context = {
        'rowername': rowername,
        'teamname': teamname,
        'managername': managername,
        'message': message,
    }

    send_template_email(sender, [recipient], subject,
                        'teammessage.html', context, **kwargs)

    return 1
|
|
|
|
@app.task
def handle_sendemail_request(email, name, code, teamname, requestor, id,
                             debug=False,**kwargs):
    """Email a team manager that *requestor* asked to join *teamname*.

    ``code``/``id`` identify the request in the accept/reject links.
    Always returns 1.
    """
    recipient = email
    sender = 'Rowsandall <info@rowsandall.com>'
    subject = 'Request to join team ' + teamname

    siteurl = SITE_URL
    if debug: # pragma: no cover
        siteurl = SITE_URL_DEV

    context = {
        'requestor': requestor,
        'teamname': teamname,
        'code': code,
        'siteurl': siteurl,
        'id': id,
        'first_name': name,
    }

    send_template_email(sender, [recipient], subject,
                        'teamrequestemail.html', context, **kwargs)

    return 1
|
|
|
|
|
|
@app.task
def handle_sendemail_request_accept(email, name, teamname, managername,
                                    debug=False,**kwargs):
    """Email a rower that their request to join *teamname* was accepted.

    Always returns 1.
    """
    recipient = email
    sender = 'Rowsandall <info@rowsandall.com>'
    subject = 'Welcome to ' + teamname

    siteurl = SITE_URL
    if debug:
        siteurl = SITE_URL_DEV

    context = {
        'first_name': name,
        'managername': managername,
        'teamname': teamname,
        'siteurl': siteurl,
    }

    send_template_email(sender, [recipient], subject,
                        'teamwelcomeemail.html', context, **kwargs)

    return 1
|
|
|
|
|
|
@app.task
def handle_sendemail_request_reject(email, name, teamname, managername,
                                    debug=False,**kwargs):
    """Email a rower that their application to join *teamname* was rejected.

    Always returns 1.
    """
    recipient = email
    sender = 'Rowsandall <info@rowsandall.com>'
    subject = 'Your application to ' + teamname + ' was rejected'

    siteurl = SITE_URL
    if debug:
        siteurl = SITE_URL_DEV

    context = {
        'first_name': name,
        'managername': managername,
        'teamname': teamname,
        'siteurl': siteurl,
    }

    send_template_email(sender, [recipient], subject,
                        'teamrejectemail.html', context, **kwargs)

    return 1
|
|
|
|
|
|
@app.task
def handle_sendemail_member_dropped(email, name, teamname, managername,
                                    debug=False,**kwargs):
    """Email a rower that they were removed from *teamname* (drop flow).

    Always returns 1.
    """
    recipient = email
    sender = 'Rowsandall <info@rowsandall.com>'
    subject = 'You were removed from ' + teamname

    siteurl = SITE_URL
    if debug:
        siteurl = SITE_URL_DEV

    context = {
        'first_name': name,
        'managername': managername,
        'teamname': teamname,
        'siteurl': siteurl,
    }

    send_template_email(sender, [recipient], subject,
                        'teamdropemail.html', context, **kwargs)

    return 1
|
|
|
|
|
|
@app.task
def handle_sendemail_team_removed(email, name, teamname, managername,
                                  debug=False,**kwargs):
    """Email a rower that they were removed from *teamname* (team-removal flow).

    Same subject as the drop flow but rendered with 'teamremoveemail.html'.
    Always returns 1.
    """
    recipient = email
    sender = 'Rowsandall <info@rowsandall.com>'
    subject = 'You were removed from ' + teamname

    siteurl = SITE_URL
    if debug:
        siteurl = SITE_URL_DEV

    context = {
        'first_name': name,
        'managername': managername,
        'teamname': teamname,
        'siteurl': siteurl,
    }

    send_template_email(sender, [recipient], subject,
                        'teamremoveemail.html', context, **kwargs)

    return 1
|
|
|
|
|
|
@app.task
def handle_sendemail_invite_reject(email, name, teamname, managername,
                                   debug=False,**kwargs):
    """Email a team manager that their invitation to *name* was rejected.

    Always returns 1.
    """
    recipient = email
    sender = 'Rowsandall <info@rowsandall.com>'
    # NOTE: subject names the invitee, not the team.
    subject = 'Your invitation to ' + name + ' was rejected'

    siteurl = SITE_URL
    if debug:
        siteurl = SITE_URL_DEV

    context = {
        'first_name': name,
        'managername': managername,
        'teamname': teamname,
        'siteurl': siteurl,
    }

    send_template_email(sender, [recipient], subject,
                        'teaminviterejectemail.html', context, **kwargs)

    return 1
|
|
|
|
|
|
@app.task
def handle_setcp(strokesdf,filename,workoutid,debug=False,**kwargs):
    """Rebuild the critical-power (CP) curve parquet file for a workout.

    The existing file at *filename* is always deleted first; a new one is
    written only when the stroke data carries usable power values.
    Returns 0 when the stroke data lacks a 'time' column, 1 otherwise.
    """
    # Start from a clean slate: remove any stale CP file.
    try:
        os.remove(filename)
    except FileNotFoundError:
        pass

    if strokesdf.empty:
        return 1

    try:
        totaltime = strokesdf['time'].max()
    except KeyError: # pragma: no cover
        return 0

    try:
        powermean = strokesdf['power'].mean()
    except KeyError: # pragma: no cover
        powermean = 0

    # Only compute a CP curve when there is real power data and a positive
    # time window (5% margin beyond the last stroke time).
    maxt = 1.05 * totaltime
    if powermean != 0 and maxt > 0:
        timegrid = datautils.getlogarr(maxt)
        grouped = strokesdf.groupby(['workoutid'])
        delta, cpvalues, avgpower = datautils.getcp(grouped, timegrid)

        pd.DataFrame({
            'delta': delta,
            'cp': cpvalues,
            'id': workoutid,
        }).to_parquet(filename, engine='fastparquet', compression='GZIP')

    return 1
|
|
|
|
@app.task
def handle_sendemail_invite_accept(email, name, teamname, managername,
                                   debug=False,**kwargs):
    """Email a team manager that their invitation to *name* was accepted.

    Always returns 1.
    """
    recipient = email
    sender = 'Rowsandall <info@rowsandall.com>'
    # NOTE: subject names the invitee, not the team.
    subject = 'Your invitation to ' + name + ' was accepted'

    siteurl = SITE_URL
    if debug:
        siteurl = SITE_URL_DEV

    context = {
        'first_name': name,
        'managername': managername,
        'teamname': teamname,
        'siteurl': siteurl,
    }

    send_template_email(sender, [recipient], subject,
                        'teaminviteacceptemail.html', context, **kwargs)

    return 1
|
|
|
|
|
|
# Another simple task for debugging purposes
def add2(x, y,debug=False,**kwargs): # pragma: no cover
    """Return the sum of ``x`` and ``y`` (smoke-test helper)."""
    total = x + y
    return total
|
|
|
|
# Endpoint of the RP3 rowing-machine GraphQL API (queried by handle_rp3_async_workout).
graphql_url = "https://rp3rowing-app.com/graphql"
|
|
|
|
@app.task
def handle_update_wps(rid,types,ids,mode,debug=False,**kwargs):
    """Recompute a rower's median work-per-stroke (drive energy) and persist it.

    Strokes with driveenergy <= 100 are treated as noise and excluded from
    the median. ``mode == 'water'`` updates ``median_wps``; any other mode
    updates ``median_wps_erg``.

    Returns the stored median, or 0 when no usable drive-energy data exists.
    """
    # Local import: the module-level sqlalchemy import only brings in create_engine.
    from sqlalchemy import text

    df = getsmallrowdata_db(['time','driveenergy'],ids=ids)
    try:
        mask = df['driveenergy'] > 100
    except (KeyError, TypeError): # pragma: no cover
        return 0
    try:
        wps_median = int(df.loc[mask,'driveenergy'].median())
    except ValueError: # pragma: no cover
        return 0

    # The column name comes from a fixed two-value whitelist; the values are
    # bound parameters (previously interpolated directly into the SQL string).
    column = 'median_wps' if mode == 'water' else 'median_wps_erg'
    query = text(
        "UPDATE `rowers_rower` SET `{column}` = :wps WHERE `id` = :rid".format(
            column=column))

    if debug: # pragma: no cover
        engine = create_engine(database_url_debug, echo=False)
    else:
        engine = create_engine(database_url, echo=False)

    with engine.connect() as conn, conn.begin():
        conn.execute(query, {'wps': wps_median, 'rid': rid})

    engine.dispose()

    return wps_median
|
|
|
|
@app.task
def handle_rp3_async_workout(userid,rp3token,rp3id,startdatetime,max_attempts,debug=False,**kwargs):
    """Fetch the CSV export of an RP3 workout and post it to the upload service.

    Polls the RP3 GraphQL API (up to ``max_attempts`` retries, 3 s apart)
    until the CSV export is ready, downloads it, and submits it to
    UPLOAD_SERVICE_URL. Returns the created workout id, or 0 on any failure.
    """
    headers = {'Authorization': 'Bearer ' + rp3token }

    # GraphQL query asking RP3 to prepare (or report on) a CSV export.
    get_download_link = """{
    download(workout_id: """ + str(rp3id) + """, type:csv){
    id
    status
    link
    }
    }"""

    download_url = ''
    waittime = 3  # seconds between polls

    for attempt in range(max_attempts + 1):
        response = requests.post(
            url=graphql_url,
            headers=headers,
            json={'query': get_download_link}
        )

        if response.status_code != 200: # pragma: no cover
            break

        try:
            workout_download_details = pd.json_normalize(response.json()['data']['download'])
        except Exception: # pragma: no cover
            return 0

        if workout_download_details.iat[0,1] == 'ready':
            download_url = workout_download_details.iat[0,2]
            # Fix: break immediately — the original slept another `waittime`
            # seconds even after the download link was already available.
            break

        time.sleep(waittime)

    if download_url == '': # pragma: no cover
        return 0

    filename = 'media/RP3Import_'+str(rp3id)+'.csv'

    res = requests.get(download_url,headers=headers)

    # Fall back to "now" when the caller supplied no start time.
    if not startdatetime: # pragma: no cover
        startdatetime = str(timezone.now())

    try:
        startdatetime = str(startdatetime)
    except Exception: # pragma: no cover
        pass

    if res.status_code != 200: # pragma: no cover
        return 0

    with open(filename,'wb') as f:
        f.write(res.content)

    uploadoptions = {
        'secret': UPLOAD_SERVICE_SECRET,
        'user': userid,
        'file': filename,
        'workouttype':'dynamic',
        'boattype':'1x',
        'rp3id':int(rp3id),
        'startdatetime':startdatetime,
    }

    session = requests.session()
    newHeaders = {'Content-type': 'application/json', 'Accept': 'text/plain'}
    session.headers.update(newHeaders)

    response = session.post(UPLOAD_SERVICE_URL,json=uploadoptions)

    if response.status_code != 200: # pragma: no cover
        return 0

    workoutid = response.json()['id']

    return workoutid
|
|
|
|
@app.task
def handle_nk_async_workout(alldata,userid,nktoken,nkid,delaysec,defaulttimezone,debug=False,**kwargs):
    """Import one NK (SpeedCoach) session: fetch strokes from the NK API,
    snapshot them to CSV, create the workout, and unblock the NK id.

    ``alldata`` maps NK session ids to their summary records; ``delaysec``
    staggers parallel imports. Returns the new workout id, or 0 when the
    session is unknown or the NK API call fails.
    """
    time.sleep(delaysec)

    # alldata may be keyed by str or int depending on the caller.
    try:
        data = alldata[nkid]
    except KeyError: # pragma: no cover
        try:
            data = alldata[int(nkid)]
        except KeyError:
            return 0

    params = {
        'sessionIds': nkid,
    }

    authorizationstring = str('Bearer ' + nktoken)
    headers = {'Authorization': authorizationstring,
               'user-agent': 'sanderroosendaal',
               'Content-Type': 'application/json',
               }

    # get strokes
    url = NK_API_LOCATION+"api/v1/sessions/strokes"
    response = requests.get(url,headers=headers,params=params)

    if response.status_code != 200: # pragma: no cover
        # error handling and logging
        # dologging('nklog.log','Response status code {code}'.format(code=response.status_code))
        return 0

    # Response payload is keyed by the (stringified) session id.
    jsonData = response.json()
    strokeData = jsonData[str(nkid)]

    df = strokeDataToDf(strokeData)

    # get workout data
    timestampbegin = df['timestamp'].min()
    timestampend = df['timestamp'].max()

    # Random prefix avoids collisions when the same NK id is re-imported.
    csvfilename = 'media/{code}_{nkid}.csv.gz'.format(
        nkid=nkid,
        code = uuid4().hex[:16]
    )

    df.to_csv(csvfilename, index_label='index', compression='gzip')

    workoutid,error = add_workout_from_data(userid,nkid,data,df)

    # dologging('nklog.log','NK Workout ID {id}'.format(id=workoutid))

    if debug: # pragma: no cover
        engine = create_engine(database_url_debug, echo=False)
    else:
        engine = create_engine(database_url, echo=False)

    # Read back the NK id the upload recorded, so it can be unparked below.
    query = 'SELECT uploadedtonk from rowers_workout WHERE id ={workoutid}'.format(workoutid=workoutid)

    newnkid = 0
    with engine.connect() as conn, conn.begin():
        result = conn.execute(query)
        tdata = result.fetchall()
        if tdata:
            newnkid = tdata[0][0]

    conn.close()

    # nkblocked.json holds NK session ids parked during import; rewrite it
    # without the id that was just imported.
    parkedids = []
    try:
        with open('nkblocked.json','r') as nkblocked:
            jsondata = json.load(nkblocked)
            parkedids = jsondata['ids']
    except FileNotFoundError: # pragma: no cover
        pass

    newparkedids = [id for id in parkedids if id != newnkid]
    with open('nkblocked.json','wt') as nkblocked:
        tdata = {'ids':newparkedids}
        nkblocked.seek(0)
        json.dump(tdata,nkblocked)

    # evt update workout summary

    # return
    return workoutid
|
|
|
|
@app.task
def handle_c2_getworkout(userid,c2token,c2id,defaulttimezone,debug=False,**kwargs):
    """Fetch one result summary from the Concept2 logbook and import it.

    Delegates the heavy lifting to ``handle_c2_async_workout`` (with zero
    delay). Returns its workout id, or 0 when the logbook call fails.
    """
    headers = {
        'Authorization': str('Bearer ' + c2token),
        'user-agent': 'sanderroosendaal',
        'Content-Type': 'application/json',
    }
    url = "https://log.concept2.com/api/users/me/results/"+str(c2id)

    reply = requests.get(url, headers=headers)
    if reply.status_code != 200: # pragma: no cover
        return 0

    alldata = {c2id: reply.json()['data']}
    return handle_c2_async_workout(alldata, userid, c2token, c2id, 0, defaulttimezone)
|
|
|
|
def df_from_summary(data):
    """Build a per-split stroke DataFrame from a Concept2 result summary.

    Used when no per-stroke data is available: each logbook split becomes one
    row with cumulative time/distance and the split's stroke rate / heart rate.
    Returns a DataFrame with CrewNerd-style column names.
    """
    distance = data['distance']
    c2id = data['id']
    workouttype = data['type']
    verified = data['verified']
    weightclass = data['weight_class']

    try:
        title = data['name']
    except KeyError: # pragma: no cover
        title = ""
    try:
        t = data['comments'].split('\n', 1)[0]
        title += t[:40]
    except (KeyError, AttributeError, TypeError): # pragma: no cover
        # Fix: keep the title taken from data['name'] — the original reset
        # title to '' whenever comments were absent, discarding the name.
        pass

    weightcategory = 'hwt'
    if weightclass == "L":
        weightcategory = 'lwt'

    startdatetime,starttime,workoutdate,duration,starttimeunix,timezone = utils.get_startdatetime_from_c2data(data)

    try:
        splits = data['workout']['splits']
    except KeyError: # pragma: no cover
        # Fix: fall back to an empty list — the original used [0], which made
        # splits[0]['stroke_rate'] and the loop below raise TypeError.
        splits = []

    time = starttimeunix
    elapsed_distance = 0
    times = [0]
    distances = [0]
    # Seed the series with the first split so row 0 carries plausible values.
    try:
        spms = [splits[0]['stroke_rate']]
    except (KeyError, IndexError, TypeError): # pragma: no cover
        spms = [0]
    try:
        hrs = [splits[0]['heart_rate']['average']]
    except (KeyError, IndexError, TypeError): # pragma: no cover
        hrs = [0]

    for split in splits:
        time += split['time']/10.  # C2 times are in tenths of a second
        elapsed_distance += split['distance']
        times.append(time)
        distances.append(elapsed_distance)
        spms.append(split['stroke_rate'])
        try:
            hrs.append(split['heart_rate']['average'])
        except KeyError: # pragma: no cover
            hrs.append(0)

    df = pd.DataFrame({
        'TimeStamp (sec)': times,
        ' Horizontal (meters)': distances,
        ' HRCur (bpm)': hrs,
        ' Cadence (stokes/min)': spms,
    })

    df[' ElapsedTime (sec)'] = df['TimeStamp (sec)']-starttimeunix

    return df
|
|
|
|
|
|
|
|
@app.task
def handle_c2_async_workout(alldata,userid,c2token,c2id,delaysec,defaulttimezone,debug=False,**kwargs):
    """Import one Concept2 logbook result as a workout.

    Fetches per-stroke data when available (falling back to split summaries),
    writes a gzip CSV, posts it to the upload service, updates the workout's
    distance/duration/summary in the database, and unparks the C2 id in
    c2blocked.json. Returns the new workout id, or 0 on API failure.
    """
    time.sleep(delaysec)
    with open("c2_auto_import.log","a") as errorlog:
        timestr = time.strftime("%Y%m%d-%H%M%S")
        errorlog.write(timestr+' '+str(c2id)+' for userid '+str(userid)+'\r\n')
    data = alldata[c2id]
    splitdata = None

    distance = data['distance']
    c2id = data['id']
    workouttype = data['type']
    verified = data['verified']

    weightclass = data['weight_class']

    try:
        has_strokedata = data['stroke_data']
    except KeyError: # pragma: no cover
        has_strokedata = True

    s = 'User {userid}, C2 ID {c2id}'.format(userid=userid,c2id=c2id)
    dologging('debuglog.log',s)
    dologging('debuglog.log',json.dumps(data))

    try:
        title = data['name']
    except KeyError:
        title = ""
    try:
        t = data['comments'].split('\n', 1)[0]
        title += t[:40]
    except (KeyError, AttributeError, TypeError): # pragma: no cover
        # Fix: keep the title taken from data['name'] — the original reset
        # title to '' whenever comments were missing.
        pass

    weightcategory = 'hwt'
    if weightclass == "L":
        weightcategory = 'lwt'

    # Create CSV file name and save data to CSV file
    csvfilename ='media/Import_'+str(c2id)+'.csv.gz'

    startdatetime,starttime,workoutdate,duration,starttimeunix,timezone = utils.get_startdatetime_from_c2data(data)

    s = 'Time zone {timezone}, startdatetime {startdatetime}, duration {duration}'.format(
        timezone=timezone,startdatetime=startdatetime,
        duration=duration)
    dologging('debuglog.log',s)

    try:
        notes = data['comments']
        name = notes[:40]
    except (KeyError,TypeError): # pragma: no cover
        notes = 'C2 Import Workout from {startdatetime}'.format(startdatetime=startdatetime)
        name = notes

    authorizationstring = str('Bearer ' + c2token)
    headers = {'Authorization': authorizationstring,
               'user-agent': 'sanderroosendaal',
               'Content-Type': 'application/json'}
    url = "https://log.concept2.com/api/users/me/results/"+str(c2id)+"/strokes"
    try:
        s = requests.get(url,headers=headers)
    except ConnectionError: # pragma: no cover
        return 0

    if s.status_code != 200: # pragma: no cover
        dologging('debuglog.log','No Stroke Data. Status Code {code}'.format(code=s.status_code))
        dologging('debuglog.log',s.text)
        has_strokedata = False

    if not has_strokedata: # pragma: no cover
        # No per-stroke data: build one row per split from the summary.
        df = df_from_summary(data)
    else:
        dologging('debuglog.log',json.dumps(s.json()))

        strokedata = pd.DataFrame.from_dict(s.json()['data'])

        # 't' is in tenths of a second; make_cumvalues also yields lap indices
        # at the points where the time counter restarts.
        res = make_cumvalues(0.1*strokedata['t'])
        cum_time = res[0]
        lapidx = res[1]

        unixtime = cum_time+starttimeunix
        # unixtime[0] = starttimeunix
        seconds = 0.1*strokedata.loc[:,'t']

        nr_rows = len(unixtime)

        try: # pragma: no cover
            latcoord = strokedata.loc[:,'lat']
            loncoord = strokedata.loc[:,'lon']
        except:
            latcoord = np.zeros(nr_rows)
            loncoord = np.zeros(nr_rows)

        try:
            strokelength = strokedata.loc[:,'strokelength']
        except: # pragma: no cover
            strokelength = np.zeros(nr_rows)

        dist2 = 0.1*strokedata.loc[:,'d']

        try:
            spm = strokedata.loc[:,'spm']
        except KeyError: # pragma: no cover
            spm = 0*dist2

        try:
            hr = strokedata.loc[:,'hr']
        except KeyError: # pragma: no cover
            hr = 0*spm

        # Pace comes in tenths of sec/500m; clamp and replace 0 with a slow
        # default so the velocity/power formulas below stay finite.
        pace = strokedata.loc[:,'p']/10.
        pace = np.clip(pace,0,1e4)
        pace = pace.replace(0,300)

        velo = 500./pace
        power = 2.8*velo**3
        if workouttype == 'bike': # pragma: no cover
            # Bike pace is per 1000 m, not per 500 m.
            velo = 1000./pace

        dologging('debuglog.log','Unix Time Stamp {s}'.format(s=unixtime[0]))

        df = pd.DataFrame({'TimeStamp (sec)':unixtime,
                           ' Horizontal (meters)': dist2,
                           ' Cadence (stokes/min)':spm,
                           ' HRCur (bpm)':hr,
                           ' longitude':loncoord,
                           ' latitude':latcoord,
                           ' Stroke500mPace (sec/500m)':pace,
                           ' Power (watts)':power,
                           ' DragFactor':np.zeros(nr_rows),
                           ' DriveLength (meters)':np.zeros(nr_rows),
                           ' StrokeDistance (meters)':strokelength,
                           ' DriveTime (ms)':np.zeros(nr_rows),
                           ' StrokeRecoveryTime (ms)':np.zeros(nr_rows),
                           ' AverageDriveForce (lbs)':np.zeros(nr_rows),
                           ' PeakDriveForce (lbs)':np.zeros(nr_rows),
                           ' lapIdx':lapidx,
                           ' WorkoutState': 4,
                           ' ElapsedTime (sec)':seconds,
                           'cum_dist': dist2
                           })

    # Fix: the original discarded sort_values' return value (a no-op).
    df.sort_values(by='TimeStamp (sec)',ascending=True,inplace=True)

    res = df.to_csv(csvfilename,index_label='index',
                    compression='gzip'
                    )

    uploadoptions = {
        'secret':UPLOAD_SERVICE_SECRET,
        'user':userid,
        'file': csvfilename,
        'title': title,
        'workouttype':workouttype,
        'boattype':'1x',
        'c2id':c2id,
        'startdatetime':startdatetime.isoformat(),
        'timezone':str(timezone)
    }

    session = requests.session()
    newHeaders = {'Content-type': 'application/json', 'Accept': 'text/plain'}
    session.headers.update(newHeaders)

    response = session.post(UPLOAD_SERVICE_URL,json=uploadoptions)

    if response.status_code != 200: # pragma: no cover
        return 0

    workoutid = response.json()['id']

    if debug: # pragma: no cover
        engine = create_engine(database_url_debug, echo=False)
    else:
        engine = create_engine(database_url, echo=False)

    # Read back the C2 id the upload recorded, so it can be unparked below.
    query = 'SELECT uploadedtoc2 from rowers_workout WHERE id ={workoutid}'.format(workoutid=workoutid)

    newc2id = 0
    with engine.connect() as conn, conn.begin():
        result = conn.execute(query)
        tdata = result.fetchall()
        if tdata: # pragma: no cover
            newc2id = tdata[0][0]

    conn.close()

    # c2blocked.json parks C2 ids during import; rewrite it without this one.
    parkedids = []
    with open('c2blocked.json','r') as c2blocked:
        try:
            jsondata = json.load(c2blocked)
            parkedids = jsondata['ids']
        except JSONDecodeError: # pragma: no cover
            parkedids = []

    newparkedids = [id for id in parkedids if id != newc2id]
    with open('c2blocked.json','wt') as c2blocked:
        tdata = {'ids':newparkedids}
        c2blocked.seek(0)
        json.dump(tdata,c2blocked)

    # set distance, time
    query = "UPDATE `rowers_workout` SET `distance` = '%s', `duration` = '%s' WHERE `id` = '%s'" % (distance, duration, workoutid)

    with engine.connect() as conn, conn.begin():
        result = conn.execute(query)

    conn.close()
    engine.dispose()

    # summary
    if 'workout' in data:
        if 'splits' in data['workout']: # pragma: no cover
            splitdata = data['workout']['splits']
        elif 'intervals' in data['workout']: # pragma: no cover
            splitdata = data['workout']['intervals']
        else: # pragma: no cover
            splitdata = False
    else:
        splitdata = False

    if splitdata: # pragma: no cover
        summary,sa,results = summaryfromsplitdata(splitdata,data,csvfilename,workouttype=workouttype)

        query = "UPDATE `rowers_workout` SET `summary` = '%s' WHERE `id` = %s" % (summary, workoutid)

        with engine.connect() as conn, conn.begin():
            result = conn.execute(query)

        conn.close()
        engine.dispose()

        from rowingdata.trainingparser import getlist
        if sa:
            values = getlist(sa)
            units = getlist(sa,sel='unit')
            types = getlist(sa,sel='type')

            rowdata = rdata(csvfile=csvfilename)
            if rowdata:
                rowdata.updateintervaldata(values,
                                           units,types,results)

                rowdata.write_csv(csvfilename,gzip=True)
                # Fix: the original referenced undefined `w.id` (NameError);
                # the id of the workout just created is `workoutid`.
                dataprepnodjango.update_strokedata(workoutid,rowdata.df)

    return workoutid
|
|
|
|
|
|
|
|
@app.task
def fetch_strava_workout(stravatoken,oauth_data,stravaid,csvfilename,userid,debug=False,**kwargs):
    """Download a Strava activity (summary + streams), convert it to a
    CrewNerd-style CSV at *csvfilename*, and post it to the upload service.

    Progress/errors are appended to strava_webhooks.log.
    Returns 1 on success, 0 when the activity cannot be fetched or is empty.
    """
    fetchresolution = 'high'
    authorizationstring = str('Bearer '+stravatoken)
    headers = {'Authorization': authorizationstring,
               'user-agent': 'sanderroosendaal',
               'Content-Type': 'application/json',
               'resolution': 'medium',}
    url = "https://www.strava.com/api/v3/activities/"+str(stravaid)
    response = requests.get(url,headers=headers)
    if response.status_code != 200: # pragma: no cover
        tstamp = time.localtime()
        timestamp = time.strftime('%b-%d-%Y_%H%M', tstamp)
        with open('strava_webhooks.log','a') as f:
            f.write('\n')
            f.write(timestamp)
            f.write(' ')
            f.write(url)
            f.write(' ')
            f.write(json.dumps(headers))
            f.write(' ')
            f.write(authorizationstring)
            f.write(' ')
            f.write('handle_get_strava_file response code {code}\n'.format(code=response.status_code))
            try:
                f.write('Response json {json}\n'.format(json=response.json()))
            except Exception:
                pass

        return 0

    # Fix: reuse the response already fetched above instead of issuing a
    # second identical GET for the same activity.
    workoutsummary = response.json()

    try:
        startdatetime = workoutsummary['start_date']
    except KeyError: # pragma: no cover
        startdatetime = timezone.now()

    # Pull the individual data streams (any of these may be None).
    spm = get_strava_stream(None,'cadence',stravaid,authorizationstring=authorizationstring)
    hr = get_strava_stream(None,'heartrate',stravaid,authorizationstring=authorizationstring)
    t = get_strava_stream(None,'time',stravaid,authorizationstring=authorizationstring)
    velo = get_strava_stream(None,'velocity_smooth',stravaid,authorizationstring=authorizationstring)
    d = get_strava_stream(None,'distance',stravaid,authorizationstring=authorizationstring)
    coords = get_strava_stream(None,'latlng',stravaid,authorizationstring=authorizationstring)
    power = get_strava_stream(None,'watts',stravaid,authorizationstring=authorizationstring)

    tstamp = time.localtime()
    timestamp = time.strftime('%b-%d-%Y_%H%M', tstamp)
    with open('strava_webhooks.log','a') as f:
        f.write('\n')
        f.write(timestamp)
        f.write(' ')
        f.write(url)
        f.write(' ')
        f.write('Response data {data}\n'.format(data=workoutsummary))

    if t is not None:
        nr_rows = len(t)
    else: # pragma: no cover
        # No time stream: synthesize one second-by-second from elapsed_time.
        try:
            duration = int(workoutsummary['elapsed_time'])
        except KeyError:
            duration = 0
        t = pd.Series(range(duration+1))

        nr_rows = len(t)

    if nr_rows == 0: # pragma: no cover
        return 0

    # Fill missing streams with zeros of matching length.
    if d is None: # pragma: no cover
        d = 0*t

    if spm is None: # pragma: no cover
        spm = np.zeros(nr_rows)

    if power is None: # pragma: no cover
        power = np.zeros(nr_rows)

    if hr is None: # pragma: no cover
        hr = np.zeros(nr_rows)

    if velo is None: # pragma: no cover
        velo = np.zeros(nr_rows)

    # Smooth velocity over roughly a 5 second window before deriving pace.
    dt = np.diff(t).mean()
    wsize = round(5./dt)

    velo2 = ewmovingaverage(velo,wsize)

    if coords is not None:
        try:
            lat = coords[:,0]
            lon = coords[:,1]
        except IndexError: # pragma: no cover
            lat = np.zeros(len(t))
            lon = np.zeros(len(t))
    else: # pragma: no cover
        lat = np.zeros(len(t))
        lon = np.zeros(len(t))

    strokelength = velo*60./(spm)
    strokelength[np.isinf(strokelength)] = 0.0

    pace = 500./(1.0*velo2)
    pace[np.isinf(pace)] = 0.0

    # Intermediate frame in C2-stroke-API units (tenths of sec / meters).
    strokedata = pd.DataFrame({'t':10*t,
                               'd':10*d,
                               'p':10*pace,
                               'spm':spm,
                               'hr':hr,
                               'lat':lat,
                               'lon':lon,
                               'power':power,
                               'strokelength':strokelength,
                               })

    try:
        workouttype = mytypes.stravamappinginv[workoutsummary['type']]
    except KeyError: # pragma: no cover
        workouttype = 'other'

    if workouttype.lower() == 'rowing': # pragma: no cover
        workouttype = 'rower'

    # A GPS track distinguishes on-the-water rowing from the erg.
    if 'summary_polyline' in workoutsummary['map'] and workouttype=='rower': # pragma: no cover
        workouttype = 'water'

    try:
        comments = workoutsummary['comments']
    except KeyError:
        comments = ' '

    try:
        thetimezone = workoutsummary['timezone']
    except KeyError: # pragma: no cover
        thetimezone = 'UTC'

    try:
        rowdatetime = iso8601.parse_date(workoutsummary['date_utc'])
    except KeyError:
        rowdatetime = iso8601.parse_date(workoutsummary['start_date'])
    except ParseError: # pragma: no cover
        rowdatetime = iso8601.parse_date(workoutsummary['date'])

    try:
        intervaltype = workoutsummary['workout_type']
    except KeyError:
        intervaltype = ''

    try:
        title = workoutsummary['name']
    except KeyError: # pragma: no cover
        title = ""
    try:
        # Fix: the original read from undefined `data` (NameError caught by a
        # bare except that also reset title to ''), so the Strava activity
        # name was always discarded.
        firstline = workoutsummary['comments'].split('\n', 1)[0]
        title += firstline[:20]
    except (KeyError, AttributeError, TypeError):
        pass

    starttimeunix = arrow.get(rowdatetime).timestamp()

    res = make_cumvalues(0.1*strokedata['t'])
    cum_time = res[0]
    lapidx = res[1]

    unixtime = cum_time+starttimeunix
    seconds = 0.1*strokedata.loc[:,'t']

    nr_rows = len(unixtime)

    try:
        latcoord = strokedata.loc[:,'lat']
        loncoord = strokedata.loc[:,'lon']
        # Constant coordinates mean no real GPS track — treat as erg.
        if latcoord.std() == 0 and loncoord.std() == 0 and workouttype == 'water': # pragma: no cover
            workouttype = 'rower'
    except: # pragma: no cover
        latcoord = np.zeros(nr_rows)
        loncoord = np.zeros(nr_rows)
        if workouttype == 'water':
            workouttype = 'rower'

    try:
        strokelength = strokedata.loc[:,'strokelength']
    except: # pragma: no cover
        strokelength = np.zeros(nr_rows)

    dist2 = 0.1*strokedata.loc[:,'d']

    try:
        spm = strokedata.loc[:,'spm']
    except KeyError: # pragma: no cover
        spm = 0*dist2

    try:
        hr = strokedata.loc[:,'hr']
    except KeyError: # pragma: no cover
        hr = 0*spm

    pace = strokedata.loc[:,'p']/10.
    pace = np.clip(pace,0,1e4)
    pace = pace.replace(0,300)

    velo = 500./pace

    try:
        power = strokedata.loc[:,'power']
    except KeyError: # pragma: no cover
        power = 2.8*velo**3

    #if power.std() == 0 and power.mean() == 0:
    #    power = 2.8*velo**3

    # save csv
    # Create data frame with all necessary data to write to csv
    df = pd.DataFrame({'TimeStamp (sec)':unixtime,
                       ' Horizontal (meters)': dist2,
                       ' Cadence (stokes/min)':spm,
                       ' HRCur (bpm)':hr,
                       ' longitude':loncoord,
                       ' latitude':latcoord,
                       ' Stroke500mPace (sec/500m)':pace,
                       ' Power (watts)':power,
                       ' DragFactor':np.zeros(nr_rows),
                       ' DriveLength (meters)':np.zeros(nr_rows),
                       ' StrokeDistance (meters)':strokelength,
                       ' DriveTime (ms)':np.zeros(nr_rows),
                       ' StrokeRecoveryTime (ms)':np.zeros(nr_rows),
                       ' AverageDriveForce (lbs)':np.zeros(nr_rows),
                       ' PeakDriveForce (lbs)':np.zeros(nr_rows),
                       ' lapIdx':lapidx,
                       ' ElapsedTime (sec)':seconds,
                       'cum_dist':dist2,
                       })

    # Fix: the original discarded sort_values' return value (a no-op).
    df.sort_values(by='TimeStamp (sec)',ascending=True,inplace=True)

    row = rowingdata.rowingdata(df=df)
    row.write_csv(csvfilename,gzip=False)

    summary = row.allstats()
    maxdist = df['cum_dist'].max()
    duration = row.duration

    uploadoptions = {
        'secret':UPLOAD_SERVICE_SECRET,
        'user':userid,
        'file': csvfilename,
        'title': title,
        'workouttype':workouttype,
        'boattype':'1x',
        'stravaid':stravaid,
    }

    session = requests.session()
    newHeaders = {'Content-type': 'application/json', 'Accept': 'text/plain'}
    session.headers.update(newHeaders)
    response = session.post(UPLOAD_SERVICE_URL,json=uploadoptions)

    t = time.localtime()
    timestamp = time.strftime('%b-%d-%Y_%H%M', t)
    with open('strava_webhooks.log','a') as f:
        f.write('\n')
        f.write(timestamp)
        f.write(' ')
        f.write('fetch_strava_workout posted file with strava id {stravaid} user id {userid}\n'.format(
            stravaid=stravaid,userid=userid))

    return 1
|