2993 lines
82 KiB
Python
2993 lines
82 KiB
Python
from __future__ import absolute_import
|
|
from __future__ import division
|
|
from __future__ import print_function
|
|
from __future__ import unicode_literals
|
|
|
|
""" Background tasks done by Celery (develop) or QR (production) """
|
|
import os
|
|
import time
|
|
import gc
|
|
import gzip
|
|
import shutil
|
|
import numpy as np
|
|
import re
|
|
import sys
|
|
import json
|
|
|
|
from scipy import optimize
|
|
from scipy.signal import savgol_filter
|
|
|
|
import rowingdata
|
|
from rowingdata import make_cumvalues
|
|
from uuid import uuid4
|
|
from rowingdata import rowingdata as rdata
|
|
from datetime import timedelta
|
|
from sqlalchemy import create_engine
|
|
|
|
#from celery import app
|
|
from rowers.celery import app
|
|
from celery import shared_task
|
|
|
|
import datetime
|
|
import pytz
|
|
import iso8601
|
|
from iso8601 import ParseError
|
|
|
|
from json.decoder import JSONDecodeError
|
|
|
|
from matplotlib.backends.backend_agg import FigureCanvas
|
|
#from matplotlib.backends.backend_cairo import FigureCanvasCairo as FigureCanvas
|
|
import matplotlib.pyplot as plt
|
|
from matplotlib import path
|
|
|
|
import grpc
|
|
import rowers.otw_power_calculator_pb2 as calculator_pb2
|
|
import rowers.otw_power_calculator_pb2_grpc as calculator_pb2_grpc
|
|
import rowers.rowing_workout_metrics_pb2 as metrics_pb2
|
|
import rowers.rowing_workout_metrics_pb2_grpc as metrics_pb2_grpc
|
|
|
|
from rowsandall_app.settings import SITE_URL
|
|
from rowsandall_app.settings_dev import SITE_URL as SITE_URL_DEV
|
|
from rowsandall_app.settings import PROGRESS_CACHE_SECRET
|
|
from rowsandall_app.settings import SETTINGS_NAME
|
|
from rowsandall_app.settings import workoutemailbox
|
|
from rowsandall_app.settings import UPLOAD_SERVICE_SECRET, UPLOAD_SERVICE_URL
|
|
|
|
from requests_oauthlib import OAuth1, OAuth1Session
|
|
|
|
import pandas as pd
|
|
|
|
from django_rq import job
|
|
from django.utils import timezone
|
|
from django.utils.html import strip_tags
|
|
|
|
from rowers.utils import deserialize_list,ewmovingaverage,wavg
|
|
from rowers.emails import htmlstrip
|
|
from rowers import mytypes
|
|
|
|
#from HTMLParser import HTMLParser
|
|
from html.parser import HTMLParser
|
|
class MLStripper(HTMLParser):
    """HTML parser that accumulates only the text content of a document.

    Feed it markup via ``feed()`` and retrieve the concatenated text,
    with all tags removed, via ``get_data()``.
    """

    def __init__(self):
        # Bug fix: HTMLParser.__init__ must run. On Python 3 it sets
        # ``convert_charrefs`` (feed() raises AttributeError without it)
        # and performs the initial reset().
        super().__init__(convert_charrefs=True)
        self.reset()
        self.fed = []  # collected text fragments, in document order

    def handle_data(self, d):
        # Called by the base parser for each run of text between tags.
        self.fed.append(d)

    def get_data(self):
        """Return all text fed so far, with markup stripped."""
        return ''.join(self.fed)
|
|
|
|
def strip_tags(html):
    """Return *html* with all markup removed, keeping only the text."""
    stripper = MLStripper()
    stripper.feed(html)
    return stripper.get_data()
|
|
|
|
|
|
from rowers.dataprepnodjango import (
|
|
update_strokedata, new_workout_from_file,
|
|
getsmallrowdata_db, updatecpdata_sql,update_c2id_sql,
|
|
update_workout_field_sql,
|
|
update_agegroup_db,fitnessmetric_to_sql,
|
|
add_c2_stroke_data_db,totaltime_sec_to_string,
|
|
create_c2_stroke_data_db,update_empower,
|
|
database_url_debug,database_url,dataprep,
|
|
# create_strava_stroke_data_db
|
|
)
|
|
|
|
from rowers.opaque import encoder
|
|
|
|
from django.core.mail import (
|
|
send_mail,
|
|
EmailMessage,EmailMultiAlternatives,
|
|
)
|
|
|
|
from django.template import Context
|
|
from django.db.utils import OperationalError
|
|
from jinja2 import Template,Environment,FileSystemLoader
|
|
env = Environment(loader = FileSystemLoader(["rowers/templates"]))
|
|
|
|
import rowers.datautils as datautils
|
|
import rowers.utils as utils
|
|
import requests
|
|
import rowers.longtask as longtask
|
|
import arrow
|
|
import stravalib
|
|
from stravalib.exc import ActivityUploadFailed
|
|
|
|
from rowers.utils import get_strava_stream
|
|
|
|
def safetimedelta(x):
    """Convert *x* seconds into a ``timedelta``, falling back to zero.

    NaN raises ValueError inside ``timedelta``; None or other
    non-numeric input raises TypeError.  Both now yield ``timedelta(0)``
    so this can be mapped safely over dirty time columns.
    """
    try:
        return timedelta(seconds=x)
    except (ValueError, TypeError):
        return timedelta(seconds=0)
|
|
|
|
siteurl = SITE_URL
|
|
|
|
# testing task
|
|
|
|
from rowers.emails import send_template_email
|
|
from rowers.courseutils import (
|
|
coursetime_paths, coursetime_first, time_in_path,
|
|
InvalidTrajectoryError
|
|
)
|
|
|
|
|
|
@app.task
def add(x, y):
    """Trivial smoke-test task: return the sum of two numbers."""
    total = x + y
    return total
|
|
|
|
|
|
@app.task
def handle_c2_sync(workoutid, url, headers, data, debug=False, **kwargs):
    """Push a workout to the Concept2 logbook and store the returned id.

    Returns 0 when the POST is rejected, otherwise the result of the
    database update recording the new Concept2 id on the workout.
    """
    reply = requests.post(url, headers=headers, data=data)
    if reply.status_code not in (200, 201):
        return 0

    payload = reply.json()
    remote_id = payload['data']['id']

    return update_workout_field_sql(workoutid, 'uploadedtoc2', remote_id, debug=debug)
|
|
|
|
@app.task
def handle_sporttracks_sync(workoutid, url, headers, data, debug=False, **kwargs):
    """Push a workout to SportTracks and store the returned activity id.

    Returns 0 when the POST is rejected or the returned URI does not
    have the expected shape, otherwise the result of the database update
    recording the SportTracks id on the workout.
    """
    response = requests.post(url, headers=headers, data=data)
    if response.status_code not in [200, 201]:
        return 0

    t = response.json()
    uri = t['uris'][0]
    # Raw string so the escaped dots / \d are passed through literally.
    regex = r'.*?sporttracks\.mobi\/api\/v2\/fitnessActivities/(\d+)\.json$'
    match = re.match(regex, uri)
    if match is None:
        # Unexpected URI format: fail gracefully instead of raising
        # AttributeError on ``None.group``.
        return 0
    id = int(match.group(1))

    res = update_workout_field_sql(workoutid, 'uploadedtosporttracks', id, debug=debug)
    # Bug fix: the previous revision fell off the end and returned None.
    return res
|
|
|
|
@app.task
def handle_runkeeper_sync(workoutid, url, headers, data, debug=False, **kwargs):
    """Push a workout to Runkeeper and store the returned activity id.

    Bug fixes vs. the previous revision: the decorator was written as a
    bare ``app.task`` expression (a no-op, so the function was never
    registered as a Celery task), and the function fell off the end
    without returning the database-update result.
    """
    response = requests.post(url, headers=headers, data=data)
    if response.status_code not in [200, 201]:
        return 0

    t = response.json()
    uri = t['uris'][0]
    # NOTE(review): this pattern matches sporttracks.mobi URIs and looks
    # copy-pasted from handle_sporttracks_sync -- confirm against the
    # actual Runkeeper response URI format.
    regex = r'.*?sporttracks\.mobi\/api\/v2\/fitnessActivities/(\d+)\.json$'
    match = re.match(regex, uri)
    if match is None:
        return 0
    id = int(match.group(1))

    res = update_workout_field_sql(workoutid, 'uploadedtorunkeeper', id, debug=debug)
    return res
|
|
|
|
|
|
|
|
@app.task
def handle_strava_sync(stravatoken, workoutid, filename, name, activity_type,
                       description, debug=False, **kwargs):
    """Upload a gzipped TCX file to Strava and record the activity id.

    Waits for the upload to be processed, then updates the activity's
    type/description and stores the Strava id on the workout row.  The
    local file is removed afterwards on a best-effort basis.  Returns 0
    when the upload fails, 1 otherwise.
    """
    client = stravalib.Client(access_token=stravatoken)
    failed = False
    with open(filename, 'rb') as f:
        act = client.upload_activity(f, 'tcx.gz', name=name)
        try:
            res = act.wait(poll_interval=5.0, timeout=60)
        except (ActivityUploadFailed, JSONDecodeError):
            failed = True

    if failed:
        # Bug fix: with no upload result there is nothing to update; the
        # previous revision fell through and hit a NameError on ``res``.
        return 0

    try:
        act = client.update_activity(res.id, activity_type=activity_type,
                                     description=description,
                                     device_name='Rowsandall.com')
    except TypeError:
        # Older stravalib versions do not accept device_name.
        act = client.update_activity(res.id, activity_type=activity_type,
                                     description=description)
    except Exception:
        # Log any other failure but still record the uploaded activity.
        e = sys.exc_info()[0]
        t = time.localtime()
        timestamp = bytes('{t}'.format(t=time.strftime('%b-%d-%Y_%H%M', t)), 'utf-8')
        with open('stravalog.log', 'ab') as f:
            f.write(b'\n')
            f.write(timestamp)
            # Bug fix: the log file is opened in binary mode, so the
            # message must be encoded (str raised TypeError here).
            f.write(bytes(str(e), 'utf-8'))

    result = update_workout_field_sql(workoutid, 'uploadedtostrava', res.id, debug=debug)
    try:
        os.remove(filename)
    except OSError:
        # Best-effort cleanup only.
        pass

    return 1
|
|
|
|
@app.task
def handle_c2_import_stroke_data(c2token,
                                 c2id,workoutid,
                                 starttimeunix,
                                 csvfilename,debug=True,**kwargs):
    """Fetch per-stroke data for a Concept2 logbook result and store it.

    Tries the strokes endpoint first; when no stroke data is available,
    falls back to the result summary and synthesizes stroke data from
    the totals.  Returns 1 on success, 0 when neither request succeeds.
    """

    # Optional erg type; defaults to 'rower'.
    if 'workouttype' in kwargs:
        workouttype = kwargs['workouttype']
    else:
        workouttype = 'rower'

    authorizationstring = str('Bearer ' + c2token)
    headers = {'Authorization': authorizationstring,
               'user-agent': 'sanderroosendaal',
               'Content-Type': 'application/json'}
    url = "https://log.concept2.com/api/users/me/results/"+str(c2id)+"/strokes"
    s = requests.get(url,headers=headers)
    if s.status_code == 200:
        # Stroke-level data available: persist it directly.
        strokedata = pd.DataFrame.from_dict(s.json()['data'])
        result = add_c2_stroke_data_db(
            strokedata,workoutid,starttimeunix,
            csvfilename,debug=debug,workouttype=workouttype
        )

        return 1
    else:
        # No stroke data: fall back to the result summary endpoint.
        url = "https://log.concept2.com/api/users/me/results/{id}".format(id=c2id)

        s = requests.get(url,headers=headers)

        if s.status_code == 200:
            workoutdata = s.json()['data']
            distance = workoutdata['distance']
            c2id = workoutdata['id']
            workouttype = workoutdata['type']
            verified = workoutdata['verified']
            startdatetime = iso8601.parse_date(workoutdata['date'])
            weightclass = workoutdata['weight_class']
            weightcategory = 'hwt'
            if weightclass == "L":
                weightcategory = 'lwt'
            # Concept2 reports elapsed time in tenths of a second.
            totaltime = workoutdata['time']/10.
            duration = totaltime_sec_to_string(totaltime)
            duration = datetime.datetime.strptime(duration,'%H:%M:%S.%f').time()

            # NOTE(review): ``tz`` is not defined anywhere in this module,
            # so this always raises NameError and falls back to 'UTC'.
            try:
                timezone_str = tz(workoutdata['timezone'])
            except:
                timezone_str = 'UTC'

            # Local date and start time in the workout's timezone.
            workoutdate = startdatetime.astimezone(
                pytz.timezone(timezone_str)
            ).strftime('%Y-%m-%d')
            starttime = startdatetime.astimezone(
                pytz.timezone(timezone_str)
            ).strftime('%H:%M:%S')

            # NOTE(review): verified / weightcategory / workoutdate /
            # starttime are computed above but never passed on -- confirm
            # whether create_c2_stroke_data_db should receive them.
            result = create_c2_stroke_data_db(
                distance,duration,workouttype,
                workoutid,starttimeunix,
                csvfilename,debug=debug,
            )

            return 1

        return 0

    return 0  # NOTE(review): unreachable -- both branches return above.
|
|
|
|
def getagegrouprecord(age,sex='male',weightcategory='hwt',
|
|
distance=2000,duration=None,indf=pd.DataFrame()):
|
|
|
|
if not duration:
|
|
try:
|
|
df = indf[indf['distance'] == distance]
|
|
except KeyError:
|
|
df = pd.DataFrame()
|
|
else:
|
|
duration = 60*int(duration)
|
|
df = indf[indf['duration'] == duration]
|
|
|
|
if not df.empty:
|
|
ages = df['age']
|
|
powers = df['power']
|
|
|
|
#poly_coefficients = np.polyfit(ages,powers,6)
|
|
fitfunc = lambda pars, x: np.abs(pars[0])*(1-x/max(120,pars[1]))-np.abs(pars[2])*np.exp(-x/np.abs(pars[3]))+np.abs(pars[4])*(np.sin(np.pi*x/max(50,pars[5])))
|
|
errfunc = lambda pars, x,y: fitfunc(pars,x)-y
|
|
|
|
p0 = [700,120,700,10,100,100]
|
|
|
|
p1, success = optimize.leastsq(errfunc,p0[:],
|
|
args = (ages,powers))
|
|
|
|
if success:
|
|
power = fitfunc(p1, float(age))
|
|
|
|
#power = np.polyval(poly_coefficients,age)
|
|
|
|
power = 0.5*(np.abs(power)+power)
|
|
else:
|
|
power = 0
|
|
else:
|
|
power = 0
|
|
|
|
return power
|
|
|
|
def polygon_to_path(polygon,debug=True):
    """Load the ordered vertices of a stored polygon and build a Path.

    *polygon* is a DB row whose first element is the polygon id.  The
    last vertex is dropped when building the matplotlib Path (it closes
    the ring).
    """
    polygon_id = polygon[0]
    query = "SELECT id, latitude, longitude FROM rowers_geopoint WHERE polygon_id = {pid} ORDER BY order_in_poly ASC".format(
        pid=polygon_id
    )
    # Debug mode talks to the development database.
    engine = create_engine(database_url_debug if debug else database_url,
                           echo=False)
    with engine.connect() as conn, conn.begin():
        points = conn.execute(query).fetchall()

    conn.close()
    engine.dispose()

    vertices = [[record[1], record[2]] for record in points]

    return path.Path(vertices[:-1])
|
|
|
|
@app.task(bind=True)
def handle_check_race_course(self,
                             f1,workoutid,courseid,
                             recordid,useremail,userfirstname,
                             **kwargs):
    """Validate a GPS workout against a virtual-race or course-test course.

    Loads the stroke file *f1*, resamples the GPS track to 100 ms, loads
    the course's gate polygons from the database, and searches for the
    fastest complete pass through all gates.  The matching
    rowers_virtualraceresult (or rowers_coursetestresult when
    kwargs['mode'] == 'coursetest') row *recordid* is updated with the
    outcome.  Returns 1 on a completed course, 2 when it was not
    completed (a failure email with the gate log is sent), 0 when the
    stroke file or its GPS columns are missing.
    """

    # Per-run log file; removed again before returning.
    logfile = 'courselog_{workoutid}_{courseid}.log'.format(workoutid=workoutid,courseid=courseid)

    if 'debug' in kwargs:
        debug = kwargs['debug']
    else:
        debug = False

    # Seconds to trim from the start of the track before matching.
    if 'splitsecond' in kwargs:
        splitsecond = kwargs['splitsecond']
    else:
        splitsecond = 0

    # Reference speed (m/s) for the points formula further down.
    if 'referencespeed' in kwargs:
        referencespeed = kwargs['referencespeed']
    else:
        referencespeed = 5.0

    # Official course length; 0 means "use the measured distance".
    if 'coursedistance' in kwargs:
        coursedistance = kwargs['coursedistance']
    else:
        coursedistance = 0

    mode = 'race'
    if 'mode' in kwargs:
        mode = kwargs['mode']

    columns = ['time',' latitude',' longitude','cum_dist']

    # The stroke file may be stored bare, as .csv, or gzipped.
    try:
        row = rdata(csvfile=f1)
    except IOError:
        try:
            row = rdata(csvfile=f1 + '.csv')
        except IOError:
            try:
                row = rdata(csvfile=f1 + '.gz')
            except IOError:
                return 0

    row.calc_dist_from_gps()
    rowdata = row.df
    rowdata['cum_dist'] = rowdata['gps_dist_calculated']

    # Without GPS columns there is nothing to match against the course.
    try:
        s = rowdata[' latitude']
    except KeyError:
        return 0

    rowdata.rename(columns = {
        ' latitude':'latitude',
        ' longitude':'longitude',
        'TimeStamp (sec)': 'time',
    }, inplace=True)

    rowdata.fillna(method='backfill',inplace=True)

    # Rebase time to zero and drop everything before splitsecond.
    rowdata['time'] = rowdata['time']-rowdata.loc[0,'time']
    rowdata = rowdata[rowdata['time']>splitsecond]
    # we may want to expand the time (interpolate)
    rowdata['dt'] = rowdata['time'].apply(
        lambda x: safetimedelta(x)
    )
    # Resample onto a regular 100 ms grid for gate-crossing detection.
    rowdata = rowdata.resample('100ms',on='dt').mean()
    rowdata = rowdata.interpolate()

    # initiate database engine

    if debug:
        engine = create_engine(database_url_debug, echo=False)
    else:
        engine = create_engine(database_url, echo=False)

    # get polygons (the course gates, in course order)
    query = "SELECT id,name FROM rowers_geopolygon WHERE course_id = {courseid} ORDER BY order_in_course ASC".format(
        courseid=courseid
    )

    with engine.connect() as conn, conn.begin():
        result = conn.execute(query)
        polygons = result.fetchall()

    conn.close()
    engine.dispose()

    paths = []
    for polygon in polygons:
        path = polygon_to_path(polygon,debug=debug)
        paths.append(path)

    startsecond = 0
    endsecond = rowdata['time'].max()

    # check how many times went through start polygon
    try:
        entrytimes,entrydistances = time_in_path(rowdata,paths[0],maxmin='max',getall=True,
                                                 name=polygons[0].name,logfile=logfile)
        with open(logfile,'ab') as f:
            t = time.localtime()
            timestamp = bytes('{t}'.format(t=time.strftime('%b-%d-%Y_%H%M', t)),'utf-8')
            f.write(b'\n')
            f.write(bytes('Course id {n}, Record id {m}'.format(n=courseid,m=recordid),'utf-8'))
            f.write(b'\n')
            f.write(timestamp)
            f.write(b' ')
            f.write(bytes('Found {n} entrytimes'.format(n=len(entrytimes)),'utf-8'))

    except InvalidTrajectoryError:
        # No usable pass through the start gate at all.
        entrytimes = []
        entrydistances = []
        coursecompleted = False
        coursemeters = 0
        coursetimeseconds = 0

    cseconds = []
    cmeters = []
    ccomplete = []
    startseconds = []
    endseconds = []

    # Evaluate a full course pass for every detected start-gate entry.
    for startt in entrytimes:
        with open(logfile,'ab') as f:
            t = time.localtime()
            timestamp = bytes('{t}'.format(t=time.strftime('%b-%d-%Y_%H%M', t)),'utf-8')
            f.write(b'\n')
            f.write(timestamp)
            f.write(b' ')
            f.write(bytes('Path starting at {t}'.format(t=startt),'utf-8'))
        # Restart matching 10 s before the detected entry.
        rowdata2 = rowdata[rowdata['time']>(startt-10.)]

        (
            coursetimeseconds,
            coursemeters,
            coursecompleted,

        ) = coursetime_paths(rowdata2,paths,polygons=polygons,logfile=logfile)
        (
            coursetimefirst,
            coursemetersfirst,
            firstcompleted
        ) = coursetime_first(
            rowdata2,paths,polygons=polygons,logfile=logfile)

        # Net time/distance between first and last gate.
        coursetimesecondsnet = coursetimeseconds-coursetimefirst
        coursemeters = coursemeters-coursemetersfirst

        cseconds.append(coursetimesecondsnet)
        cmeters.append(coursemeters)
        ccomplete.append(coursecompleted)
        endseconds.append(coursetimeseconds)
        startseconds.append(coursetimefirst)

    records = pd.DataFrame({
        'coursetimeseconds':cseconds,
        'coursecompleted': ccomplete,
        'coursemeters': cmeters,
        'startsecond':startseconds,
        'endsecond':endseconds,
    })

    # Keep only passes that crossed every gate.
    records = records[records['coursecompleted'] == True]

    if len(records):
        coursecompleted = True
        # Pick the fastest complete pass.
        mintime = records['coursetimeseconds'].min()
        coursetimeseconds = records[records['coursetimeseconds'] == mintime]['coursetimeseconds'].min()
        coursemeters = records[records['coursetimeseconds'] == mintime]['coursemeters'].min()
        startsecond = records[records['coursetimeseconds'] == mintime]['startsecond'].min()
        endsecond = records[records['coursetimeseconds'] == mintime]['endsecond'].min()
    else:
        coursecompleted = False

    points = 0
    if coursecompleted:
        if coursedistance == 0:
            coursedistance = coursemeters
        # 100 points at reference speed; faster passes score more.
        velo = coursedistance/coursetimeseconds
        points = 100*(2.-referencespeed/velo)
        query = 'UPDATE rowers_virtualraceresult SET coursecompleted = 1, duration = "{duration}", distance = {distance}, workoutid = {workoutid}, startsecond = {startsecond}, endsecond = {endsecond}, points={points} WHERE id={recordid}'.format(
            recordid=recordid,
            duration=totaltime_sec_to_string(coursetimeseconds),
            distance=int(coursemeters),
            points=points,
            workoutid=workoutid,
            startsecond=startsecond,
            endsecond=endsecond,
        )

        # Course tests live in a different table.
        if mode == 'coursetest':
            query = 'UPDATE rowers_coursetestresult SET coursecompleted = 1, duration = "{duration}", distance = {distance}, workoutid = {workoutid}, startsecond = {startsecond}, endsecond = {endsecond}, points={points} WHERE id={recordid}'.format(
                recordid=recordid,
                duration=totaltime_sec_to_string(coursetimeseconds),
                distance=int(coursemeters),
                points=points,
                workoutid=workoutid,
                startsecond=startsecond,
                endsecond=endsecond,
            )

        with engine.connect() as conn, conn.begin():
            result = conn.execute(query)

        conn.close()
        engine.dispose()

        os.remove(logfile)

        return 1

    else:
        # Course not completed: zero out the result row.
        query = 'UPDATE rowers_virtualraceresult SET coursecompleted = 0, duration = "{duration}", distance = {distance}, workoutid = {workoutid}, startsecond = {startsecond}, endsecond = {endsecond}, points={points} WHERE id={recordid}'.format(
            recordid=recordid,
            duration=totaltime_sec_to_string(0),
            distance=0,
            points=0.0,
            workoutid=workoutid,
            startsecond=startsecond,
            endsecond=endsecond,
        )

        if mode == 'coursetest':
            query = 'UPDATE rowers_coursetestresult SET coursecompleted = 0, duration = "{duration}", distance = {distance}, workoutid = {workoutid}, startsecond = {startsecond}, endsecond = {endsecond}, points={points} WHERE id={recordid}'.format(
                recordid=recordid,
                duration=totaltime_sec_to_string(0),
                distance=0,
                points=0,
                workoutid=workoutid,
                startsecond=startsecond,
                endsecond=endsecond,
            )

        with engine.connect() as conn, conn.begin():
            result = conn.execute(query)

        conn.close()
        engine.dispose()

        # add times for all gates to log file
        with open(logfile,'ab') as f:
            t = time.localtime()
            f.write(b'\n')
            f.write(b' ')
            f.write(b'--- LOG of all gate times---')

        # Log per-gate crossing times to help the user diagnose.
        for path,polygon in zip(paths,polygons):
            ( secs,meters,completed) = coursetime_paths(rowdata,
                [path],polygons=[polygon],logfile=logfile)
            with open(logfile,'ab') as f:
                line = " time: {t} seconds, distance: {m} meters".format(t=secs,m=meters)
                f.write(bytes(line,'utf-8'))

        # send email
        handle_sendemail_coursefail(
            useremail,userfirstname,logfile
        )

        os.remove(logfile)

        return 2

    return 0  # NOTE(review): unreachable -- both branches return above.
|
|
|
|
|
|
@app.task(bind=True)
def handle_getagegrouprecords(self,
                              df,
                              distances,durations,
                              age,sex,weightcategory,
                              **kwargs):
    """Rebuild the cached age-group record curve for one athlete profile.

    *df* is a JSON-serialized records table.  For every requested
    distance and duration, the age-group record power is estimated and
    the resulting (duration in seconds, power) pairs are written back
    via update_agegroup_db.  Returns 0 immediately when *sex* is
    'not specified', 1 otherwise.
    """
    wcdurations = []
    wcpower = []

    debug = kwargs.get('debug', False)

    df = pd.read_json(df)

    if sex == 'not specified':
        return 0

    # Fixed-distance records: derive the duration from the power estimate.
    for distance in distances:
        record_power = getagegrouprecord(
            age,
            sex=sex,
            distance=distance,
            weightcategory=weightcategory, indf=df,
        )
        velo = (record_power / 2.8) ** (1. / 3.)
        if not np.isinf(record_power) and not np.isnan(record_power):
            try:
                wcdurations.append(distance / velo)
                wcpower.append(record_power)
            except ZeroDivisionError:
                pass

    # Fixed-duration records (minutes in, seconds stored).
    for duration in durations:
        record_power = getagegrouprecord(
            age,
            sex=sex,
            duration=duration,
            weightcategory=weightcategory, indf=df
        )
        if not np.isinf(record_power) and not np.isnan(record_power):
            try:
                velo = (record_power / 2.8) ** (1. / 3.)
                distance = int(60 * duration * velo)
                wcdurations.append(60. * duration)
                wcpower.append(record_power)
            except ValueError:
                pass

    update_agegroup_db(age, sex, weightcategory, wcdurations, wcpower,
                       debug=debug)

    return 1
|
|
|
|
@app.task
def handle_get_garmin_file(client_id,
                           client_secret,
                           garmintoken,
                           garminrefreshtoken,
                           userid,
                           url,
                           filetype,
                           *args,
                           **kwargs):
    """Download an activity file from Garmin and hand it to the upload service.

    The Garmin activity id is parsed from the url's ``?id=`` query
    parameter (empty string when absent), the payload is streamed into a
    uniquely named media/ file, and the upload service is asked to
    ingest it as a water workout.
    """
    # Extract the numeric Garmin activity id from the download URL.
    match = re.match(r'.*\?id=(\d+)', url)
    garminid = int(match.group(1)) if match else ''

    garmin = OAuth1Session(client_id,
                           client_secret=client_secret,
                           resource_owner_key=garmintoken,
                           resource_owner_secret=garminrefreshtoken,
                           )

    filename = 'media/{code}_{id}.'.format(
        code=uuid4().hex[:16],
        id=userid
    ) + filetype

    # Stream the download straight to disk.
    response = garmin.get(url, stream=True)
    if response.status_code == 200:
        with open(filename, 'wb') as out_file:
            shutil.copyfileobj(response.raw, out_file)

    del response

    uploadoptions = {
        'secret': UPLOAD_SERVICE_SECRET,
        'user': userid,
        'file': filename,
        'title': '',
        'workouttype': 'water',
        'boattype': '1x',
        'garminid': garminid,
    }
    session = requests.session()
    session.headers.update({'Content-type': 'application/json', 'Accept': 'text/plain'})
    response = session.post(UPLOAD_SERVICE_URL, json=uploadoptions)

    return 1
|
|
|
|
@app.task(bind=True)
def long_test_task(self, aantal, debug=False, job=None, session_key=None):
    """Run the long-running demo task, keyed by this Celery request's id."""
    request = self.request
    return longtask.longtask(aantal, jobid=request.id, debug=debug,
                             session_key=session_key)
|
|
|
|
@app.task(bind=True)
def long_test_task2(self, aantal, **kwargs):
    """Variant of long_test_task that forwards arbitrary kwargs.

    An explicit 'jobkey' kwarg overrides the Celery request id as the
    progress-tracking key; either way it is passed on as 'jobid'.
    """
    job_id = kwargs.pop('jobkey', self.request.id)
    kwargs['jobid'] = job_id
    return longtask.longtask2(aantal, **kwargs)
|
|
|
|
|
|
|
|
|
|
# create workout
|
|
@app.task
def handle_new_workout_from_file(r, f2,
                                 workouttype='rower',
                                 boattype='1x',
                                 makeprivate=False,
                                 notes='', debug=False, title=''):
    """Create a workout record from an uploaded file.

    Bug fix: the previous revision passed an undefined name ``title`` to
    new_workout_from_file, raising NameError on every call.  ``title``
    is now an explicit keyword parameter (default '') so existing
    callers keep working.

    NOTE(review): ``boattype`` and ``debug`` are accepted but not
    forwarded -- confirm whether new_workout_from_file should receive
    them.
    """
    return new_workout_from_file(r, f2, workouttype,
                                 title, makeprivate, notes)
|
|
|
|
# process and update workouts
|
|
|
|
@app.task(bind=True)
def handle_update_empower(self,
                          useremail,
                          workoutdicts,
                          debug=False, **kwargs):
    """Recompute Empower Oarlock power data for a batch of workouts.

    For each workout dict (id, inboard, oarlength, boattype, filename)
    the stroke file is reloaded and update_empower() rewrites the power
    and work-per-stroke values; progress is POSTed to the site's
    record-progress endpoint after every workout.  Finishes by emailing
    the user unless kwargs['emailbounced'] is truthy.
    """

    job = self.request
    job_id = job.id

    # A 'jobkey' kwarg overrides the Celery task id as the progress key.
    if 'jobkey' in kwargs:
        job_id = kwargs.pop('jobkey')

    aantal = len(workoutdicts)
    counter = 0

    for workoutdict in workoutdicts:
        wid = workoutdict['id']
        inboard = workoutdict['inboard']
        oarlength = workoutdict['oarlength']
        boattype = workoutdict['boattype']
        f1 = workoutdict['filename']

        # oarlength consistency checks will be done in view

        # The stroke file may be stored bare, as .csv, or gzipped.
        havedata = 1
        try:
            rowdata = rdata(csvfile=f1)
        except IOError:
            try:
                rowdata = rdata(csvfile=f1 + '.csv')
            except IOError:
                try:
                    rowdata = rdata(csvfile=f1 + '.gz')
                except IOError:
                    havedata = 0

        # Debug mode reports progress to the development site.
        progressurl = SITE_URL
        siteurl = SITE_URL
        if debug:
            progressurl = SITE_URL_DEV
            siteurl = SITE_URL_DEV
        secret = PROGRESS_CACHE_SECRET

        kwargs['job_id'] = job_id

        progressurl += "/rowers/record-progress/"
        progressurl += job_id

        if havedata:
            success = update_empower(wid, inboard, oarlength, boattype,
                                     rowdata.df, f1, debug=debug)

        counter += 1

        # Percentage complete, posted to the progress cache endpoint.
        progress = 100.*float(counter)/float(aantal)

        post_data = {
            "secret":secret,
            "value":progress,
        }

        s = requests.post(progressurl, data=post_data)
        status_code = s.status_code

    subject = "Rowsandall.com Your Old Empower Oarlock data have been corrected"
    message = """
We have updated Power and Work per Stroke data according to the instructions by Nielsen-Kellerman.
"""

    email = EmailMessage(subject, message,
                         'Rowsandall <info@rowsandall.com>',
                         [useremail])

    # Skip notification when the user's address is known to bounce.
    if 'emailbounced' in kwargs:
        emailbounced = kwargs['emailbounced']
    else:
        emailbounced = False

    if not emailbounced:
        res = email.send()
    return 1
|
|
|
|
@app.task
def handle_calctrimp(id,
                     csvfilename,
                     ftp,
                     sex,
                     hrftp,
                     hrmax,
                     hrmin,
                     debug=False, **kwargs):
    """Compute training-load metrics for a workout via the gRPC service.

    Sends the stroke file plus the user's FTP / heart-rate parameters to
    the local rowing_workout_metrics service, sanitizes the returned
    values (NaN and implausible outliers become 0), and writes them to
    the rowers_workout row *id*.  Returns 0 when the file is missing or
    the service is unreachable, 1 on success.
    """
    if debug:
        engine = create_engine(database_url_debug, echo=False)
    else:
        engine = create_engine(database_url, echo=False)

    # Defaults in case the metrics call fails partway.
    tss = 0
    normp = 0
    trimp = 0
    hrtss = 0
    normv = 0
    normw = 0

    # check what the real file name is
    if os.path.exists(csvfilename):
        csvfile = csvfilename
    elif os.path.exists(csvfilename+'.csv'):
        csvfile = csvfilename+'.csv'
    elif os.path.exists(csvfilename+'.gz'):
        csvfile = csvfilename+'.gz'
    else:
        return 0

    # The gRPC service runs locally and needs an absolute path.
    csvfile = os.path.abspath(csvfile)

    with grpc.insecure_channel(
        target='localhost:50052',
        options=[('grpc.lb_policy_name', 'pick_first'),
                 ('grpc.enable_retries', 0), ('grpc.keepalive_timeout_ms',
                                              10000)]
    ) as channel:
        # Bail out early when the metrics service is down.
        try:
            grpc.channel_ready_future(channel).result(timeout=10)
        except grpc.FutureTimeoutError:
            return 0

        stub = metrics_pb2_grpc.MetricsStub(channel)
        req = metrics_pb2.WorkoutMetricsRequest(
            filename = csvfile,
            ftp=ftp,
            sex=sex,
            hrftp=hrftp,
            hrmax=hrmax,
            hrmin=hrmin,
        )
        response = stub.CalcMetrics(req,timeout=60)

        tss = response.tss
        normp = response.normp
        trimp = response.trimp
        normv = response.normv
        normw = response.normw
        hrtss = response.hrtss

    # Replace NaN metrics with 0 so the SQL update stays valid.
    if np.isnan(tss):
        tss = 0

    if np.isnan(normp):
        normp = 0

    if np.isnan(trimp):
        trimp = 0

    if np.isnan(normv):
        normv = 0

    if np.isnan(normw):
        normw = 0

    if np.isnan(hrtss):
        hrtss = 0

    # Discard implausible outliers rather than storing garbage.
    if tss > 1000:
        tss = 0

    if trimp > 1000:
        trimp = 0

    if normp > 2000:
        normp = 0

    if normv > 2000:
        normv = 0

    if normw > 10000:
        normw = 0

    if hrtss > 1000:
        hrtss = 0

    # All interpolated values are numeric at this point.
    query = 'UPDATE rowers_workout SET rscore = {tss}, normp = {normp}, trimp={trimp}, hrtss={hrtss}, normv={normv}, normw={normw} WHERE id={id}'.format(
        tss = int(tss),
        normp = int(normp),
        trimp = int(trimp),
        hrtss = int(hrtss),
        normv=normv,
        normw=normw,
        id = id,
    )

    with engine.connect() as conn, conn.begin():
        result = conn.execute(query)
    conn.close()
    engine.dispose()

    return 1
|
|
|
|
|
|
@app.task
def handle_updatedps(useremail, workoutids, debug=False, **kwargs):
    """Recompute distance-per-stroke for workouts, then notify by email.

    *workoutids* is an iterable of (workout id, csv file base name)
    pairs.  The data file may exist as-is or with a '.csv' / '.gz'
    suffix; workouts whose file cannot be found are skipped silently.
    """
    for wid, base in workoutids:
        rowdata = None
        for candidate in (base, base + '.csv', base + '.gz'):
            try:
                rowdata = rdata(csvfile=candidate)
                break
            except IOError:
                continue

        if rowdata is not None:
            update_strokedata(wid, rowdata.df, debug=debug)

    subject = "Rowsandall.com Your Distance per Stroke metric has been updated"
    message = "All your workouts now have Distance per Stroke"

    email = EmailMessage(subject, message,
                         'Rowsandall <info@rowsandall.com>',
                         [useremail])

    # Skip notification when the user's address is known to bounce.
    if not kwargs.get('emailbounced', False):
        res = email.send()

    return 1
|
|
|
|
import math
|
|
def sigdig(value, digits = 3):
    """Format *value* to *digits* significant figures.

    Non-numeric input and zero are returned unchanged (the magnitude
    computation fails), as are whole numbers.  Otherwise a string
    rounded to the requested number of significant digits is returned.
    """
    try:
        magnitude = int(math.floor(math.log10(math.fabs(value))))
    except (ValueError, TypeError):
        return value

    # Whole numbers pass through untouched.
    if value % 1 == 0:
        return value

    decimals = digits - magnitude - 1
    template = "%%.%df" % decimals if decimals > 0 else "%.0f"
    return template % round(value, decimals)
|
|
|
|
|
|
|
|
@app.task
def handle_send_email_alert(
        useremail, userfirstname, userlastname, rowerfirstname, alertname, stats, **kwargs):
    """Email a coach-alert summary report built from *stats*.

    Only the stats keys that are present end up in the report table;
    missing keys are skipped silently.
    """
    debug = kwargs.get('debug', False)
    othertexts = kwargs.get('othertexts', None)

    report = {}

    def _maybe(label, key, convert):
        # Add one report row when the stats key exists.
        try:
            report[label] = convert(stats[key])
        except KeyError:
            pass

    _maybe('Percentage', 'percentage', int)
    _maybe('Number of workouts', 'workouts', int)
    try:
        report['Data set'] = "{a} strokes out of {b}".format(
            a=stats['nr_strokes_qualifying'],
            b=stats['nr_strokes']
        )
    except KeyError:
        pass
    _maybe('Median', 'median', sigdig)
    _maybe('Median of qualifying strokes', 'median_q', sigdig)

    subject = "Rowsandall.com: {alertname} ({startdate} to {enddate})".format(
        startdate=stats['startdate'],
        enddate=stats['enddate'],
        alertname=alertname,
    )

    from_email = 'Rowsandall <info@rowsandall.com>'

    context = {
        'report': report,
        'first_name': userfirstname,
        'last_name': userlastname,
        'startdate': stats['startdate'],
        'enddate': stats['enddate'],
        'siteurl': siteurl,
        'rowerfirstname': rowerfirstname,
        'alertname': alertname,
        'othertexts': othertexts,
    }

    res = send_template_email(from_email, [useremail], subject,
                              'alertemail.html',
                              context, **kwargs)

    return 1
|
|
|
|
@app.task
def handle_send_email_transaction(
        username, useremail, amount, **kwargs):
    """Send the payment-confirmation email for a completed transaction."""
    debug = kwargs.get('debug', True)

    context = {
        'name': username,
        'siteurl': siteurl,
        'amount': amount
    }

    res = send_template_email('Rowsandall <admin@rowsandall.com>',
                              [useremail],
                              "Rowsandall Payment Confirmation",
                              'paymentconfirmationemail.html',
                              context, **kwargs)

    return 1
|
|
|
|
@app.task
def handle_send_email_failed_cancel(
        name, email, username, id, **kwargs):
    """Alert support that a subscription cancellation attempt failed."""
    debug = kwargs.get('debug', True)

    context = {
        'name': name,
        'siteurl': siteurl,
        'email': email,
        'username': username,
        'id': id,
    }

    res = send_template_email('Rowsandall <admin@rowsandall.com>',
                              ["support@rowsandall.com"],
                              "Rowsandall Subscription Cancellation Error",
                              'cancel_subscription_fail_email.html',
                              context, **kwargs)

    return 1
|
|
|
|
|
|
@app.task
def handle_send_email_subscription_update(
        username, useremail, planname, recurring, price, amount,
        end_of_billing_period, method, **kwargs):
    """Confirm a subscription change to the user and notify staff.

    *method* == 'down' selects the downgrade templates; anything else is
    treated as an upgrade/renewal payment confirmation.
    """
    debug = kwargs.get('debug', True)

    from_email = 'Rowsandall <admin@rowsandall.com>'

    context = {
        'name': username,
        'siteurl': siteurl,
        'amount': amount,
        'price': price,
        'planname': planname,
        'recurring': recurring,
        'end_of_billing_period': end_of_billing_period,
    }

    if method == 'down':
        template_name = 'subscription_downgrade_email.html'
        notification_template_name = 'subscription_downgrade_notification.html'
        subject = "Rowsandall Change Confirmation"
    else:
        template_name = 'subscription_update_email.html'
        notification_template_name = 'subscription_update_notification.html'
        subject = "Rowsandall Payment Confirmation"

    res = send_template_email(from_email, [useremail], subject,
                              template_name, context, **kwargs)

    res = send_template_email(from_email, ['info@rowsandall.com'],
                              'Subscription Update Notification',
                              notification_template_name, context, **kwargs)

    return 1
|
|
|
|
@app.task
def handle_send_email_subscription_create(
        username, useremail, planname, recurring, price, amount,
        end_of_billing_period, **kwargs):
    """Confirm a brand-new subscription to the user and notify staff."""
    debug = kwargs.get('debug', True)

    from_email = 'Rowsandall <admin@rowsandall.com>'

    context = {
        'name': username,
        'siteurl': siteurl,
        'amount': amount,
        'price': price,
        'planname': planname,
        'end_of_billing_period': end_of_billing_period,
        'recurring': recurring,
    }

    res = send_template_email(from_email, [useremail],
                              "Rowsandall Payment Confirmation",
                              'subscription_create_email.html',
                              context, **kwargs)

    res = send_template_email(from_email, ['info@rowsandall.com'],
                              'Subscription Update Notification',
                              'subscription_create_notification.html',
                              context, **kwargs)
    return 1
|
|
|
|
@app.task
def handle_sendemail_raceregistration(
        useremail, username, registeredname, racename, raceid, **kwargs):
    """Notify a race owner that a competitor registered for their challenge."""
    debug = kwargs.get('debug', True)

    subject = "A new competitor has registered for virtual challenge {n}".format(
        n=racename
    )

    context = {
        'username': username,
        'registeredname': registeredname,
        'siteurl': siteurl,
        'racename': racename,
        'raceid': raceid,
    }

    res = send_template_email('Rowsandall <info@rowsandall.com>',
                              [useremail],
                              subject,
                              'raceregisteredemail.html',
                              context, **kwargs)

    return 1
|
|
|
|
def handle_sendemail_coursefail(
        useremail, username, logfile, **kwargs):
    """Tell a user their course validation failed, attaching the log.

    Not a Celery task (no @app.task) -- called synchronously. The log
    file is attached and info@rowsandall.com is cc'ed.
    """
    debug = kwargs.get('debug', True)  # forwarded via **kwargs below

    subject = "The validation of your course has failed"
    from_email = 'Rowsandall <info@rowsandall.com>'

    context = {
        'username': username,
    }

    send_template_email(from_email, [useremail], subject,
                        'trajectoryfailemail.html', context,
                        cc=['info@rowsandall.com'],
                        attach_file=logfile,
                        **kwargs)
    return 1
|
|
|
|
@app.task
def handle_sendemail_optout(
        useremail, username, registeredname, racename, raceid, **kwargs):
    """Notify the organizer that a competitor opted out of social posts."""
    debug = kwargs.get('debug', True)  # forwarded via **kwargs below

    subject = "{name} has opted out from social media posts around challenge {n}".format(
        n=racename,
        name=registeredname)
    from_email = 'Rowsandall <info@rowsandall.com>'

    context = {
        'username': username,
        'registeredname': registeredname,
        'siteurl': siteurl,
        'racename': racename,
        'raceid': raceid,
    }

    send_template_email(from_email, [useremail], subject,
                        'raceoptoutsocialmedia.html', context, **kwargs)
    return 1
|
|
|
|
|
|
@app.task
def handle_sendemail_racesubmission(
        useremail, username, registeredname, racename, raceid, **kwargs):
    """Notify the organizer that a new race result was submitted."""
    debug = kwargs.get('debug', True)  # forwarded via **kwargs below

    subject = "A new result has been submitted for virtual challenge {n}".format(
        n=racename)
    from_email = 'Rowsandall <info@rowsandall.com>'

    context = {
        'username': username,
        'siteurl': siteurl,
        'registeredname': registeredname,
        'racename': racename,
        'raceid': raceid,
    }

    send_template_email(from_email, [useremail], subject,
                        'racesubmissionemail.html', context, **kwargs)
    return 1
|
|
|
|
@app.task
def handle_send_disqualification_email(
        useremail, username, reason, message, racename, **kwargs):
    """Inform a competitor that their race result was disqualified."""
    debug = kwargs.get('debug', True)  # forwarded via **kwargs below

    subject = "Your result for {n} has been disqualified on rowsandall.com".format(
        n=racename)
    from_email = 'Rowsandall <support@rowsandall.com>'

    context = {
        'username': username,
        'reason': reason,
        'siteurl': siteurl,
        # Strip any HTML from the organizer's free-text message.
        'message': htmlstrip(message),
        'racename': racename,
    }

    send_template_email(from_email, [useremail], subject,
                        'disqualificationemail.html', context, **kwargs)
    return 1
|
|
|
|
@app.task
def handle_send_withdraw_email(
        useremail, username, reason, message, racename, **kwargs):
    """Inform a competitor that their race result was removed."""
    debug = kwargs.get('debug', True)  # forwarded via **kwargs below

    subject = "Your result for {n} has been removed on rowsandall.com".format(
        n=racename)
    from_email = 'Rowsandall <support@rowsandall.com>'

    context = {
        'username': username,
        'reason': reason,
        'siteurl': siteurl,
        # Strip any HTML from the organizer's free-text message.
        'message': htmlstrip(message),
        'racename': racename,
    }

    send_template_email(from_email, [useremail], subject,
                        'withdraw_email.html', context, **kwargs)
    return 1
|
|
|
|
|
|
@app.task
def handle_sendemail_expired(useremail, userfirstname, userlastname,
                             expireddate, **kwargs):
    """Tell a user their paid account has expired; cc support."""
    debug = kwargs.get('debug', False)  # forwarded via **kwargs below

    subject = "Your rowsandall.com paid account has expired"
    from_email = 'Rowsandall <info@rowsandall.com>'

    context = {
        'first_name': userfirstname,
        'last_name': userlastname,
        'siteurl': siteurl,
        'expireddate': expireddate,
    }

    send_template_email(from_email, [useremail],
                        subject, 'accountexpiredemail.html',
                        context, cc=['support@rowsandall.com'], **kwargs)
    return 1
|
|
|
|
@app.task
def handle_sendemail_breakthrough(workoutid, useremail,
                                  userfirstname, userlastname,
                                  btvalues=pd.DataFrame().to_json(),
                                  **kwargs):
    """Congratulate a user on a CP-breakthrough workout.

    ``btvalues`` is a JSON-serialized DataFrame with columns delta,
    cpvalues and pwr (JSON so it survives the Celery broker).
    """
    debug = kwargs.get('debug', False)  # forwarded via **kwargs below

    frame = pd.read_json(btvalues)
    frame.sort_values('delta', axis=0, inplace=True)

    # Flatten the frame to a list of dicts for the email template.
    tablevalues = [
        {'delta': row.delta,
         'cpvalue': row.cpvalues,
         'pwr': row.pwr}
        for row in frame.itertuples()
    ]

    subject = "A breakthrough workout on rowsandall.com"
    from_email = 'Rowsandall <info@rowsandall.com>'

    context = {
        'first_name': userfirstname,
        'siteurl': siteurl,
        'workoutid': encoder.encode_hex(workoutid),
        'btvalues': tablevalues,
    }

    send_template_email(from_email, [useremail],
                        subject, 'breakthroughemail.html',
                        context, **kwargs)
    return 1
|
|
|
|
# send email when a breakthrough workout is uploaded
|
|
|
|
|
|
@app.task
def handle_sendemail_hard(workoutid, useremail,
                          userfirstname, userlastname,
                          btvalues=pd.DataFrame().to_json(),
                          debug=False, **kwargs):
    """Tell a user their workout was notably hard (near the CP curve).

    ``btvalues`` is a JSON-serialized DataFrame with columns delta,
    cpvalues and pwr.
    """
    # NOTE(review): this re-derivation clobbers the named ``debug``
    # parameter (a 'debug' keyword always binds to the parameter, never
    # to kwargs, so this is always False). Preserved for identical
    # behavior; debug is unused below anyway.
    debug = kwargs.get('debug', False)

    frame = pd.read_json(btvalues)
    frame.sort_values('delta', axis=0, inplace=True)

    # Flatten the frame to a list of dicts for the email template.
    tablevalues = [
        {'delta': row.delta,
         'cpvalue': row.cpvalues,
         'pwr': row.pwr}
        for row in frame.itertuples()
    ]

    subject = "That was a pretty hard workout on rowsandall.com"
    from_email = 'Rowsandall <info@rowsandall.com>'

    context = {
        'first_name': userfirstname,
        'siteurl': siteurl,
        'workoutid': encoder.encode_hex(workoutid),
        'btvalues': tablevalues,
    }

    send_template_email(from_email, [useremail],
                        subject, 'hardemail.html', context, **kwargs)
    return 1
|
|
|
|
|
|
# send email when user deletes account
|
|
@app.task
def handle_sendemail_userdeleted(name, email, debug=False, **kwargs):
    """Notify the site owner that a user deleted their account."""
    recipient = 'roosendaalsander@gmail.com'
    subject = 'User account deleted'

    body = 'Sander,\n\n'
    body += 'The user {name} ({email}) has just deleted his account'.format(
        name=name,
        email=email)

    msg = EmailMessage(subject, body,
                       'Rowsandall <info@rowsandall.com>',
                       [recipient])

    # Skip sending when the caller flags the address as bouncing.
    emailbounced = kwargs.get('emailbounced', False)
    if not emailbounced:
        msg.send()

    return 1
|
|
|
|
# send email to me when an unrecognized file is uploaded
|
|
@app.task
def handle_sendemail_unrecognized(unrecognizedfile, useremail,
                                  debug=False, **kwargs):
    """Forward an unrecognized upload to the site owner, then delete it.

    The uploaded file is attached (best-effort) and removed from disk
    afterwards. Fix: the cleanup used a bare ``except:``, which also
    swallowed SystemExit/KeyboardInterrupt; narrowed to OSError.
    """
    fullemail = 'roosendaalsander@gmail.com'
    subject = "Unrecognized file from Rowsandall.com"
    message = "Dear Sander,\n\n"
    message += "Please find attached a file that someone tried to upload to rowsandall.com. The file was not recognized as a valid file type.\n\n"
    message += "User Email " + useremail + "\n\n"
    message += "Best Regards, the Rowsandall Team"

    email = EmailMessage(subject, message,
                         'Rowsandall <info@rowsandall.com>',
                         [fullemail])

    # The file may already be missing; attach is best-effort.
    try:
        email.attach_file(unrecognizedfile)
    except IOError:
        pass

    emailbounced = kwargs.get('emailbounced', False)
    if not emailbounced:
        res = email.send()

    # Remove the uploaded file (narrowed from a bare except).
    try:
        os.remove(unrecognizedfile)
    except OSError:
        pass

    return 1
|
|
|
|
|
|
# send email to owner when an unrecognized file is uploaded
|
|
@app.task
def handle_sendemail_unrecognizedowner(useremail, userfirstname,
                                       debug=False, **kwargs):
    """Tell the uploader that their file type was not recognized."""
    subject = "Unrecognized file from Rowsandall.com"
    from_email = 'Rowsandall <info@rowsandall.com>'

    context = {
        'first_name': userfirstname,
        'siteurl': siteurl,
    }

    send_template_email(from_email, [useremail], subject,
                        'unrecognizedemail.html', context,
                        **kwargs)
    return 1
|
|
|
|
@app.task
def handle_sendemailics(first_name, last_name, email, icsfile, **kwargs):
    """Mail the user a calendar (.ics) file, then delete it from disk."""
    recipient = first_name + " " + last_name + " " + "<" + email + ">"
    subject = "Calendar File from Rowsandall.com"
    from_email = 'Rowsandall <info@rowsandall.com>'

    context = {
        'first_name': first_name,
        'siteurl': siteurl,
    }

    send_template_email(from_email, [recipient], subject,
                        'icsemail.html', context,
                        attach_file=icsfile, **kwargs)

    # The attachment has been sent; the temp file is no longer needed.
    os.remove(icsfile)
    return 1
|
|
|
|
|
|
@app.task
def handle_sendemailkml(first_name, last_name, email, kmlfile, **kwargs):
    """Mail the user a KML export, then delete it from disk."""
    recipient = first_name + " " + last_name + " " + "<" + email + ">"
    subject = "File from Rowsandall.com"
    from_email = 'Rowsandall <info@rowsandall.com>'

    context = {
        'first_name': first_name,
        'siteurl': siteurl,
    }

    send_template_email(from_email, [recipient], subject,
                        'kmlemail.html', context,
                        attach_file=kmlfile, **kwargs)

    # The attachment has been sent; the temp file is no longer needed.
    os.remove(kmlfile)
    return 1
|
|
|
|
# Send email with TCX attachment
|
|
@app.task
def handle_sendemailtcx(first_name, last_name, email, tcxfile, **kwargs):
    """Mail the user a TCX export, then delete it from disk."""
    recipient = first_name + " " + last_name + " " + "<" + email + ">"
    subject = "File from Rowsandall.com"
    from_email = 'Rowsandall <info@rowsandall.com>'

    context = {
        'first_name': first_name,
        'siteurl': siteurl,
    }

    send_template_email(from_email, [recipient], subject,
                        'tcxemail.html', context,
                        attach_file=tcxfile, **kwargs)

    # The attachment has been sent; the temp file is no longer needed.
    os.remove(tcxfile)
    return 1
|
|
|
|
|
|
@app.task
def handle_zip_file(emailfrom, subject, file, **kwargs):
    """Forward an uploaded zip to the workouts mailbox for processing."""
    message = "... zip processing ... "
    debug = kwargs.get('debug', False)

    if debug:
        print(message)

    msg = EmailMessage(subject, message,
                       emailfrom,
                       ['workouts@rowsandall.com'])
    msg.attach_file(file)
    if debug:
        print("attaching")

    msg.send()

    if debug:
        print("sent")
    # NOTE(review): presumably throttles successive zip submissions so
    # the mailbox processor keeps up -- confirm this is still required.
    time.sleep(60)
    return 1
|
|
|
|
# Send email with CSV attachment
|
|
|
|
@app.task
def handle_sendemailsummary(first_name, last_name, email, csvfile, **kwargs):
    """Mail a workout summary CSV to the user, then delete it.

    Fix: the cleanup used a bare ``except:`` which also swallowed
    SystemExit/KeyboardInterrupt; narrowed to OSError (covers a missing
    or locked file).
    """
    fullemail = first_name + " " + last_name + " " + "<" + email + ">"
    subject = "File from Rowsandall.com"

    d = {'first_name': first_name,
         'siteurl': siteurl,
         }

    from_email = 'Rowsandall <info@rowsandall.com>'

    res = send_template_email(from_email, [fullemail],
                              subject, 'summarymail.html', d,
                              attach_file=csvfile,
                              **kwargs)

    # Best-effort cleanup of the temporary CSV.
    try:
        os.remove(csvfile)
    except OSError:
        pass

    return 1
|
|
|
|
#from rowers.emails import sendemail
|
|
|
|
@app.task
def handle_sendemailcsv(first_name, last_name, email, csvfile, **kwargs):
    """Mail a CSV export to the user (the file is NOT deleted here)."""
    recipient = first_name + " " + last_name + " " + "<" + email + ">"
    subject = "File from Rowsandall.com"
    from_email = 'Rowsandall <info@rowsandall.com>'

    context = {
        'first_name': first_name,
        'siteurl': siteurl,
    }

    send_template_email(from_email, [recipient], subject,
                        'csvemail.html', context,
                        attach_file=csvfile, **kwargs)
    return 1
|
|
|
|
@app.task
def handle_sendemail_ical(first_name, last_name, email, url, icsfile, **kwargs):
    """Mail the user an iCal feed link plus the .ics file, then clean up.

    Fix: the cleanup previously ran ``os.remove(csvfile)`` -- an
    undefined name -- inside a bare ``except``, so the NameError was
    silently swallowed and the temporary ics file was never deleted.
    It now removes ``icsfile`` and only suppresses OSError.
    """
    fullemail = first_name + " " + last_name + " " + "<" + email + ">"
    subject = "Calendar File for your sessions from Rowsandall.com"

    debug = kwargs.get('debug', False)

    # Point email links at the dev site when running in debug mode.
    siteurl = SITE_URL
    if debug:
        siteurl = SITE_URL_DEV

    d = {'first_name': first_name,
         'siteurl': siteurl,
         'url': url,
         }

    from_email = 'Rowsandall <info@rowsandall.com>'

    res = send_template_email(from_email, [fullemail],
                              subject, 'icsemail.html', d,
                              attach_file=icsfile, **kwargs)

    # Best-effort cleanup of the temporary calendar file.
    try:
        os.remove(icsfile)
    except OSError:
        pass

    return 1
|
|
|
|
|
|
@app.task
def handle_sendemailfile(first_name, last_name, email, csvfile, **kwargs):
    """Mail an arbitrary file to the user, optionally deleting it after.

    Pass ``delete=True`` in kwargs to remove the file once sent.
    Fix: the cleanup used a bare ``except:``; narrowed to OSError.
    """
    fullemail = first_name + " " + last_name + " " + "<" + email + ">"
    subject = "File from Rowsandall.com"

    d = {'first_name': first_name,
         'siteurl': siteurl,
         }

    from_email = 'Rowsandall <info@rowsandall.com>'

    res = send_template_email(from_email, [fullemail],
                              subject, 'fileemail.html', d,
                              attach_file=csvfile, **kwargs)

    dodelete = kwargs.get('delete', False)
    if dodelete:
        # Best-effort cleanup of the attachment.
        try:
            os.remove(csvfile)
        except OSError:
            pass

    return 1
|
|
|
|
# Calculate wind and stream corrections for OTW rowing
|
|
|
|
|
|
@app.task(bind=True)
def handle_otwsetpower(self,f1, boattype, weightvalue,
                       first_name, last_name, email, workoutid,
                       **kwargs):
    """Compute on-the-water power for a workout and email the owner.

    Delegates the physics calculation to a local gRPC service on
    localhost:50051 (a legacy in-process path via
    rowingdata.otw_setpower is kept behind the ``goservice`` switch).
    Afterwards the stroke data is updated, breakthrough detection runs,
    and a confirmation email is sent. Returns 1 on success, 0 when the
    gRPC service is unreachable or reports failure.
    """
    # The Celery task id doubles as the progress-cache key unless the
    # caller supplied an explicit 'jobkey'.
    job = self.request
    job_id = job.id

    if 'jobkey' in kwargs:
        job_id = kwargs.pop('jobkey')
    # ps / ratio parameterize the breakthrough detection further below.
    if 'ps' in kwargs:
        ps = kwargs['ps']
    else:
        ps = [1,1,1,1]

    if 'ratio' in kwargs:
        ratio = kwargs['ratio']
    else:
        ratio = 1.0
    if 'debug' in kwargs:
        debug = kwargs['debug']
    else:
        debug = False

    kwargs['jobid'] = job_id

    weightvalue = float(weightvalue)

    # check what the real file name is
    # NOTE(review): if none of the three variants exists, csvfile is
    # unbound and the abspath call below raises NameError -- confirm
    # callers guarantee the file is present.
    if os.path.exists(f1):
        csvfile = f1
    elif os.path.exists(f1+'.csv'):
        csvfile = f1+'.csv'
    elif os.path.exists(f1+'.gz'):
        csvfile = f1+'.gz'

    csvfile = os.path.abspath(csvfile)

    # do something with boat type
    # NOTE(review): the nested handlers retry the exact same call, so
    # this effectively attempts the read three times before raising.
    try:
        rowdata = rdata(csvfile)
    except IOError:
        try:
            rowdata = rdata(csvfile)
        except IOError:
            rowdata = rdata(csvfile)

    # do calculation, but do not overwrite NK Empower Power data
    # (a non-zero 'wash' column indicates measured power is present).
    powermeasured = False
    try:
        w = rowdata.df['wash']
        if w.mean() != 0:
            powermeasured = True
    except KeyError:
        pass

    # Point progress reporting and email links at dev when debugging.
    progressurl = SITE_URL
    siteurl = SITE_URL
    if debug:
        progressurl = SITE_URL_DEV
        siteurl = SITE_URL_DEV
    secret = PROGRESS_CACHE_SECRET

    progressurl += "/rowers/record-progress/"
    progressurl += job_id+'/'

    # Hard-wired switch: True = external Go gRPC service, False = legacy
    # in-process calculation (kept as a fallback).
    goservice = True

    if goservice:
        # do something (this should return from go service)
        with grpc.insecure_channel(
            target='localhost:50051',
            options=[('grpc.lb_policy_name', 'pick_first'),
                     ('grpc.enable_retries', 0), ('grpc.keepalive_timeout_ms',
                                                  10000)]
        ) as channel:
            # Bail out early if the service is down.
            try:
                grpc.channel_ready_future(channel).result(timeout=10)
            except grpc.FutureTimeoutError:
                return 0

            stub = calculator_pb2_grpc.PowerStub(channel)
            # Long timeout: the service crunches the whole stroke file.
            response = stub.CalcPower(calculator_pb2.WorkoutPowerRequest(
                filename = csvfile,
                boattype = boattype,
                crewmass = weightvalue,
                powermeasured = powermeasured,
                progressurl = progressurl,
                secret = secret,
                silent = False,
            ),timeout=1200)
            result = response.result
        if result == 0:
            # send failure email
            return 0
        # Reload the file the service rewrote (same triple-retry
        # pattern as above).
        try:
            rowdata = rdata(csvfile)
        except IOError:
            try:
                rowdata = rdata(csvfile)
            except IOError:
                rowdata = rdata(csvfile)

    else:
        # Legacy in-process path: look up the rigging profile for the
        # boat class, defaulting to a single scull.
        boatfile = {
            '1x': 'static/rigging/1x.txt',
            '2x': 'static/rigging/2x.txt',
            '2-': 'static/rigging/2-.txt',
            '4x': 'static/rigging/4x.txt',
            '4-': 'static/rigging/4-.txt',
            '8+': 'static/rigging/8+.txt',
        }
        try:
            rg = rowingdata.getrigging(boatfile[boattype])
        except KeyError:
            rg = rowingdata.getrigging('static/rigging/1x.txt')

        # determine cache file name
        physics_cache = 'media/'+str(boattype)+'_'+str(int(weightvalue))

        # NOTE(review): 'usetable' is not defined on this branch, so the
        # legacy path would raise NameError if ever re-enabled --
        # confirm before flipping goservice to False.
        rowdata.otw_setpower(skiprows=5, mc=weightvalue, rg=rg,
                             powermeasured=powermeasured,
                             progressurl=progressurl,
                             secret=secret,
                             silent=True,
                             usetable=usetable,storetable=physics_cache,
                             )

        # save data
        rowdata.write_csv(f1, gzip=True)

    # continuing for both
    update_strokedata(workoutid, rowdata.df, debug=debug)

    # Total duration; add any elapsed-time offset from the first sample.
    totaltime = rowdata.df['TimeStamp (sec)'].max(
    ) - rowdata.df['TimeStamp (sec)'].min()
    try:
        totaltime = totaltime + rowdata.df.loc[0, ' ElapsedTime (sec)']
    except KeyError:
        pass
    df = getsmallrowdata_db(
        ['power', 'workoutid', 'time'], ids=[workoutid],
        debug=debug)
    thesecs = totaltime
    maxt = 1.05 * thesecs
    logarr = datautils.getlogarr(maxt)
    dfgrouped = df.groupby(['workoutid'])
    delta, cpvalues, avgpower = datautils.getcp(dfgrouped, logarr)

    #delta,cpvalues,avgpower = datautils.getsinglecp(rowdata.df)
    # Breakthrough check against the user's CP curve; fires a
    # congratulation email when the new data beats it.
    res, btvalues, res2 = utils.isbreakthrough(
        delta, cpvalues, ps[0], ps[1], ps[2], ps[3], ratio)
    if res:
        handle_sendemail_breakthrough(
            workoutid, email,
            first_name,
            last_name, btvalues=btvalues.to_json())

    # Finally, tell the user the calculation is done.
    subject = "Your OTW Physics Calculations are ready"
    from_email = 'Rowsandall <info@rowsandall.com>'
    fullemail = first_name + " " + last_name + " " + "<" + email + ">"

    d = {
        'first_name':first_name,
        'siteurl':siteurl,
        'workoutid':encoder.encode_hex(workoutid),
        }

    res = send_template_email(from_email,[fullemail],
                              subject,'otwpoweremail.html',d,
                              **kwargs)

    return 1
|
|
|
|
|
|
@app.task
def handle_updateergcp(rower_id, workoutfilenames, debug=False, **kwargs):
    """Recompute a rower's cumulative erg CP curve from workout files."""
    therows = []
    for f1 in workoutfilenames:
        rowdata = 0
        # The stored file may carry no suffix, '.csv', or '.gz'.
        for candidate in (f1, f1 + '.csv', f1 + '.gz'):
            try:
                rowdata = rdata(csvfile=candidate)
            except IOError:
                continue
            break
        if rowdata != 0:
            therows.append(rowdata)

    cpdata = rowingdata.cumcpdata(therows)
    cpdata.columns = cpdata.columns.str.lower()

    updatecpdata_sql(rower_id, cpdata['delta'], cpdata['cp'],
                     table='ergcpdata', distance=cpdata['distance'],
                     debug=debug)

    return 1
|
|
|
|
def cp_from_workoutids(workoutids,debug=False):
    """Estimate characteristic powers from stored stroke data.

    Fits a two-component critical-power model to the per-duration power
    maxima of the given workouts and returns the tuple
    ``(powerfourmin, power2k, powerhour)`` in watts, or ``(0, 0, 0)``
    when no stroke data is available.
    """
    columns = ['power','workoutid','time']
    df = getsmallrowdata_db(columns,ids=workoutids,debug=debug)
    df.dropna(inplace=True,axis=0)

    if df.empty:
        # change this
        return 0,0,0

    # df is not empty. We continue
    dfgrouped = df.groupby(['workoutid'])
    # Stored time is in milliseconds; pad the duration grid by 5%.
    maxt = 1.05*df['time'].max()/1000.

    logarr = datautils.getlogarr(maxt)

    delta,cpvalue,avgpower = datautils.getcp(dfgrouped,logarr)

    powerdf = pd.DataFrame({
        'Delta':delta,
        'CP':cpvalue,
        })

    # Keep only positive powers and, per duration, the highest value.
    powerdf = powerdf[powerdf['CP']>0]
    powerdf.dropna(axis=0,inplace=True)
    powerdf.sort_values(['Delta','CP'],ascending=[1,0],inplace=True)
    powerdf.drop_duplicates(subset='Delta',keep='first',inplace=True)

    p1,fitt,fitpower,ratio = datautils.cpfit(powerdf)
    # This is code duplication from datautils -- correct asap
    fitfunc = lambda pars,x: abs(pars[0])/(1+(x/abs(pars[2]))) + abs(pars[1])/(1+(x/abs(pars[3])))

    # Evaluate the fitted model at 4 minutes and 1 hour.
    powerfourmin = fitfunc(p1,240.)
    powerhour = fitfunc(p1,3600.)

    # 2k power
    # First guess: boat speed from 4-minute power via P = 2.8 v^3.
    velofourmin = (powerfourmin/2.8)**(1./3.)
    dfourmin = 240.*velofourmin
    dratio = 2000./dfourmin
    pacefourmin = 500./velofourmin

    # assume 5 sec per doubling drop
    pace2k = pacefourmin + 5.*np.log10(dratio)/np.log10(2.)
    velo2k = 500./pace2k
    t2k = 2000./velo2k
    pwr2k = fitfunc(p1,t2k)
    # Fixed-point refinement: re-derive speed from the fitted power a
    # couple of times so duration and power converge for the 2k.
    velo2 = (pwr2k/2.8)**(1./3.)
    # Guard against a degenerate fit producing NaN or non-positive speed.
    if np.isnan(velo2) or velo2 <= 0:
        velo2 = 1.0

    t2 = 2000./velo2

    pwr2k = fitfunc(p1,t2)

    velo3 = (pwr2k/2.8)**(1./3.)

    t3 = 2000./velo3

    power2k = fitfunc(p1,t3)

    return powerfourmin,power2k,powerhour
|
|
|
|
|
|
@app.task
def handle_updatefitnessmetric(user_id, mode, workoutids, debug=False,
                               **kwargs):
    """Recompute and store fitness metrics (4-min / 2k / 1-hour power).

    A placeholder row with -1 sentinels is written first (doclean=False),
    then the real values are computed from the workouts and written with
    doclean=True when they are all positive. Returns the result of the
    final write, or 0 when the computation failed.

    Improvement: the metric-row literal was duplicated verbatim; it is
    now built by a single local helper.
    """
    def _metric_row(powerfourmin, power2k, powerhour):
        # One row for the fitnessmetric table.
        return {
            'user_id': user_id,
            'PowerFourMin': powerfourmin,
            'PowerTwoK': power2k,
            'PowerOneHour': powerhour,
            'workoutmode': mode,
            'last_workout': max(workoutids),
            'date': timezone.now().strftime('%Y-%m-%d'),
        }

    # Write the sentinel row first so a failed computation still leaves
    # a trace for this user/mode.
    result = fitnessmetric_to_sql(_metric_row(-1, -1, -1),
                                  debug=debug, doclean=False)

    powerfourmin, power2k, powerhour = cp_from_workoutids(workoutids,
                                                          debug=debug)

    if powerfourmin > 0 and power2k > 0 and powerhour > 0:
        result = fitnessmetric_to_sql(
            _metric_row(powerfourmin, power2k, powerhour),
            debug=debug, doclean=True)
    else:
        result = 0

    return result
|
|
|
|
@app.task
def handle_updatecp(rower_id, workoutids, debug=False, table='cpdata',
                    **kwargs):
    """Refresh a rower's critical-power table from the given workouts."""
    df = getsmallrowdata_db(['power', 'workoutid', 'time'],
                            ids=workoutids, debug=debug)

    if df.empty:
        return 0

    # Log-spaced duration grid up to ~105% of the longest recorded time
    # (stored time is in milliseconds).
    maxt = 1.05 * df['time'].max() / 1000.
    logarr = datautils.getlogarr(maxt)

    delta, cpvalue, avgpower = datautils.getcp(df.groupby(['workoutid']),
                                               logarr)

    updatecpdata_sql(rower_id, delta, cpvalue, debug=debug, table=table)
    return 1
|
|
|
|
@app.task
def handle_makeplot(f1, f2, t, hrdata, plotnr, imagename,
                    debug=False, **kwargs):
    """Render one workout plot to ``static/plots/<imagename>``.

    ``plotnr`` selects the plot type (time/distance plots, pie charts,
    power plots); ``hrdata`` carries the rower's HR and power zones.
    Returns ``imagename`` on success, 0 when no figure was produced.

    Fixes: ``fig1`` is initialized to None so an unsupported ``plotnr``
    returns 0 instead of raising NameError at the ``fig1 is None``
    check; the downsampling bin variable no longer shadows the ``bin``
    builtin.
    """
    # Rebuild the rower profile (HR zones / power zones) from hrdata.
    hrmax = hrdata['hrmax']
    hrut2 = hrdata['hrut2']
    hrut1 = hrdata['hrut1']
    hrat = hrdata['hrat']
    hrtr = hrdata['hrtr']
    hran = hrdata['hran']
    ftp = hrdata['ftp']
    powerzones = deserialize_list(hrdata['powerzones'])
    powerperc = np.array(deserialize_list(hrdata['powerperc'])).astype(float)

    rr = rowingdata.rower(hrmax=hrmax, hrut2=hrut2,
                          hrut1=hrut1, hrat=hrat,
                          hrtr=hrtr, hran=hran,
                          ftp=ftp, powerperc=powerperc,
                          powerzones=powerzones)
    try:
        row = rdata(csvfile=f2, rower=rr)
    except IOError:
        row = rdata(csvfile=f2 + '.gz', rower=rr)

    # A mean power above 50 W indicates real (e.g. NK Empower) power data.
    try:
        haspower = row.df[' Power (watts)'].mean() > 50
    except (TypeError, KeyError):
        haspower = False

    oterange = kwargs.pop('oterange', [85, 240])
    otwrange = kwargs.pop('otwrange', [85, 185])

    # Downsample long sessions so time/distance plots stay ~1200 points.
    nr_rows = len(row.df)
    if (plotnr in [1, 2, 4, 5, 8, 11, 9, 12]) and (nr_rows > 1200):
        binsize = int(nr_rows / 1200.)
        df = row.df.groupby(lambda x: x / binsize).mean()
        row.df = df
        nr_rows = len(row.df)

    fig1 = None  # stays None for unsupported plot numbers
    if (plotnr == 1):
        fig1 = row.get_timeplot_erg(t, pacerange=oterange, **kwargs)
    elif (plotnr == 2):
        fig1 = row.get_metersplot_erg(t, pacerange=oterange, **kwargs)
    elif (plotnr == 3):
        # t may be None; fall back to a bare title.
        try:
            t += ' - Heart Rate Distribution'
        except TypeError:
            t = 'Heart Rate Distribution'
        fig1 = row.get_piechart(t, **kwargs)
    elif (plotnr == 4):
        if haspower:
            fig1 = row.get_timeplot_otwempower(t, pacerange=otwrange, **kwargs)
        else:
            fig1 = row.get_timeplot_otw(t, pacerange=otwrange, **kwargs)
    elif (plotnr == 5):
        if haspower:
            fig1 = row.get_metersplot_otwempower(t, pacerange=otwrange, **kwargs)
        else:
            fig1 = row.get_metersplot_otw(t, pacerange=otwrange, **kwargs)
    elif (plotnr == 6):
        t += ' - Heart Rate Distribution'
        fig1 = row.get_piechart(t, **kwargs)
    elif (plotnr == 7) or (plotnr == 10):
        fig1 = row.get_metersplot_erg2(t, **kwargs)
    elif (plotnr == 8) or (plotnr == 11):
        fig1 = row.get_timeplot_erg2(t, **kwargs)
    elif (plotnr == 9) or (plotnr == 12):
        fig1 = row.get_time_otwpower(t, pacerange=otwrange, **kwargs)
    elif (plotnr == 13) or (plotnr == 16):
        t += ' - Power Distribution'
        fig1 = row.get_power_piechart(t, **kwargs)

    if fig1 is None:
        return 0

    canvas = FigureCanvas(fig1)
    canvas.print_figure('static/plots/' + imagename)

    # Release the figure explicitly; long-lived workers leak otherwise.
    plt.close(fig1)
    fig1.clf()
    gc.collect()

    return imagename
|
|
|
|
# Team related remote tasks
|
|
|
|
@app.task
def handle_sendemail_coachrequest(email, name, code, coachname,
                                  debug=False, **kwargs):
    """Invite a coach (by email) to add an athlete to their roster.

    Fix: removed a dead, misspelled ``form_email`` assignment (a typo of
    ``from_email``) that was never read.
    """
    fullemail = email
    subject = 'Invitation to add {n} to your athletes'.format(n=name)
    from_email = 'Rowsandall <info@rowsandall.com>'

    # Point email links at the dev site when running in debug mode.
    siteurl = SITE_URL
    if debug:
        siteurl = SITE_URL_DEV

    d = {
        'name': name,
        'coach': coachname,
        'code': code,
        'siteurl': siteurl
    }

    res = send_template_email(from_email, [fullemail],
                              subject, 'coachrequestemail.html', d,
                              **kwargs)

    return 1
|
|
|
|
@app.task
def handle_sendemail_coachoffer_rejected(coachemail, coachname, name,
                                         debug=False, **kwargs):
    """Tell a coach that an athlete rejected their coaching offer."""
    subject = '{n} has rejected your offer to be his coach on rowsandall.com'.format(n=name)
    from_email = 'Rowsandall <info@rowsandall.com>'
    siteurl = SITE_URL_DEV if debug else SITE_URL

    context = {
        'name': name,
        'coach': coachname,
        'siteurl': siteurl,
    }

    send_template_email(from_email, [coachemail], subject,
                        'coachofferrejectedemail.html',
                        context,
                        **kwargs)
    return 1
|
|
|
|
@app.task
def handle_sendemail_coachrequest_rejected(email, coachname, name,
                                           debug=False, **kwargs):
    """Tell an athlete that a coach rejected their coaching request."""
    subject = '{n} has rejected your coaching request on rowsandall.com'.format(n=coachname)
    from_email = 'Rowsandall <info@rowsandall.com>'
    siteurl = SITE_URL_DEV if debug else SITE_URL

    context = {
        'name': name,
        'coach': coachname,
        'siteurl': siteurl,
    }

    send_template_email(from_email, [email], subject,
                        'coachrequestrejectedemail.html',
                        context,
                        **kwargs)
    return 1
|
|
|
|
@app.task
def handle_sendemail_coachrequest_accepted(email, coachname, name,
                                           debug=False, **kwargs):
    """Tell an athlete that a coach accepted their coaching request."""
    subject = '{n} has accepted your coaching request on rowsandall.com'.format(n=coachname)
    from_email = 'Rowsandall <info@rowsandall.com>'
    siteurl = SITE_URL_DEV if debug else SITE_URL

    context = {
        'name': name,
        'coach': coachname,
        'siteurl': siteurl,
    }

    send_template_email(from_email, [email], subject,
                        'coachrequestacceptedemail.html',
                        context,
                        **kwargs)
    return 1
|
|
|
|
@app.task
def handle_sendemail_coachoffer_accepted(coachemail, coachname, name,
                                         debug=False, **kwargs):
    """Tell a coach that an athlete accepted their coaching offer."""
    subject = '{n} has accepted your coaching offer on rowsandall.com'.format(n=name)
    from_email = 'Rowsandall <info@rowsandall.com>'
    siteurl = SITE_URL_DEV if debug else SITE_URL

    context = {
        'name': name,
        'coach': coachname,
        'siteurl': siteurl,
    }

    send_template_email(from_email, [coachemail], subject,
                        'coachofferacceptedemail.html',
                        context,
                        **kwargs)
    return 1
|
|
|
|
@app.task
def handle_sendemail_coacheerequest(email, name, code, coachname,
                                    debug=False, **kwargs):
    """Ask an athlete to grant a coach access to their data."""
    subject = '{n} requests coach access to your data on rowsandall.com'.format(n=coachname)
    from_email = 'Rowsandall <info@rowsandall.com>'
    siteurl = SITE_URL_DEV if debug else SITE_URL

    context = {
        'name': name,
        'coach': coachname,
        'code': code,
        'siteurl': siteurl
    }

    send_template_email(from_email, [email], subject,
                        'coacheerequestemail.html', context,
                        **kwargs)
    return 1
|
|
|
|
@app.task
def handle_sendemail_invite(email, name, code, teamname, manager,
                            debug=False, **kwargs):
    """Invite a user to join a team, including the acceptance code."""
    subject = 'Invitation to join team ' + teamname
    siteurl = SITE_URL_DEV if debug else SITE_URL

    context = {
        'name': name,
        'manager': manager,
        'code': code,
        'teamname': teamname,
        'siteurl': siteurl
    }

    from_email = 'Rowsandall <info@rowsandall.com>'

    send_template_email(from_email, [email], subject,
                        'teaminviteemail.html', context,
                        **kwargs)
    return 1
|
|
|
|
|
|
@app.task
def handle_sendemailnewresponse(first_name, last_name,
                                email,
                                commenter_first_name,
                                commenter_last_name,
                                comment,
                                workoutname, workoutid, commentid,
                                debug=False, **kwargs):
    """Notify a user that someone responded to a comment thread.

    Fix: the template context passed the raw ``workoutid`` while the
    sibling task using the same template (handle_sendemailnewcomment)
    passes ``encoder.encode_hex(workoutid)``; links built from the raw
    id would not match the site's hex URLs. Now encoded consistently.
    """
    fullemail = email
    from_email = 'Rowsandall <info@rowsandall.com>'
    subject = 'New comment on session ' + workoutname

    comment = u'' + comment  # force text type (py2 legacy)

    # Point email links at the dev site when running in debug mode.
    siteurl = SITE_URL
    if debug:
        siteurl = SITE_URL_DEV

    sessiontype = kwargs.pop('sessiontype', 'workout')

    hexid = encoder.encode_hex(workoutid)
    commentlink = kwargs.pop(
        'commentlink',
        '/rowers/workout/{workoutid}/comment/'.format(workoutid=hexid))

    d = {
        'first_name': first_name,
        'commenter_first_name': commenter_first_name,
        'commenter_last_name': commenter_last_name,
        'comment': comment,
        'workoutname': workoutname,
        'siteurl': siteurl,
        'workoutid': hexid,
        'commentid': commentid,
        'sessiontype': sessiontype,
        'commentlink': commentlink,
    }

    res = send_template_email(from_email,
                              [fullemail],
                              subject, 'teamresponseemail.html', d, **kwargs)

    return 1
|
|
|
|
|
|
@app.task
def handle_sendemailnewcomment(first_name,
                               last_name,
                               email,
                               commenter_first_name,
                               commenter_last_name,
                               comment, workoutname,
                               workoutid,
                               debug=False, **kwargs):
    """Notify a workout owner that someone commented on their session."""
    from_email = 'Rowsandall <info@rowsandall.com>'
    subject = 'New comment on session ' + workoutname

    comment = u'' + comment  # force text type (py2 legacy)

    siteurl = SITE_URL_DEV if debug else SITE_URL
    sessiontype = kwargs.pop('sessiontype', 'workout')

    # Site URLs use the hex-encoded workout id.
    hexid = encoder.encode_hex(workoutid)
    commentlink = kwargs.pop(
        'commentlink',
        '/rowers/workout/{workoutid}/comment/'.format(workoutid=hexid))

    context = {
        'first_name': first_name,
        'commenter_first_name': commenter_first_name,
        'commenter_last_name': commenter_last_name,
        'comment': comment,
        'workoutname': workoutname,
        'siteurl': siteurl,
        'workoutid': hexid,
        'sessiontype': sessiontype,
        'commentlink': commentlink,
    }

    send_template_email(from_email, [email], subject,
                        'teamresponseemail.html', context, **kwargs)
    return 1
|
|
|
|
@app.task
def handle_send_template_email(template, email, fromemail, rowername,
                               subject, message, debug=False, **kwargs):
    """Send a templated message to info@, cc the sender, bcc the rower.

    Fix: now returns 1 like every other email task in this module
    (previously fell off the end and returned None).
    """
    fullemail = [email]
    d = {
        'message': message,
        'rowername': rowername,
    }

    res = send_template_email('Rowsandall <info@rowsandall.com>',
                              ['info@rowsandall.com'], subject,
                              template, d, cc=[fromemail], bcc=fullemail,
                              **kwargs)
    return 1
|
|
@app.task
def handle_sendemail_message(email, fromemail, rowername, message,
                             teamname, managername,
                             debug=False, **kwargs):
    """Deliver a team-manager message to a team member."""
    subject = 'New message from team ' + teamname
    from_email = 'Rowsandall <info@rowsandall.com>'

    context = {
        'rowername': rowername,
        'teamname': teamname,
        'managername': managername,
        'message': message,
    }

    send_template_email(from_email, [email], subject,
                        'teammessage.html', context, **kwargs)
    return 1
|
|
|
|
@app.task
def handle_sendemail_request(email, name, code, teamname, requestor, id,
                             debug=False, **kwargs):
    """Notify a team manager of a membership request (with accept code)."""
    subject = 'Request to join team ' + teamname
    from_email = 'Rowsandall <info@rowsandall.com>'
    siteurl = SITE_URL_DEV if debug else SITE_URL

    context = {
        'requestor': requestor,
        'teamname': teamname,
        'code': code,
        'siteurl': siteurl,
        'id': id,
        'first_name': name,
    }

    send_template_email(from_email, [email], subject,
                        'teamrequestemail.html', context, **kwargs)
    return 1
|
|
|
|
|
|
@app.task
def handle_sendemail_request_accept(email, name, teamname, managername,
                                    debug=False, **kwargs):
    """Email an applicant that their request to join a team was accepted.

    Args:
        email: recipient address.
        name: recipient's first name for the template.
        teamname: team that accepted the request.
        managername: manager who accepted it.
        debug: when True, links point at the development site URL.
        **kwargs: forwarded to send_template_email.

    Returns:
        1 on completion.
    """
    subject = 'Welcome to ' + teamname
    siteurl = SITE_URL_DEV if debug else SITE_URL
    context = {
        'first_name': name,
        'managername': managername,
        'teamname': teamname,
        'siteurl': siteurl,
    }
    send_template_email('Rowsandall <info@rowsandall.com>', [email], subject,
                        'teamwelcomeemail.html', context, **kwargs)
    return 1
@app.task
def handle_sendemail_request_reject(email, name, teamname, managername,
                                    debug=False, **kwargs):
    """Email an applicant that their request to join a team was rejected.

    Args:
        email: recipient address.
        name: recipient's first name for the template.
        teamname: team that rejected the request.
        managername: manager who rejected it.
        debug: when True, links point at the development site URL.
        **kwargs: forwarded to send_template_email.

    Returns:
        1 on completion.
    """
    subject = 'Your application to ' + teamname + ' was rejected'
    siteurl = SITE_URL_DEV if debug else SITE_URL
    context = {
        'first_name': name,
        'managername': managername,
        'teamname': teamname,
        'siteurl': siteurl,
    }
    send_template_email('Rowsandall <info@rowsandall.com>', [email], subject,
                        'teamrejectemail.html', context, **kwargs)
    return 1
@app.task
def handle_sendemail_member_dropped(email, name, teamname, managername,
                                    debug=False, **kwargs):
    """Email a rower that a manager removed them from a team (drop flow).

    Args:
        email: recipient address.
        name: recipient's first name for the template.
        teamname: team the member was removed from.
        managername: manager who removed them.
        debug: when True, links point at the development site URL.
        **kwargs: forwarded to send_template_email.

    Returns:
        1 on completion.
    """
    subject = 'You were removed from ' + teamname
    siteurl = SITE_URL_DEV if debug else SITE_URL
    context = {
        'first_name': name,
        'managername': managername,
        'teamname': teamname,
        'siteurl': siteurl,
    }
    send_template_email('Rowsandall <info@rowsandall.com>', [email], subject,
                        'teamdropemail.html', context, **kwargs)
    return 1
@app.task
def handle_sendemail_team_removed(email, name, teamname, managername,
                                  debug=False, **kwargs):
    """Email a rower that they were removed from a team (team-removal flow).

    Same subject as the member-dropped task, but renders the
    'teamremoveemail.html' template instead.

    Args:
        email: recipient address.
        name: recipient's first name for the template.
        teamname: team the member was removed from.
        managername: manager who removed them.
        debug: when True, links point at the development site URL.
        **kwargs: forwarded to send_template_email.

    Returns:
        1 on completion.
    """
    subject = 'You were removed from ' + teamname
    siteurl = SITE_URL_DEV if debug else SITE_URL
    context = {
        'first_name': name,
        'managername': managername,
        'teamname': teamname,
        'siteurl': siteurl,
    }
    send_template_email('Rowsandall <info@rowsandall.com>', [email], subject,
                        'teamremoveemail.html', context, **kwargs)
    return 1
@app.task
def handle_sendemail_invite_reject(email, name, teamname, managername,
                                   debug=False, **kwargs):
    """Email a manager that a team invitation was rejected.

    NOTE(review): the subject interpolates `name` (the invitee), not
    `teamname` — reads as "Your invitation to <person> was rejected";
    confirm this is intended.

    Args:
        email: recipient address.
        name: invitee name; also exposed as first_name in the template.
        teamname: team the invitation was for.
        managername: manager name for the template.
        debug: when True, links point at the development site URL.
        **kwargs: forwarded to send_template_email.

    Returns:
        1 on completion.
    """
    subject = 'Your invitation to ' + name + ' was rejected'
    siteurl = SITE_URL_DEV if debug else SITE_URL
    context = {
        'first_name': name,
        'managername': managername,
        'teamname': teamname,
        'siteurl': siteurl,
    }
    send_template_email('Rowsandall <info@rowsandall.com>', [email], subject,
                        'teaminviterejectemail.html', context, **kwargs)
    return 1
@app.task
def handle_sendemail_invite_accept(email, name, teamname, managername,
                                   debug=False, **kwargs):
    """Email a manager that a team invitation was accepted.

    NOTE(review): like the reject variant, the subject interpolates `name`
    (the invitee), not `teamname` — confirm this is intended.

    Args:
        email: recipient address.
        name: invitee name; also exposed as first_name in the template.
        teamname: team the invitation was for.
        managername: manager name for the template.
        debug: when True, links point at the development site URL.
        **kwargs: forwarded to send_template_email.

    Returns:
        1 on completion.
    """
    subject = 'Your invitation to ' + name + ' was accepted'
    siteurl = SITE_URL_DEV if debug else SITE_URL
    context = {
        'first_name': name,
        'managername': managername,
        'teamname': teamname,
        'siteurl': siteurl,
    }
    send_template_email('Rowsandall <info@rowsandall.com>', [email], subject,
                        'teaminviteacceptemail.html', context, **kwargs)
    return 1
# Another simple task for debugging purposes
def add2(x, y, debug=False, **kwargs):
    """Return the sum of x and y; debug and extra keyword args are ignored."""
    total = x + y
    return total
@app.task
def fetch_strava_workout(stravatoken, oauth_data, stravaid, csvfilename, userid,
                         debug=False, **kwargs):
    """Fetch a Strava activity, convert it to a rowingdata CSV, and post it
    to the upload service.

    Args:
        stravatoken: OAuth bearer token for the Strava API.
        oauth_data: unused here; kept for signature compatibility with callers.
        stravaid: Strava activity id to fetch.
        csvfilename: path where the converted CSV is written.
        userid: rowsandall user id forwarded to the upload service.
        debug: unused here; kept for signature parity with sibling tasks.
        **kwargs: ignored; accepted for task-call compatibility.

    Returns:
        1 on success, 0 when the activity could not be fetched or has no data.
    """
    authorizationstring = str('Bearer ' + stravatoken)
    headers = {
        'Authorization': authorizationstring,
        'user-agent': 'sanderroosendaal',
        'Content-Type': 'application/json',
        'resolution': 'medium',
    }
    url = "https://www.strava.com/api/v3/activities/" + str(stravaid)
    response = requests.get(url, headers=headers)
    if response.status_code != 200:
        # Log the failed request with enough context to reproduce it.
        logtime = time.localtime()
        timestamp = time.strftime('%b-%d-%Y_%H%M', logtime)
        with open('strava_webhooks.log', 'a') as f:
            f.write('\n')
            f.write(timestamp)
            f.write(' ')
            f.write(url)
            f.write(' ')
            f.write(json.dumps(headers))
            f.write(' ')
            f.write(authorizationstring)
            f.write(' ')
            f.write('handle_get_strava_file response code {code}\n'.format(code=response.status_code))
            f.write('Response json {json}\n'.format(json=response.json()))
        return 0

    # Fix: reuse the response we already have instead of issuing a second,
    # identical API request (halves Strava API quota usage per workout).
    workoutsummary = response.json()

    # Individual data streams; each is None when Strava has no such stream.
    spm = get_strava_stream(None, 'cadence', stravaid, authorizationstring=authorizationstring)
    hr = get_strava_stream(None, 'heartrate', stravaid, authorizationstring=authorizationstring)
    t = get_strava_stream(None, 'time', stravaid, authorizationstring=authorizationstring)
    velo = get_strava_stream(None, 'velocity_smooth', stravaid, authorizationstring=authorizationstring)
    d = get_strava_stream(None, 'distance', stravaid, authorizationstring=authorizationstring)
    coords = get_strava_stream(None, 'latlng', stravaid, authorizationstring=authorizationstring)
    power = get_strava_stream(None, 'watts', stravaid, authorizationstring=authorizationstring)

    if t is not None:
        nr_rows = len(t)
    else:
        # No time stream: synthesize one second-by-second from the summary.
        try:
            duration = int(workoutsummary['elapsed_time'])
        except KeyError:
            duration = 0
        t = pd.Series(range(duration + 1))
        nr_rows = len(t)

    if nr_rows == 0:
        return 0

    # Default any missing stream to zeros of matching length.
    if d is None:
        d = 0 * t
    if spm is None:
        spm = np.zeros(nr_rows)
    if power is None:
        power = np.zeros(nr_rows)
    if hr is None:
        hr = np.zeros(nr_rows)
    if velo is None:
        velo = np.zeros(nr_rows)

    # Smooth velocity over roughly five seconds worth of samples.
    dt = np.diff(t).mean()
    if not np.isfinite(dt) or dt <= 0:
        # Fix: a single-sample stream makes np.diff empty and its mean nan,
        # which would crash round(); fall back to a 1-second sample interval.
        dt = 1.0
    wsize = round(5. / dt)
    velo2 = ewmovingaverage(velo, wsize)

    if coords is not None:
        try:
            lat = coords[:, 0]
            lon = coords[:, 1]
        except IndexError:
            lat = np.zeros(len(t))
            lon = np.zeros(len(t))
    else:
        lat = np.zeros(len(t))
        lon = np.zeros(len(t))

    strokelength = velo * 60. / (spm)
    strokelength[np.isinf(strokelength)] = 0.0
    # Fix: 0/0 yields nan (not inf); zero those entries out as well.
    strokelength[np.isnan(strokelength)] = 0.0

    pace = 500. / (1.0 * velo2)
    pace[np.isinf(pace)] = 0.0
    pace[np.isnan(pace)] = 0.0

    # Intermediate stroke frame; time, distance and pace are stored
    # scaled by 10 and descaled again below.
    strokedata = pd.DataFrame({
        't': 10 * t,
        'd': 10 * d,
        'p': 10 * pace,
        'spm': spm,
        'hr': hr,
        'lat': lat,
        'lon': lon,
        'power': power,
        'strokelength': strokelength,
    })

    try:
        workouttype = mytypes.stravamappinginv[workoutsummary['type']]
    except KeyError:
        workouttype = 'other'

    if workouttype.lower() == 'rowing':
        workouttype = 'rower'

    # A GPS polyline on a rowing activity means on-the-water rowing.
    # Fix: use .get so a summary without a 'map' entry cannot raise KeyError.
    if 'summary_polyline' in workoutsummary.get('map', {}) and workouttype == 'rower':
        workouttype = 'water'

    comments = workoutsummary.get('comments', ' ')

    try:
        rowdatetime = iso8601.parse_date(workoutsummary['date_utc'])
    except KeyError:
        rowdatetime = iso8601.parse_date(workoutsummary['start_date'])
    except ParseError:
        rowdatetime = iso8601.parse_date(workoutsummary['date'])

    title = workoutsummary.get('name', "")
    # Fix: the original read the undefined name `data` here, so the bare
    # except always reset the title to 'Imported', discarding the Strava
    # activity name. Append the first comment line as intended and only
    # fall back to 'Imported' when nothing usable is left.
    firstline = comments.split('\n', 1)[0].strip() if isinstance(comments, str) else ''
    if firstline:
        title += firstline[:20]
    if not title:
        title = 'Imported'

    starttimeunix = arrow.get(rowdatetime).timestamp

    res = make_cumvalues(0.1 * strokedata['t'])
    cum_time = res[0]
    lapidx = res[1]

    unixtime = cum_time + starttimeunix
    seconds = 0.1 * strokedata.loc[:, 't']
    nr_rows = len(unixtime)

    try:
        latcoord = strokedata.loc[:, 'lat']
        loncoord = strokedata.loc[:, 'lon']
        # A constant GPS trace means there was no real GPS signal.
        if latcoord.std() == 0 and loncoord.std() == 0 and workouttype == 'water':
            workouttype = 'rower'
    except KeyError:
        latcoord = np.zeros(nr_rows)
        loncoord = np.zeros(nr_rows)
        if workouttype == 'water':
            workouttype = 'rower'

    try:
        strokelength = strokedata.loc[:, 'strokelength']
    except KeyError:
        strokelength = np.zeros(nr_rows)

    dist2 = 0.1 * strokedata.loc[:, 'd']

    try:
        spm = strokedata.loc[:, 'spm']
    except KeyError:
        spm = 0 * dist2

    try:
        hr = strokedata.loc[:, 'hr']
    except KeyError:
        hr = 0 * spm

    pace = strokedata.loc[:, 'p'] / 10.
    pace = np.clip(pace, 0, 1e4)
    pace = pace.replace(0, 300)

    velo = 500. / pace

    try:
        power = strokedata.loc[:, 'power']
    except KeyError:
        # No power stream: estimate from boat speed (P ~ 2.8 v^3).
        power = 2.8 * velo ** 3

    # Create data frame with all necessary data to write to csv.
    df = pd.DataFrame({
        'TimeStamp (sec)': unixtime,
        ' Horizontal (meters)': dist2,
        ' Cadence (stokes/min)': spm,
        ' HRCur (bpm)': hr,
        ' longitude': loncoord,
        ' latitude': latcoord,
        ' Stroke500mPace (sec/500m)': pace,
        ' Power (watts)': power,
        ' DragFactor': np.zeros(nr_rows),
        ' DriveLength (meters)': np.zeros(nr_rows),
        ' StrokeDistance (meters)': strokelength,
        ' DriveTime (ms)': np.zeros(nr_rows),
        ' StrokeRecoveryTime (ms)': np.zeros(nr_rows),
        ' AverageDriveForce (lbs)': np.zeros(nr_rows),
        ' PeakDriveForce (lbs)': np.zeros(nr_rows),
        ' lapIdx': lapidx,
        ' ElapsedTime (sec)': seconds,
        'cum_dist': dist2,
    })

    # Fix: sort_values returns a new frame; the original discarded the
    # result, leaving df unsorted.
    df = df.sort_values(by='TimeStamp (sec)', ascending=True)

    row = rowingdata.rowingdata(df=df)
    row.write_csv(csvfilename, gzip=False)

    uploadoptions = {
        'secret': UPLOAD_SERVICE_SECRET,
        'user': userid,
        'file': csvfilename,
        'title': title,
        'workouttype': workouttype,
        'boattype': '1x',
        'stravaid': stravaid,
    }

    print(uploadoptions)

    session = requests.session()
    newHeaders = {'Content-type': 'application/json', 'Accept': 'text/plain'}
    session.headers.update(newHeaders)
    session.post(UPLOAD_SERVICE_URL, json=uploadoptions)

    logtime = time.localtime()
    timestamp = time.strftime('%b-%d-%Y_%H%M', logtime)
    with open('strava_webhooks.log', 'a') as f:
        f.write('\n')
        f.write(timestamp)
        f.write(' ')
        f.write('fetch_strava_workout posted file with strava id {stravaid} user id {userid}\n'.format(
            stravaid=stravaid, userid=userid))

    return 1