4630 lines
136 KiB
Python
4630 lines
136 KiB
Python
import os
|
||
|
||
os.environ["DJANGO_ALLOW_ASYNC_UNSAFE"] = "true"
|
||
from YamJam import yamjam
|
||
CFG = yamjam()['rowsandallapp']
|
||
|
||
try:
|
||
os.environ.setdefault("DJANGO_SETTINGS_MODULE",CFG['settings_name'])
|
||
except KeyError: # pragma: no cover
|
||
os.environ.setdefault("DJANGO_SETTINGS_MODULE","rowsandall_app.settings")
|
||
|
||
from django.core.wsgi import get_wsgi_application
|
||
|
||
application = get_wsgi_application()
|
||
from rowers.models import (
|
||
Workout, GeoPolygon, GeoPoint, GeoCourse,
|
||
VirtualRaceResult, CourseTestResult, Rower,
|
||
GraphImage, Team, PlannedSession
|
||
)
|
||
from rowers.session_utils import is_session_complete
|
||
|
||
import math
|
||
from rowers.courseutils import (
|
||
coursetime_paths, coursetime_first, time_in_path,
|
||
InvalidTrajectoryError
|
||
)
|
||
from rowers.emails import send_template_email
|
||
from rowers.mytypes import intervalsmappinginv
|
||
from rowers.nkimportutils import (
|
||
get_nk_summary, get_nk_allstats, get_nk_intervalstats, getdict, strokeDataToDf,
|
||
add_workout_from_data
|
||
)
|
||
from rowers.utils import get_strava_stream
|
||
from stravalib.exc import ActivityUploadFailed
|
||
import stravalib
|
||
import arrow
|
||
import rowers.longtask as longtask
|
||
import requests
|
||
import rowers.datautils as datautils
|
||
from rowers.models import create_or_update_syncrecord
|
||
|
||
""" Background tasks done by QR (production) """
|
||
import time
|
||
import gc
|
||
import gzip
|
||
import shutil
|
||
import numpy as np
|
||
import re
|
||
import sys
|
||
import json
|
||
import traceback
|
||
from time import strftime
|
||
import base64
|
||
from io import BytesIO
|
||
|
||
from scipy import optimize
|
||
from scipy.signal import savgol_filter
|
||
from scipy.interpolate import griddata
|
||
|
||
import rowingdata
|
||
from rowingdata import make_cumvalues, make_cumvalues_array
|
||
from uuid import uuid4
|
||
from rowingdata import rowingdata as rdata
|
||
from rowingdata import FITParser as FP
|
||
from rowingdata.otherparsers import FitSummaryData
|
||
|
||
from datetime import timedelta
|
||
|
||
from rowers.celery import app
|
||
from celery import shared_task
|
||
|
||
import datetime
|
||
import pytz
|
||
import iso8601
|
||
from iso8601 import ParseError
|
||
|
||
from json.decoder import JSONDecodeError
|
||
from pytz.exceptions import UnknownTimeZoneError
|
||
|
||
from matplotlib.backends.backend_agg import FigureCanvas
|
||
|
||
import matplotlib.pyplot as plt
|
||
from matplotlib import path
|
||
|
||
import grpc
|
||
import rowers.otw_power_calculator_pb2 as calculator_pb2
|
||
import rowers.otw_power_calculator_pb2_grpc as calculator_pb2_grpc
|
||
import rowers.rowing_workout_metrics_pb2 as metrics_pb2
|
||
import rowers.rowing_workout_metrics_pb2_grpc as metrics_pb2_grpc
|
||
|
||
from django.conf import settings
|
||
from django.db.utils import IntegrityError
|
||
|
||
# extra read of config

# Absolute site URLs used when composing links in outgoing emails.
SITE_URL = CFG['site_url']
SITE_URL_DEV = CFG['site_url']
PROGRESS_CACHE_SECRET = CFG['progress_cache_secret']
try:
    SETTINGS_NAME = CFG['settings_name']
except KeyError:  # pragma: no cover
    # BUG FIX: fallback was 'rowsandall_ap.settings' (missing "p"),
    # inconsistent with the DJANGO_SETTINGS_MODULE default used above.
    SETTINGS_NAME = 'rowsandall_app.settings'

try:
    UPLOAD_SERVICE_URL = CFG['upload_service_url']
except KeyError:  # pragma: no cover
    UPLOAD_SERVICE_URL = "http://localhost:8000/rowers/workout/api/upload/"
try:
    UPLOAD_SERVICE_SECRET = CFG['upload_service_secret']
except KeyError:  # pragma: no cover
    # NOTE(review): hard-coded fallback secret lives in source control —
    # fine for local dev only; rotate if this path can run in production.
    UPLOAD_SERVICE_SECRET = "FoYezZWLSyfAVimumpHEeYsJjsNCerxV"

NK_API_LOCATION = CFG["nk_api_location"]
TP_CLIENT_ID = CFG["tp_client_id"]
TP_CLIENT_SECRET = CFG["tp_client_secret"]
TP_API_LOCATION = CFG["tp_api_location"]
# legacy lowercase alias still referenced by uploadactivity()
tpapilocation = TP_API_LOCATION
|
||
|
||
from requests_oauthlib import OAuth1, OAuth1Session
|
||
|
||
import pandas as pd
|
||
import polars as pl
|
||
from polars.exceptions import (
|
||
ColumnNotFoundError, ComputeError, ShapeError
|
||
)
|
||
|
||
|
||
from django_rq import job
|
||
from django.utils import timezone
|
||
from django.utils.html import strip_tags
|
||
|
||
from rowers.utils import deserialize_list, ewmovingaverage, wavg, dologging
|
||
import rowers.utils as utils
|
||
from rowers.emails import htmlstrip
|
||
from rowers import mytypes
|
||
|
||
|
||
from rowers.dataroutines import (
|
||
getsmallrowdata_pd, updatecpdata_sql, update_c2id_sql,
|
||
read_data,
|
||
#update_workout_field_sql,
|
||
update_agegroup_db, update_strokedata,
|
||
add_c2_stroke_data_db, totaltime_sec_to_string,
|
||
create_c2_stroke_data_db, update_empower,
|
||
# database_url_debug,
|
||
database_url, dataprep,
|
||
# create_strava_stroke_data_db
|
||
)
|
||
|
||
database_url_debug = database_url
|
||
|
||
|
||
from rowers.opaque import encoder
|
||
|
||
from django.core.mail import (
|
||
send_mail,
|
||
EmailMessage, EmailMultiAlternatives,
|
||
)
|
||
|
||
from django.template import Context
|
||
from django.db.utils import OperationalError
|
||
from jinja2 import Template, Environment, FileSystemLoader
|
||
env = Environment(loader=FileSystemLoader(["rowers/templates"]))
|
||
|
||
|
||
def safetimedelta(x):
    """Convert *x* seconds to a ``timedelta``, falling back to zero.

    Returns ``timedelta(0)`` for any value ``timedelta`` rejects
    instead of raising.
    """
    try:
        return timedelta(seconds=x)
    except (ValueError, OverflowError, TypeError):
        # ValueError: NaN. OverflowError: |x| outside timedelta's range.
        # TypeError: non-numeric input. The original caught only
        # ValueError, so huge values still crashed callers.
        return timedelta(seconds=0)
|
||
|
||
|
||
siteurl = SITE_URL
|
||
|
||
|
||
# testing task
|
||
|
||
|
||
# Concept2 logbook sends over split data for each interval
# We use it here to generate a custom summary
# Some users complained about small differences
def summaryfromsplitdata(splitdata, data, filename, sep='|', workouttype='rower'):
    """Build a textual workout summary from Concept2 logbook data.

    Parameters
    ----------
    splitdata : list of per-interval dicts in C2 logbook format
    data : dict of workout totals; C2 times are in tenths of seconds
    filename : source file name echoed into the summary header
    sep : column separator for the summary table
    workouttype : e.g. 'rower' or 'bikeerg'; bike types halve the
        velocity before the power estimate (bike speed ~2x rowing
        speed for the same power)

    Returns
    -------
    (summary_string, interval_spec_list, interval_result_list)
        ``interval_spec_list`` alternates value/unit/kind triples,
        ``interval_result_list`` holds the corresponding results.
    """
    workouttype = workouttype.lower()

    totaldist = data['distance']
    totaltime = data['time']/10.
    try:
        spm = data['stroke_rate']
    except KeyError:
        spm = 0
    try:
        resttime = data['rest_time']/10.
    except KeyError:  # pragma: no cover
        resttime = 0
    try:
        restdistance = data['rest_distance']
    except KeyError:  # pragma: no cover
        restdistance = 0
    try:
        avghr = data['heart_rate']['average']
    except KeyError:  # pragma: no cover
        avghr = 0
    try:
        maxhr = data['heart_rate']['max']
    except KeyError:  # pragma: no cover
        maxhr = 0

    try:
        avgpace = 500.*totaltime/totaldist
    except (ZeroDivisionError, OverflowError):  # pragma: no cover
        avgpace = 0.

    try:
        restpace = 500.*resttime/restdistance
    except (ZeroDivisionError, OverflowError):  # pragma: no cover
        restpace = 0.

    try:
        velo = totaldist/totaltime
        avgpower = 2.8*velo**(3.0)
    except (ZeroDivisionError, OverflowError):  # pragma: no cover
        velo = 0
        avgpower = 0
    if workouttype in ['bike', 'bikeerg']:  # pragma: no cover
        velo = velo/2.
        avgpower = 2.8*velo**(3.0)
        velo = velo*2

    try:
        restvelo = restdistance/resttime
    except (ZeroDivisionError, OverflowError):  # pragma: no cover
        restvelo = 0

    restpower = 2.8*restvelo**(3.0)
    if workouttype in ['bike', 'bikeerg']:  # pragma: no cover
        restvelo = restvelo/2.
        restpower = 2.8*restvelo**(3.0)
        restvelo = restvelo*2

    try:
        avgdps = totaldist/data['stroke_count']
    except (ZeroDivisionError, OverflowError, KeyError):
        avgdps = 0

    from rowingdata import summarystring, workstring, interval_string

    sums = summarystring(totaldist, totaltime, avgpace, spm, avghr, maxhr,
                         avgdps, avgpower, readFile=filename,
                         separator=sep)

    sums += workstring(totaldist, totaltime, avgpace, spm, avghr, maxhr,
                       avgdps, avgpower, separator=sep, symbol='W')

    sums += workstring(restdistance, resttime, restpace, 0, 0, 0, 0, restpower,
                       separator=sep,
                       symbol='R')

    sums += '\nWorkout Details\n'
    sums += '#-{sep}SDist{sep}-Split-{sep}-SPace-{sep}-Pwr-{sep}SPM-{sep}AvgHR{sep}MaxHR{sep}DPS-\n'.format(
        sep=sep
    )

    intervalnr = 0
    sa = []
    results = []

    try:
        timebased = data['workout_type'] in [
            'FixedTimeSplits', 'FixedTimeInterval']
    except KeyError:  # pragma: no cover
        timebased = False

    for interval in splitdata:
        try:
            idist = interval['distance']
        except KeyError:  # pragma: no cover
            idist = 0

        try:
            itime = interval['time']/10.
        except KeyError:  # pragma: no cover
            itime = 0
        try:
            ipace = 500.*itime/idist
        except (ZeroDivisionError, OverflowError):  # pragma: no cover
            ipace = 180.

        try:
            ispm = interval['stroke_rate']
        except KeyError:  # pragma: no cover
            ispm = 0
        try:
            irest_time = interval['rest_time']/10.
        except KeyError:  # pragma: no cover
            irest_time = 0
        try:
            iavghr = interval['heart_rate']['average']
        except KeyError:  # pragma: no cover
            iavghr = 0
        try:
            # BUG FIX: this used to read ['average'] again (copy-paste),
            # so the per-interval max HR always equalled the average HR
            imaxhr = interval['heart_rate']['max']
        except KeyError:  # pragma: no cover
            imaxhr = 0

        # create interval values
        iarr = [idist, 'meters', 'work']
        resarr = [itime]
        if timebased:  # pragma: no cover
            iarr = [itime, 'seconds', 'work']
            resarr = [idist]

        if irest_time > 0:
            iarr += [irest_time, 'seconds', 'rest']
            try:
                resarr += [interval['rest_distance']]
            except KeyError:
                resarr += [np.nan]

        sa += iarr
        results += resarr

        if itime != 0:
            ivelo = idist/itime
            ipower = 2.8*ivelo**(3.0)
            if workouttype in ['bike', 'bikeerg']:  # pragma: no cover
                ipower = 2.8*(ivelo/2.)**(3.0)
        else:  # pragma: no cover
            ivelo = 0
            ipower = 0

        sums += interval_string(intervalnr, idist, itime, ipace, ispm,
                                iavghr, imaxhr, 0, ipower, separator=sep)
        intervalnr += 1

    return sums, sa, results
|
||
|
||
from rowers.utils import intensitymap
|
||
|
||
def strcapitalize(s):
    """Upper-case only the first character of *s*, keeping the rest as-is.

    Unlike ``str.capitalize`` the remainder of the string is untouched.
    ``None`` and non-string values are returned unchanged; the empty
    string is returned unchanged instead of raising ``IndexError``.
    """
    if s is None:
        return None
    if isinstance(s, str):
        if not s:
            # guard: the original indexed s[0] and crashed on ""
            return s
        return s[0].upper() + s[1:]

    return s
|
||
|
||
def correct_intensity(workout):
    """Normalise the step fields of a planned workout in place.

    Integer ``intensity`` codes are mapped to their string names via
    ``intensitymap``; then ``durationType``, ``targetType`` and
    ``intensity`` each get a leading capital. Returns *workout*.
    """
    def _cap(value):
        # first-letter capitalisation; None and non-strings pass through
        if value is None:
            return None
        if isinstance(value, str):
            return value[0].upper() + value[1:]
        return value

    for step in workout['steps']:
        if 'intensity' not in step:
            continue
        if isinstance(step['intensity'], int):
            step['intensity'] = intensitymap[step['intensity']]
        step['durationType'] = _cap(step['durationType'])
        step['targetType'] = _cap(step['targetType'])
        step['intensity'] = _cap(step['intensity'])

    return workout
|
||
|
||
import io
|
||
import zipfile
|
||
|
||
@app.task
def email_user_workouts_zip_chunk(rower, workout_ids, filename, instrokedata,
                                  part, total_parts, debug=False, **kwargs):
    """Export one chunk of a user's workouts to a zip of FIT files and email a link.

    Each workout id is converted to a FIT file (plus optional companion
    file) via rowingdata, added to ``MEDIA_ROOT/filename`` and removed
    from disk. Failures on individual workouts are logged and skipped.
    Returns 1.
    """
    zip_file_path = os.path.join(settings.MEDIA_ROOT, filename)

    with zipfile.ZipFile(zip_file_path, 'w', zipfile.ZIP_DEFLATED) as zip_file:
        for workout_id in workout_ids:
            try:
                workout = Workout.objects.get(id=workout_id)
                sport = mytypes.fitmapping.get(workout.workouttype, 'generic')
                fit_filename = f"workout_{sport}_{workout.id}_{workout.date.strftime('%Y%m%d')}.fit"
                rowdata = rdata(csvfile=workout.csvfilename)
                # exporttofit writes the FIT file to disk; archive then delete it
                res = rowdata.exporttofit(fit_filename, sport=sport, notes=workout.name,
                                          instroke_export=instrokedata)
                zip_file.write(fit_filename, arcname=fit_filename)
                os.remove(fit_filename)
                # res['companion_file'] names an extra file (e.g. instroke
                # data) that also belongs in the archive
                if res.get('companion_file'):
                    companion_filename = res['companion_file']
                    zip_file.write(companion_filename, arcname=os.path.basename(companion_filename))
                    os.remove(companion_filename)
            except Exception as e:
                dologging('export_all_workouts.log',
                          f"Error exporting workout {workout_id}: {e}")
                continue

    # BUG FIX: the link previously contained the literal text "(unknown)";
    # interpolate the archive's filename so the download link resolves.
    download_url = f"{SITE_URL}/rowers/workouts/download/?file={filename}"
    subject = f"Rowsandall Workouts Export (part {part} of {total_parts})"
    send_template_email(
        'Rowsandall <info@rowsandall.com>',
        [rower.user.email],
        subject,
        'workouts_export_email.html',
        {'download_url': download_url, 'filename': filename,
         'part': part, 'total_parts': total_parts},
    )
    return 1
|
||
|
||
@app.task
def email_all_user_workouts_zip(rower, start_date, end_date,
                                workouttype, instrokedata, debug=False, **kwargs):
    """Export all of a user's workouts (optionally filtered) to a zip and email a link.

    Workouts are filtered by date range and *workouttype* ('All' means no
    type filter), exported to FIT files, bundled under ``MEDIA_ROOT`` and
    the user is emailed a download link.
    Returns 1 on success, 0 when no workouts match.
    """
    # Get all workouts for this user, optionally filtered by date range
    workouts = Workout.objects.filter(user=rower).order_by('-date')

    # Apply date filters if provided
    if start_date:
        workouts = workouts.filter(date__gte=start_date)
    if end_date:
        workouts = workouts.filter(date__lte=end_date)

    if workouttype != 'All':
        workouts = workouts.filter(workouttype=workouttype)

    if not workouts.exists():
        dologging('export_all_workouts.log', f"No workouts found for user {rower.user.id} in date range {start_date} to {end_date}")
        return 0

    export_date = datetime.datetime.now().strftime('%Y%m%d')
    filename = f"{rower.user.username}_workouts_{export_date}_from_{start_date}_to_{end_date}_{uuid4().hex[:8]}.zip"
    zip_file_path = os.path.join(settings.MEDIA_ROOT, filename)

    with zipfile.ZipFile(zip_file_path, 'w', zipfile.ZIP_DEFLATED) as zip_file:
        for workout in workouts:
            try:
                rowdata = rdata(csvfile=workout.csvfilename)
                # BUG FIX: the loop used to rebind `workouttype`, clobbering
                # the filter parameter; use a local name for the FIT sport.
                sport = mytypes.fitmapping.get(workout.workouttype, 'generic')

                fit_filename = f"workout_{sport}_{workout.id}_{workout.date.strftime('%Y%m%d')}.fit"
                # exporttofit creates a file; add it to the zip, then delete
                res = rowdata.exporttofit(fit_filename, sport=sport, notes=workout.name,
                                          instroke_export=instrokedata)
                zip_file.write(fit_filename, arcname=fit_filename)
                os.remove(fit_filename)
                # res is a dict. If res['companion_file'] is not None it is
                # the name of an extra file (e.g. instroke data) that also
                # needs to be added to the zip.
                if res.get('companion_file'):
                    companion_filename = res['companion_file']
                    zip_file.write(companion_filename, arcname=os.path.basename(companion_filename))
                    os.remove(companion_filename)

            except Exception as e:  # pragma: no cover
                dologging('export_all_workouts.log', f"Error exporting workout {workout.id}: {str(e)}")
                continue

    # Send email with download link
    subject = "Rowsandall Workouts Export"
    from_email = 'Rowsandall <info@rowsandall.com>'
    useremail = rower.user.email

    # BUG FIX: the URL previously contained the literal text "(unknown)";
    # interpolate the archive filename so the download link resolves.
    download_url = f"{SITE_URL}/rowers/workouts/download/?file={filename}"

    _ = send_template_email(
        from_email, [useremail],
        subject,
        'workouts_export_email.html',
        {'download_url': download_url, 'filename': filename},
    )

    return 1
|
||
|
||
|
||
@app.task
def handle_loadnextweek(rower, debug=False, **kwargs):
    """Fetch next week's training plan and recreate the rower's planned sessions.

    Posts the rower's plan code/secret plus current fitness/fatigue to a
    local plan-generation service. On a 200/201 reply, deletes the
    rower's existing non-template PlannedSessions that fall inside next
    week, then creates one PlannedSession per workout in the returned
    ``cycles`` list.

    Returns 1 when sessions were (re)created, 0 when the service call
    failed.
    """

    plan = rower.training_plan_code
    secret = rower.training_plan_secret
    post_data = {
        'fitness': rower.actualfit,
        'fatigue': rower.actualfatigue,
        'plan': plan,
        'secret': secret,
    }

    # NOTE(review): hard-coded local service endpoint — presumably only
    # reachable from the production host; confirm before reuse elsewhere.
    url = "http://localhost:8898/next-week-plan/"
    response = requests.post(url, data=post_data)

    if response.status_code in [200, 201]:
        data = response.json()

        # next week's Monday .. Sunday window
        today = timezone.now()
        startdate = today - timedelta(days=today.weekday())+timezone.timedelta(days=7)
        enddate = startdate + timedelta(days=6)

        # wipe the sessions already planned for that window so the new
        # plan fully replaces them
        sps = PlannedSession.objects.filter(
            rower__in=[rower],
            startdate__gte=startdate,
            enddate__lte=enddate,
            is_template=False,
        )

        for ps in sps:
            ps.delete()

        trainingdays = data['cycles']
        # start date is the first day of the following week

        ndays = 0
        for day in trainingdays:
            # each day entry looks like [[header, workout, workout, ...]];
            # the first element of day[0] is skipped
            try:
                workouts = day[0][1:]
            except IndexError:
                workouts =[]
            for workout in workouts:
                # map the plan's sport name to our internal code,
                # defaulting to on-the-water rowing
                sessionsport = 'water'
                try:
                    sessionsport = mytypes.fitmappinginv[workout['sport'].lower()]
                except KeyError:
                    pass

                preferreddate = startdate+timedelta(days=ndays)
                sessionmode = 'time'

                # session window spans the Monday..Sunday week that
                # contains preferreddate
                ps = PlannedSession(
                    startdate=preferreddate - timedelta(days=preferreddate.weekday()),
                    enddate=preferreddate + timedelta(days=-preferreddate.weekday()-1, weeks=1),
                    preferreddate=preferreddate,
                    sessionsport=sessionsport, # change this
                    name=workout['workoutName'],
                    steps=correct_intensity(workout),
                    manager=rower.user,
                    sessionmode=sessionmode,
                    comment=workout['description'],
                    from_plan=None,
                )
                ps.save()
                # save before add(): the M2M needs a primary key
                ps.rower.add(rower)
                ps.save()
                if ps.fitfile:
                    # a FIT file supersedes the JSON steps
                    ps.steps = {}
                    ps.save()
            # advance one calendar day per plan day (not per workout)
            ndays += 1


        return 1

    return 0
|
||
|
||
@app.task
def handle_assignworkouts(workouts, rowers, remove_workout, debug=False, **kwargs):
    """Copy each workout in *workouts* to every rower in *rowers*.

    The workout's CSV (or its gzipped variant) is duplicated under a
    fresh name and re-posted to the internal upload service with the
    original metadata, so each rower receives an independent copy. When
    *remove_workout* is true the source workout is deleted afterwards.
    Returns 1.
    """
    for workout in workouts:
        uploadoptions = {
            'secret': UPLOAD_SERVICE_SECRET,
            'title': workout.name,
            'boattype': workout.boattype,
            'workouttype': workout.workouttype,
            'inboard': workout.inboard,
            'oarlength': workout.oarlength,
            'summary': workout.summary,
            'elapsedTime': 3600.*workout.duration.hour+60*workout.duration.minute+workout.duration.second,
            'totalDistance': workout.distance,
            'useImpeller': workout.impeller,
            'seatNumber': workout.seatnumber,
            'boatName': workout.boatname,
            'portStarboard': workout.empowerside,
        }
        for rower in rowers:
            failed = False
            csvfilename = 'media/{code}.csv'.format(code=uuid4().hex[:16])
            try:
                # BUG FIX: the old code opened the target with open(..., 'wb')
                # before shutil.copy — the handle was unused, and on the
                # fallback path it left a stray empty non-.gz file behind.
                shutil.copy(workout.csvfilename, csvfilename)
            except FileNotFoundError:
                # plain CSV missing: try the gzipped variant
                try:
                    csvfilename = csvfilename+'.gz'
                    shutil.copy(workout.csvfilename+'.gz', csvfilename)
                except OSError:
                    # narrowed from a bare except: copy failures are OS errors
                    failed = True
            if not failed:
                uploadoptions['user'] = rower.user.id
                uploadoptions['file'] = csvfilename
                session = requests.session()
                newHeaders = {'Content-type': 'application/json', 'Accept': 'text/plain'}
                session.headers.update(newHeaders)
                response = session.post(UPLOAD_SERVICE_URL, json=uploadoptions)
                print(response.text)
        if remove_workout:
            workout.delete()

    return 1
|
||
|
||
@app.task
def create_sessions_from_json_async(plansteps, rower, startdate, manager, planbyrscore, plan, plan_past_days, debug=False, **kwargs):
    """Create PlannedSessions for *rower* from a JSON training plan.

    Walks ``plansteps['trainingDays']`` and creates one PlannedSession
    per workout, dated ``startdate + day['order']`` days. Sessions in
    the past are skipped unless *plan_past_days* is true. The session
    is attached to *rower* only if the rower is a member of one of the
    manager's teams, or is the manager's own rower record.

    Returns 1.
    """
    trainingdays = plansteps['trainingDays']
    planstartdate = startdate
    for day in trainingdays:
        for workout in day['workouts']:
            # map the plan's sport name to our internal code, defaulting
            # to on-the-water rowing
            sessionsport = 'water'
            try:
                sessionsport = mytypes.fitmappinginv[workout['sport'].lower()]
            except KeyError:
                pass

            preferreddate = planstartdate+timedelta(days=day['order'])

            sessionmode = 'time'
            if planbyrscore:
                sessionmode = 'rScore'

            # only plan sessions from today onward, unless explicitly
            # asked to back-fill past days
            create_session = False
            if plan_past_days:
                create_session = True
            elif preferreddate >= timezone.now().date():
                create_session = True


            if create_session:
                # session window spans the Monday..Sunday week that
                # contains preferreddate
                ps = PlannedSession(
                    startdate=preferreddate -
                    timedelta(days=preferreddate.weekday()),
                    enddate=preferreddate +
                    timedelta(days=-preferreddate.weekday()-1, weeks=1),
                    preferreddate=preferreddate,
                    sessionsport=sessionsport, # change this
                    name=workout['workoutName'],
                    steps=workout,
                    manager=manager,
                    sessionmode=sessionmode,
                    comment=workout['description'],
                    from_plan=plan,
                )

                # save first: the M2M add() below needs a primary key
                ps.save()

                # attach the rower only when they belong to the manager
                teams = Team.objects.filter(manager=ps.manager)
                members = Rower.objects.filter(team__in=teams).distinct()
                if rower in members:
                    ps.rower.add(rower)
                    ps.save()
                elif ps.manager.rower == rower:
                    ps.rower.add(rower)
                    ps.save()

    return 1
|
||
|
||
@app.task
def handle_post_workout_api(uploadoptions, debug=False, **kwargs): # pragma: no cover
    """POST *uploadoptions* as JSON to the internal upload service.

    Returns 1 on HTTP 200, otherwise 0.
    """
    http = requests.session()
    http.headers.update({
        'Content-type': 'application/json',
        'Accept': 'text/plain',
    })

    reply = http.post(UPLOAD_SERVICE_URL, json=uploadoptions)

    return 1 if reply.status_code == 200 else 0
|
||
|
||
|
||
@app.task
def handle_remove_workouts_team(ws, t, debug=False, **kwargs): # pragma: no cover
    """Detach team *t* from every workout in *ws*. Returns 1."""
    for workout in ws:
        workout.team.remove(t)

    return 1
|
||
|
||
@app.task
def handle_add_workouts_team(ws, t, debug=False, **kwargs): # pragma: no cover
    """Attach team *t* to every workout in *ws*. Returns 1."""
    for workout in ws:
        workout.team.add(t)

    return 1
|
||
|
||
def uploadactivity(access_token, filename, description='',
                   name='Rowsandall.com workout', workouttype='rowing'): # pragma: no cover
    """Gzip *filename* and upload it to the TrainingPeaks v3 file API.

    Returns a 4-tuple ``(ok, reason, status_code, headers)``:
    on success ``(1, "ok", 200, response_headers)`` — the response
    headers carry the Location URL used to poll for the workout id;
    on HTTP failure ``(0, reason, status_code, request_headers)``;
    ``(0, 0, 0, 0)`` when *filename* does not exist.
    """
    data_gz = BytesIO()

    try:
        with open(filename, 'rb') as inF:
            s = inF.read()
        with gzip.GzipFile(fileobj=data_gz, mode="w") as gzf:
            gzf.write(s)
    except FileNotFoundError:
        return 0, 0, 0, 0

    headers = {
        'Content-Type': 'application/json',
        'Accept': 'application/json',
        'Authorization': 'Bearer %s' % access_token
    }

    # Data field is the base64-encoded gzipped file content
    data = {
        "UploadClient": "rowsandall",
        "Filename": filename,
        "SetWorkoutPublic": True,
        "Title": name,
        "Type": workouttype,
        "Comment": description,
        "Data": base64.b64encode(data_gz.getvalue()).decode("ascii")
    }

    # NOTE(review): verify=False disables TLS certificate verification for
    # the TrainingPeaks endpoint — confirm whether this is still required.
    resp = requests.post(tpapilocation+"/v3/file",
                         data=json.dumps(data),
                         headers=headers, verify=False)

    if resp.status_code not in (200, 202):  # pragma: no cover
        dologging('tp_export.log', resp.status_code)
        dologging('tp_export.log', resp.reason)
        # failure reports the *request* headers (historical behavior)
        return 0, resp.reason, resp.status_code, headers

    # removed the original's unreachable trailing `return 0, 0, 0, 0`
    return 1, "ok", 200, resp.headers
|
||
|
||
@app.task
def send_session_stats(user, debug=False, **kwargs):
    """Email *user* a CSV of completion stats for session-linked workouts.

    Builds one row per workout that is tied to a PlannedSession
    (completion ratio, status, rScore, duration), writes the table to a
    temporary CSV and sends it as an email attachment. Returns 1.
    """
    ws = Workout.objects.filter(plannedsession__isnull=False)

    results = []
    # BUG FIX: `d` is used as the template context after the loop; it was
    # previously undefined (NameError) when the queryset was empty.
    d = {}

    for w in ws:
        ps = w.plannedsession
        r = w.user
        ratio, status, cdate = is_session_complete(r, ps)
        d = {
            'date': w.date,
            'session_id': ps.id,
            'session_name': ps.name,
            'complete': ratio,
            'status': status,
            'rscore': w.rscore,
            'duration': w.duration,
        }
        results.append(d)

    df = pd.DataFrame(results)

    code = str(uuid4())
    filename = code+'.csv'

    df.to_csv(filename)

    subject = "Session Stats"
    from_email = 'Rowsandall <info@rowsandall.com>'
    useremail = user.email

    # NOTE(review): the template context is the *last* row only; the full
    # table travels in the attached CSV.
    _ = send_template_email(
        from_email, [useremail],
        subject,
        'sessionstats.html',
        d,
        attach_file=filename,
    )

    os.remove(filename)

    return 1
|
||
|
||
|
||
@app.task
def check_tp_workout_id(workout, location, attempts=5, debug=False, **kwargs): # pragma: no cover
    """Poll the TrainingPeaks upload-status URL and store the workout id.

    On a successful poll the TrainingPeaks workout id is written to
    ``workout.uploadedtotp`` and mirrored into the sync record.
    Returns 1 on success, 0 on any failure.
    """
    headers = {
        'Authorization': str('Bearer ' + workout.user.tptoken),
        'user-agent': 'sanderroosendaal',
        'Content-Type': 'application/json',
    }
    response = requests.get(location, headers=headers, params={})

    if response.status_code != 200:
        dologging('tp_export.log', 'failed to get workout id from trainingpeaks')
        dologging('tp_export.log', response.text)
        return 0

    status = response.json()['Status']
    if status != 'Success':
        dologging('tp_export.log', 'failed to get workout id from trainingpeaks')
        dologging('tp_export.log', response.text)
        dologging('tp_export.log', status)
        return 0

    tpid = response.json()['WorkoutIds'][0]
    workout.uploadedtotp = tpid
    record = create_or_update_syncrecord(workout.user, workout, tpid=tpid)
    workout.save()

    return 1
|
||
|
||
@app.task
def handle_workout_tp_upload(w, thetoken, tcxfilename, workouttype, debug=False, **kwargs): # pragma: no cover
    """Upload workout *w*'s TCX file to TrainingPeaks.

    On success, kicks off ``check_tp_workout_id`` with the Location
    header returned by the upload endpoint, which fills in the real
    TrainingPeaks workout id later. The temporary TCX file is removed
    in every path. Returns the provisional tpid (0 on failure).
    """
    tpid = 0
    r = w.user
    dologging('tp_export.log','uploading workout {workoutid} to trainingpeaks for user {id}'.format(id=r.id,workoutid=w.id))

    if not tcxfilename:
        return 0

    res, reason, status_code, headers = uploadactivity(
        thetoken, tcxfilename,
        name=w.name, workouttype=workouttype,
    )

    if res == 0:
        # mark the TCX export as failed and clean up the temp file
        w.uploadedtotcx = -1
        try:
            os.remove(tcxfilename)
        except OSError:
            # BUG FIX: was a bare ``except:`` which also swallowed
            # KeyboardInterrupt/SystemExit; only file errors matter here.
            pass

        w.save()
        return 0

    dologging('tp_export.log','uploading workout {workoutid} to trainingpeaks for user {id} succeeded'.format(id=r.id,workoutid=w.id))
    w.uploadedtotp = res
    record = create_or_update_syncrecord(w.user, w, tpid=tpid)
    tpid = res
    w.save()
    try:
        os.remove(tcxfilename)
    except FileNotFoundError:
        pass

    # poll TrainingPeaks for the real workout id
    check_tp_workout_id(w,headers['Location'])

    return tpid
|
||
|
||
@app.task
def instroke_static(w, metric, debug=False, **kwargs): # pragma: no cover
    """Render an in-stroke plot for workout *w* and register it as a GraphImage.

    The PNG is written under ``static/plots/``. At most 7 GraphImage
    records are kept per workout; returns 0 when the quota is already
    used, 1 otherwise.
    """
    f1 = w.csvfilename[6:-4]  # strip the 'media/' prefix and '.csv' suffix
    rowdata = rdata(csvfile=w.csvfilename)

    timestr = strftime("%Y%m%d-%H%M%S")
    imagename = f1+timestr+'.png'
    fullpathimagename = 'static/plots/'+imagename
    fig1 = rowdata.get_plot_instroke(metric)
    canvas = FigureCanvas(fig1)
    canvas.print_figure(fullpathimagename)
    plt.close(fig1)
    fig1.clf()

    try:
        # NOTE(review): `Image` (PIL) is not imported in this module, so
        # this likely always falls through to the default size — confirm.
        # Narrowed from a bare ``except:``.
        width, height = Image.open(fullpathimagename).size
    except Exception:
        width = 1200
        height = 600

    imgs = GraphImage.objects.filter(workout=w)
    if imgs.count() >= 7:
        # quota reached: drop the request (the PNG stays on disk)
        return 0

    i = GraphImage(workout=w,
                   creationdatetime=timezone.now(),
                   filename=fullpathimagename,
                   width=width, height=height)
    i.save()

    return 1
|
||
|
||
|
||
|
||
@app.task
def handle_request_post(url, data, debug=False, **kwargs): # pragma: no cover
    """POST *data* to *url* (forcing plain http for localhost) and log the exchange.

    Returns the HTTP status code.
    """
    if 'localhost' in url:
        # downgrade https:// to http:// for local targets
        url = 'http'+url[4:]

    reply = requests.post(url, data, verify=False)

    dologging('upload_api.log', data)
    dologging('upload_api.log', reply.status_code)

    return reply.status_code
|
||
|
||
|
||
@app.task
def add(x, y): # pragma: no cover
    """Trivial smoke-test task: return the sum of *x* and *y*."""
    total = x + y
    return total
|
||
|
||
|
||
@app.task
def handle_c2_sync(workoutid, url, headers, data, debug=False, **kwargs):
    """Push a workout to the Concept2 logbook and store the returned C2 id.

    POSTs *data* to the C2 API *url*; on success the logbook id is
    written to ``workout.uploadedtoc2`` and mirrored into the sync
    record. Failures are logged to ``c2_log.log``.

    Returns 1 on success, 0 on any failure.
    """
    response = requests.post(url, headers=headers, data=data)
    if response.status_code not in [200, 201]: # pragma: no cover
        s = 'C2 upload failed for user workoutid {workoutid} with response code {code} and error {text}'.format(
            code=response.status_code,
            workoutid = workoutid,
            text = response.text

        )
        dologging('c2_log.log',s)
        return 0

    # the C2 API returns the new logbook entry under data.id
    s = response.json()
    c2id = s['data']['id']

    try:
        workout = Workout.objects.get(id=workoutid)
    except Workout.DoesNotExist: # pragma: no cover
        # workout deleted while the upload was in flight
        dologging('c2_log.log','failed for c2id {c2id}'.format(c2id=c2id))
        return 0

    s = 'C2 upload succeeded with {c2id} user id {userid}'.format(
        c2id=c2id,
        userid=workout.user.user.id
    )

    dologging('c2_log.log',s)

    workout.uploadedtoc2 = c2id
    workout.save()

    record = create_or_update_syncrecord(workout.user, workout, c2id=c2id)


    return 1
|
||
|
||
def splitstdata(lijst): # pragma: no cover
    """Split a flat ``[t0, v0, t1, v1, ...]`` list into two numpy arrays.

    Returns ``[times, values]``; a trailing unpaired element is dropped.
    """
    npairs = len(lijst) // 2
    tijden = lijst[0:2 * npairs:2]
    waarden = lijst[1:2 * npairs:2]

    return [np.array(tijden), np.array(waarden)]
|
||
|
||
@app.task
def handle_sporttracks_workout_from_data(user, importid, source,
                                         workoutsource, debug=False, **kwargs): # pragma: no cover
    """Fetch activity *importid* from the SportTracks API and import it.

    SportTracks streams arrive as flat [time, value, ...] lists; they
    are resampled onto a common time index, written as a gzipped
    rowingdata-style CSV and posted to the internal upload service.
    Returns 1 on success, or ``(0, message)`` when the activity has
    neither distance nor heart-rate data.
    """
    r = user.rower
    authorizationstring = str('Bearer ' + r.sporttrackstoken)
    headers = {'Authorization': authorizationstring,
               'user-agent': 'sanderroosendaal',
               'Content-Type': 'application/json'}
    url = "https://api.sporttracks.mobi/api/v2/fitnessActivities/" + \
        str(importid)
    s = requests.get(url, headers=headers)

    data = s.json()

    strokedata = pd.DataFrame.from_dict({
        key: pd.Series(value, dtype='object') for key, value in data.items()
    })

    try:
        workouttype = data['type']
    except KeyError:  # pragma: no cover
        workouttype = 'other'

    if workouttype not in [x[0] for x in Workout.workouttypes]:
        workouttype = 'other'
    try:
        comments = data['comments']
    except KeyError:
        # narrowed from a bare except: only a missing key is expected here
        comments = ''

    r = Rower.objects.get(user=user)
    rowdatetime = iso8601.parse_date(data['start_time'])
    starttimeunix = arrow.get(rowdatetime).timestamp()

    try:
        title = data['name']
    except KeyError:  # pragma: no cover
        title = "Imported data"

    # prefer the distance stream; otherwise use the heart-rate timestamps
    # with zero distance
    try:
        res = splitstdata(data['distance'])
        distance = res[1]
        times_distance = res[0]
    except KeyError:  # pragma: no cover
        try:
            res = splitstdata(data['heartrate'])
            times_distance = res[0]
            distance = 0*times_distance
        except KeyError:
            return (0, "No distance or heart rate data in the workout")

    try:
        locs = data['location']

        res = splitstdata(locs)
        times_location = res[0]
        latlong = res[1]
        latcoord = []
        loncoord = []

        for coord in latlong:
            latcoord.append(coord[0])
            loncoord.append(coord[1])
    except Exception:
        # narrowed from a bare except: any missing/malformed GPS stream
        # degrades to all-zero coordinates
        times_location = times_distance
        latcoord = np.zeros(len(times_distance))
        loncoord = np.zeros(len(times_distance))
    if workouttype in mytypes.otwtypes:  # pragma: no cover
        workouttype = 'rower'

    try:
        res = splitstdata(data['cadence'])
        times_spm = res[0]
        spm = res[1]
    except KeyError:  # pragma: no cover
        times_spm = times_distance
        spm = 0*times_distance

    try:
        res = splitstdata(data['heartrate'])
        hr = res[1]
        times_hr = res[0]
    except KeyError:
        times_hr = times_distance
        hr = 0*times_distance

    # create data series and remove duplicate timestamps
    distseries = pd.Series(distance, index=times_distance)
    distseries = distseries.groupby(distseries.index).first()
    latseries = pd.Series(latcoord, index=times_location)
    latseries = latseries.groupby(latseries.index).first()
    lonseries = pd.Series(loncoord, index=times_location)
    lonseries = lonseries.groupby(lonseries.index).first()
    spmseries = pd.Series(spm, index=times_spm)
    spmseries = spmseries.groupby(spmseries.index).first()
    hrseries = pd.Series(hr, index=times_hr)
    hrseries = hrseries.groupby(hrseries.index).first()

    # Create dicts and big dataframe (rowingdata column names)
    d = {
        ' Horizontal (meters)': distseries,
        ' latitude': latseries,
        ' longitude': lonseries,
        ' Cadence (stokes/min)': spmseries,
        ' HRCur (bpm)': hrseries,
    }

    df = pd.DataFrame(d)

    df = df.groupby(level=0).last()

    cum_time = df.index.values
    df[' ElapsedTime (sec)'] = cum_time

    velo = df[' Horizontal (meters)'].diff()/df[' ElapsedTime (sec)'].diff()

    # columns required by the rowingdata CSV format, zero-filled here
    df[' Power (watts)'] = 0.0*velo

    nr_rows = len(velo.values)

    df[' DriveLength (meters)'] = np.zeros(nr_rows)
    df[' StrokeDistance (meters)'] = np.zeros(nr_rows)
    df[' DriveTime (ms)'] = np.zeros(nr_rows)
    df[' StrokeRecoveryTime (ms)'] = np.zeros(nr_rows)
    df[' AverageDriveForce (lbs)'] = np.zeros(nr_rows)
    df[' PeakDriveForce (lbs)'] = np.zeros(nr_rows)
    df[' lapIdx'] = np.zeros(nr_rows)

    unixtime = cum_time+starttimeunix
    unixtime[0] = starttimeunix

    df['TimeStamp (sec)'] = unixtime

    # smooth velocity over roughly 5 seconds before deriving pace
    dt = np.diff(cum_time).mean()
    wsize = round(5./dt)

    velo2 = ewmovingaverage(velo, wsize)

    df[' Stroke500mPace (sec/500m)'] = 500./velo2

    df = df.fillna(0)

    # BUG FIX: sort_values returns a new frame (it is not in-place); the
    # sorted result was previously discarded.
    df = df.sort_values(by='TimeStamp (sec)', ascending=True)

    csvfilename = 'media/{code}_{importid}.csv'.format(
        importid=importid,
        code=uuid4().hex[:16]
    )

    res = df.to_csv(csvfilename+'.gz', index_label='index',
                    compression='gzip')

    # the original dict listed 'title' twice; only the real title is kept
    uploadoptions = {
        'secret': UPLOAD_SERVICE_SECRET,
        'user': user.id,
        'file': csvfilename+'.gz',
        'workouttype': workouttype,
        'boattype': '1x',
        'sporttracksid': importid,
        'title': title,
    }
    session = requests.session()
    newHeaders = {'Content-type': 'application/json', 'Accept': 'text/plain'}
    session.headers.update(newHeaders)
    _ = session.post(UPLOAD_SERVICE_URL, json=uploadoptions)

    return 1
|
||
|
||
|
||
@app.task
def handle_sporttracks_sync(workoutid, url, headers, data, debug=False, **kwargs):
    """Upload a workout to SportTracks and record the remote activity id.

    POSTs the prepared payload to the SportTracks API, extracts the new
    fitnessActivity id from the returned URI, and stores it both on the
    Workout row and in the sync record.

    Returns 1 on success, 0 when SportTracks rejects the upload.
    """
    response = requests.post(url, headers=headers, data=data)
    if response.status_code not in [200, 201]:  # pragma: no cover
        return 0

    t = response.json()
    uri = t['uris'][0]
    # Raw string fix: the previous non-raw literal relied on the deprecated
    # pass-through of invalid escape sequences (\., \/, \d); the compiled
    # pattern is unchanged.
    regex = r'.*?sporttracks\.mobi\/api\/v2\/fitnessActivities/(\d+)\.json$'
    # NOTE(review): .match() returns None if the URI shape ever changes,
    # which would raise AttributeError here — confirm that is acceptable.
    m = re.compile(regex).match(uri).group(1)

    sporttracks_id = int(m)  # renamed from `id` to avoid shadowing the builtin

    workout = Workout.objects.get(id=workoutid)
    workout.uploadedtosporttracks = sporttracks_id
    workout.save()
    record = create_or_update_syncrecord(workout.user, workout, sporttracksid=sporttracks_id)

    return 1
|
||
|
||
|
||
@app.task
def handle_strava_sync(stravatoken,
                       workoutid, filename, name, activity_type, description, debug=False, **kwargs):
    """Upload a workout (tcx.gz file) to Strava and update the activity.

    Opens ``filename``, uploads it via stravalib, waits for processing,
    then stores the resulting Strava id on the Workout and updates the
    activity's type/description.  Every failure mode is logged to
    strava_fail.log and sets ``failed`` so the Workout is left untouched.

    Returns 1 (also when the upload failed — failures are log-only),
    0 only when the Workout row no longer exists.
    """
    client = stravalib.Client(access_token=stravatoken)
    failed = False
    try:
        with open(filename, 'rb') as f:
            try:
                act = client.upload_activity(f, 'tcx.gz', name=name)
                try:
                    # Poll Strava until the upload is processed (max ~30 s).
                    res = act.wait(poll_interval=1.0, timeout=30)
                except stravalib.exc.ActivityUploadFailed:  # pragma: no cover
                    dologging(
                        'strava_fail.log',
                        'Strava upload failed for Workout {id} ActivityUploadFailed'.format(
                            id=workoutid)
                    )
                    tb = traceback.format_exc()
                    dologging('strava_fail.log', tb)
                    failed = True
                except stravalib.exc.TimeoutExceeded:  # pragma: no cover
                    dologging(
                        'strava_fail.log',
                        'Strava upload failed for Workout {id} TimeOutExceeded'.format(
                            id=workoutid)
                    )
                    tb = traceback.format_exc()
                    dologging('strava_fail.log', tb)
                    failed = True
                except JSONDecodeError:  # pragma: no cover
                    dologging(
                        'strava_fail.log',
                        'Strava upload failed for Workout {id} JSONDecodeError'.format(
                            id=workoutid)
                    )
                    tb = traceback.format_exc()
                    dologging('strava_fail.log', tb)
                    failed = True
                except stravalib.exc.ObjectNotFound:  # pragma: no cover
                    dologging(
                        'strava_fail.log',
                        'Strava upload failed for Workout {id} ObjectNotFound'.format(
                            id=workoutid)
                    )
                    tb = traceback.format_exc()
                    dologging('strava_fail.log', tb)
                    failed = True
                except IndexError:  # pragma: no cover
                    dologging(
                        'strava_fail.log',
                        'Strava upload failed for Workout {id} IndexError'.format(
                            id=workoutid)
                    )
                    tb = traceback.format_exc()
                    dologging('strava_fail.log', tb)
                    failed = True
                    # temporary hack until stravalib is fixed
                    # NOTE(review): when this hack clears ``failed``, ``res``
                    # was never assigned (act.wait raised), so the
                    # ``res.id`` access below would raise NameError — confirm
                    # this path is still needed.
                    if 'LatLon' in tb:
                        dologging('strava_fail.log', 'Trying temporary fix')
                        failed = False
            except (
                    ActivityUploadFailed, stravalib.exc.RateLimitExceeded, JSONDecodeError
            ):  # pragma: no cover
                dologging(
                    'strava_fail.log', 'Strava upload failed for Workout {id}'.format(id=workoutid))
                tb = traceback.format_exc()
                dologging('strava_fail.log', tb)
                failed = True
    except FileNotFoundError:  # pragma: no cover
        # The export file was never written (or already cleaned up).
        dologging('strava_fail.log',
                  'Strava upload failed for Workout {id}'.format(id=workoutid))
        failed = True

    if not failed:
        try:
            workout = Workout.objects.get(id=workoutid)
        except Workout.DoesNotExist:  # pragma: no cover
            return 0

        workout.uploadedtostrava = res.id
        workout.save()
        record = create_or_update_syncrecord(workout.user, workout, stravaid=res.id)
        # Indoor (ergometer) workout types are flagged as trainer rides.
        trainer = False
        if workout.workouttype in mytypes.otetypes:
            trainer = True
        try:
            act = client.update_activity(res.id, activity_type=activity_type,
                                         description=description, device_name='Rowsandall.com',
                                         trainer=trainer)
            dologging('strava_export_log.log', 'Updating activity {id} to {type}'.format(
                id=workoutid,
                type=activity_type
            ))
        except TypeError:  # pragma: no cover
            # Older stravalib versions do not accept device_name; retry without it.
            act = client.update_activity(res.id, activity_type=activity_type,
                                         description=description, trainer=trainer)
            dologging('strava_export_log.log', 'Updating activity {id} to {type}'.format(
                id=workoutid,
                type=activity_type
            ))
        except:  # pragma: no cover
            e = sys.exc_info()[0]

            dologging('strava_export_log.log', 'Update activity failed with error {e} for {id} to {type}'.format(
                id=workoutid,
                type=activity_type,
                e=e
            ))
        # Remove the temporary export file; best-effort.
        try:
            os.remove(filename)
        except:  # pragma: no cover
            pass

    return 1
|
||
|
||
|
||
@app.task
def handle_c2_import_stroke_data(c2token,
                                 c2id, workoutid,
                                 starttimeunix,
                                 csvfilename, debug=True, **kwargs):
    """Import stroke data for a Concept2 logbook result.

    Tries the per-stroke endpoint first; if that is unavailable, falls back
    to the result-summary endpoint and synthesizes the stroke file from the
    totals.

    Parameters
    ----------
    c2token : str
        Concept2 OAuth bearer token.
    c2id : int
        Concept2 logbook result id.
    workoutid : int
        Local Workout id to attach the data to.
    starttimeunix : number
        Workout start time (unix seconds).
    csvfilename : str
        Destination stroke-file base name.

    Returns 1 when data were imported, 0 when both endpoints failed.
    """
    if 'workouttype' in kwargs:  # pragma: no cover
        workouttype = kwargs['workouttype']
    else:
        workouttype = 'rower'

    authorizationstring = str('Bearer ' + c2token)
    headers = {'Authorization': authorizationstring,
               'user-agent': 'sanderroosendaal',
               'Content-Type': 'application/json'}
    url = "https://log.concept2.com/api/users/me/results/"+str(c2id)+"/strokes"
    s = requests.get(url, headers=headers)
    if s.status_code == 200:
        strokedata = pd.DataFrame.from_dict(s.json()['data'])
        _ = add_c2_stroke_data_db(
            strokedata, workoutid, starttimeunix,
            csvfilename, debug=debug, workouttype=workouttype
        )

        return 1
    else:  # pragma: no cover
        # No per-stroke data available; fall back to the summary record.
        url = "https://log.concept2.com/api/users/me/results/{id}".format(
            id=c2id)

        s = requests.get(url, headers=headers)

        if s.status_code == 200:
            workoutdata = s.json()['data']
            distance = workoutdata['distance']
            c2id = workoutdata['id']
            workouttype = workoutdata['type']
            # Concept2 reports time in tenths of a second.
            totaltime = workoutdata['time']/10.
            duration = totaltime_sec_to_string(totaltime)
            duration = datetime.datetime.strptime(
                duration, '%H:%M:%S.%f').time()

            _ = create_c2_stroke_data_db(
                distance, duration, workouttype,
                workoutid, starttimeunix,
                csvfilename, debug=debug,
            )

            return 1

        return 0
    # Cleanup: the previous trailing `return 0  # pragma: no cover` after the
    # if/else was unreachable (both branches return) and has been removed.
|
||
|
||
|
||
def getagegrouprecord(age, sex='male', weightcategory='hwt',
                      distance=2000, duration=None, indf=None):
    """Estimate the age-group record power for a distance or duration.

    Fits an empirical power-vs-age curve to the record rows in ``indf`` and
    evaluates it at ``age``.

    Parameters
    ----------
    age : number or None
        Age to evaluate the fitted curve at; 0 is returned when None.
    sex, weightcategory : str
        Unused inside this function; kept for interface compatibility —
        presumably callers pre-filter ``indf`` by them (TODO confirm).
    distance : int
        Distance in meters, used when ``duration`` is falsy.
    duration : number or None
        Duration in minutes; takes precedence over ``distance``.
    indf : pandas.DataFrame or None
        Record table with ``age`` and ``power`` columns plus a ``distance``
        or ``duration`` column.  ``None`` means no data.

    Returns
    -------
    Estimated record power in watts, clamped to >= 0; 0 when no usable
    data are available.
    """
    # Bug fix: the previous default ``indf=pd.DataFrame()`` was a mutable
    # default argument created once at import time and shared across calls.
    if indf is None:
        indf = pd.DataFrame()

    power = 0
    if not duration:
        try:
            df = indf[indf['distance'] == distance]
        except KeyError:  # pragma: no cover
            df = pd.DataFrame()
    else:
        # Records are stored in seconds; duration arrives in minutes.
        duration = 60*int(duration)
        try:
            df = indf[indf['duration'] == duration]
        except KeyError:  # pragma: no cover
            df = pd.DataFrame()

    if not df.empty:
        ages = df['age']
        powers = df['power']

        # Empirical record curve: linear decline with age, minus a young-age
        # exponential ramp, plus a sinusoidal correction term.
        def fitfunc(pars, x):
            return np.abs(pars[0])*(1-x/max(120, pars[1]))-np.abs(
                pars[2])*np.exp(-x/np.abs(pars[3]))+np.abs(pars[4])*(np.sin(np.pi*x/max(50, pars[5])))

        def errfunc(pars, x, y):
            return fitfunc(pars, x)-y

        p0 = [700, 120, 700, 10, 100, 100]

        p1, success = optimize.leastsq(errfunc, p0[:],
                                       args=(ages, powers))

        # NOTE(review): leastsq's second return value is an integer status
        # code (1-4 mean success); any non-zero value is treated as success
        # here — confirm that is intended.
        if success and age is not None:
            power = fitfunc(p1, float(age))
            power = 0.5*(np.abs(power)+power)  # clamp negative fits to 0
        elif age is not None:  # pragma: no cover
            power = 0.5*(np.abs(power)+power)
        else:  # pragma: no cover
            power = 0

    return power
|
||
|
||
|
||
from rowers.models import polygon_to_path
|
||
|
||
|
||
@app.task(bind=True)
def handle_check_race_course(self,
                             f1, workoutid, courseid,
                             recordid, useremail, userfirstname,
                             **kwargs):
    """Validate a rowed GPS track against a GeoCourse and store the result.

    Loads the stroke file ``f1`` (optionally ``.csv``/``.gz``), resamples the
    GPS track to 100 ms, detects every passage through the course's gate
    polygons, keeps the fastest complete passage, and writes the outcome to a
    VirtualRaceResult (or CourseTestResult in 'coursetest' mode).  On an
    incomplete course the per-gate timing log is emailed to the user.

    Returns 1 on a completed course, 2 on an incomplete course, 0 on
    input/data errors.
    """
    # Per-run log file plus a shared course log.
    logfile = 'courselog_{workoutid}_{courseid}.log'.format(
        workoutid=workoutid, courseid=courseid)
    logfile2 = 'courses.log'
    dologging(logfile2,logfile)

    # Optional keyword overrides.
    if 'debug' in kwargs:  # pragma: no cover
        debug = kwargs['debug']
    else:
        debug = False

    if 'splitsecond' in kwargs:  # pragma: no cover
        splitsecond = kwargs['splitsecond']
    else:
        splitsecond = 0

    if 'referencespeed' in kwargs:  # pragma: no cover
        referencespeed = kwargs['referencespeed']
    else:
        referencespeed = 5.0

    if 'coursedistance' in kwargs:  # pragma: no cover
        coursedistance = kwargs['coursedistance']
    else:
        coursedistance = 0

    mode = 'race'
    if 'mode' in kwargs:  # pragma: no cover
        mode = kwargs['mode']

    summary = False
    if 'summary' in kwargs:  # pragma: no cover
        summary = kwargs['summary']

    successemail = False
    if 'successemail' in kwargs:  # pragma: no cover
        successemail = kwargs['successemail']

    # The stroke file may be stored bare, as .csv, or gzipped.
    try:
        row = rdata(csvfile=f1)
    except IOError:  # pragma: no cover
        try:
            row = rdata(csvfile=f1 + '.csv')
        except IOError:  # pragma: no cover
            try:
                row = rdata(csvfile=f1 + '.gz')
            except IOError:  # pragma: no cover
                dologging(logfile,"Did not find file "+f1)
                dologging(logfile2,"Did not find file "+f1)
                return 0

    try:
        row.extend_data()
    except KeyError:  # pragma: no cover
        dologging(logfile,"Could not extend data")
        dologging(logfile2,"Could not extend data")
        return 0

    # row.df.interpolate(inplace=True)

    # Use GPS-derived distance as the cumulative distance column.
    row.calc_dist_from_gps()
    rowdata = row.df
    rowdata['cum_dist'] = rowdata['gps_dist_calculated']

    # Without GPS coordinates a course check is impossible.
    try:
        _ = rowdata[' latitude']
    except KeyError:  # pragma: no cover
        dologging(logfile,"No GPS Data")
        dologging(logfile2,"No GPS Data")
        return 0

    rowdata.rename(columns={
        ' latitude': 'latitude',
        ' longitude': 'longitude',
        'TimeStamp (sec)': 'time',
    }, inplace=True)

    rowdata.fillna(method='backfill', inplace=True)

    # Rebase time to zero and drop everything before the requested split.
    rowdata.loc[:, 'time'] = rowdata.loc[:, 'time'].copy()-rowdata.loc[0, 'time']
    rowdata = rowdata.copy()[rowdata['time'] > splitsecond]
    # we may want to expand the time (interpolate)

    rowdata.loc[:,'dt'] = rowdata['time'].apply(
        lambda x: safetimedelta(x)
    ).values

    # Resample to a uniform 100 ms grid for the gate-crossing detection.
    rowdata = rowdata.select_dtypes(['number'])
    try:
        rowdata = rowdata.resample('100ms', on='dt').mean()
        rowdata = rowdata.interpolate()
    except TypeError:  # pragma: no cover
        pass

    course = GeoCourse.objects.get(id=courseid)
    polygons = course.polygons.all()

    # One crossing path per gate polygon.
    paths = []
    for polygon in polygons:
        path = polygon_to_path(polygon, debug=debug)
        paths.append(path)

    startsecond = 0
    endsecond = rowdata['time'].max()

    # check how many times went through start polygon
    try:
        try:
            entrytimes, entrydistances = time_in_path(rowdata, paths[0], maxmin='max', getall=True,
                                                      name=polygons[0].name, logfile=logfile)
        except AttributeError:  # pragma: no cover
            # Polygon without a name attribute — fall back to a generic label.
            entrytimes, entrydistances = time_in_path(rowdata, paths[0], maxmin='max', getall=True,
                                                      name='Start', logfile=logfile)
        logmessage = 'Course id {n}, Record id {m}'.format(n=courseid, m=recordid)
        dologging(logfile,logmessage)
        dologging(logfile2,logmessage)
        logmessage = 'Found {n} entrytimes'.format(n=len(entrytimes))
        dologging(logfile,logmessage)
        dologging(logfile2,logmessage)

    except InvalidTrajectoryError:  # pragma: no cover
        entrytimes = []
        coursecompleted = False
        coursemeters = 0
        coursetimeseconds = 0

    # Per-candidate-passage accumulators.
    cseconds = []
    cmeters = []
    ccomplete = []
    startseconds = []
    endseconds = []

    # Evaluate the course once for every start-gate entry.
    for startt in entrytimes:
        logmessage = 'Path starting at {t}'.format(t=startt)
        dologging(logfile, logmessage)
        dologging(logfile2, logmessage)
        # Keep a 10 s lead-in before the detected start entry.
        rowdata2 = rowdata[rowdata['time'] > (startt-10.)]
        #rowdata2.to_csv('debug_course.csv')

        (
            coursetimeseconds,
            coursemeters,
            coursecompleted,

        ) = coursetime_paths(rowdata2, paths, polygons=polygons, logfile=logfile)
        (
            coursetimefirst,
            coursemetersfirst,
            firstcompleted
        ) = coursetime_first(
            rowdata2, paths, polygons=polygons, logfile=logfile)

        dologging(logfile, "First time through all gates {t} seconds, {m} meters, completed {c}".format(
            t=coursetimeseconds, m=coursemeters, c=coursecompleted
        ))

        dologging(logfile, "Start time through all gates {t} seconds, {m} meters, completed {c}".format(
            t=coursetimefirst, m=coursemetersfirst, c=firstcompleted
        ))

        # Net course time/distance: subtract the run-in to the first gate.
        coursetimesecondsnet = coursetimeseconds-coursetimefirst
        coursemeters = coursemeters-coursemetersfirst

        cseconds.append(coursetimesecondsnet)
        cmeters.append(coursemeters)
        ccomplete.append(coursecompleted)
        endseconds.append(coursetimeseconds)
        startseconds.append(coursetimefirst)

    records = pd.DataFrame({
        'coursetimeseconds': cseconds,
        'coursecompleted': ccomplete,
        'coursemeters': cmeters,
        'startsecond': startseconds,
        'endsecond': endseconds,
    })

    # Keep only passages that actually completed the course.
    records = records.loc[records['coursecompleted'], : ]

    if len(records):
        # Pick the fastest completed passage.
        coursecompleted = True
        mintime = records['coursetimeseconds'].min()
        coursetimeseconds = records[records['coursetimeseconds'] == mintime]['coursetimeseconds'].min()
        coursemeters = records[records['coursetimeseconds'] == mintime]['coursemeters'].min()
        startsecond = records[records['coursetimeseconds'] == mintime]['startsecond'].min()
        endsecond = records[records['coursetimeseconds'] == mintime]['endsecond'].min()
    else:  # pragma: no cover
        coursecompleted = False

    points = 0
    if coursecompleted:
        if coursedistance == 0:
            coursedistance = coursemeters
        # Points: 100 at reference speed, scaled linearly around it.
        velo = coursedistance/coursetimeseconds
        points = 100*(2.-referencespeed/velo)

        if mode != 'coursetest':
            record = VirtualRaceResult.objects.get(id=recordid)
            record.duration = totaltime_sec_to_string(coursetimeseconds)
            record.distance=int(coursemeters)
            record.points = points
            record.startsecond = startsecond
            record.endsecond = endsecond
            record.workoutid = workoutid
            record.coursecompleted = 1
            record.save()

        else:  # pragma: no cover
            record = CourseTestResult.objects.get(id=recordid)
            record.duration = totaltime_sec_to_string(coursetimeseconds)
            record.distance = int(coursemeters)
            record.workoutid = workoutid
            record.courseid = courseid
            record.startsecond = startsecond
            record.endsecond = endsecond
            record.points = points
            record.coursecompleted = 1
            record.save()

        if summary:  # pragma: no cover
            # Recompute interval statistics restricted to the course window.
            # NOTE(review): if all three reads fail here, the earlier ``row``
            # object is silently reused — confirm that is intended.
            try:
                row = rdata(csvfile=f1)
            except IOError:  # pragma: no cover
                try:
                    row = rdata(csvfile=f1 + '.csv')
                except IOError:  # pragma: no cover
                    try:
                        row = rdata(csvfile=f1 + '.gz')
                    except IOError:  # pragma: no cover
                        pass

            vals, units, typ = row.updateinterval_metric(
                ' AverageBoatSpeed (m/s)', 0.1, mode='larger',
                debug=False, smoothwindow=15.,
                activewindow=[startsecond, endsecond]
            )

            summary = row.allstats()
            row.write_csv(f1, gzip=True)
            workout = Workout.objects.get(id=workoutid)
            workout.summary = summary
            workout.save()

        if successemail:  # pragma: no cover
            handle_sendemail_coursesucceed(
                useremail, userfirstname, logfile, workoutid
            )

        os.remove(logfile)

        return 1

    else:  # pragma: no cover
        # No complete passage: store a zeroed result and report the details.
        record = VirtualRaceResult.objects.get(id=recordid)
        record.duration = totaltime_sec_to_string(0)
        record.distance = 0
        record.workoutid = workoutid
        record.startsecond = startsecond
        record.endsecond = endsecond
        record.points = 0
        record.save()

        if mode == 'coursetest':
            record = CourseTestResult.objects.get(id=recordid)
            record.duration = totaltime_sec_to_string(0)
            record.distance = 0
            record.workoutid = workoutid
            record.startsecond = startsecond
            record.endsecond = endsecond
            record.points = 0
            record.save()

        # add times for all gates to log file
        dologging(logfile,'--- LOG of all gate times---')
        dologging(logfile2,'--- LOG of all gate times---')

        for path, polygon in zip(paths, polygons):
            (secs, meters, completed) = coursetime_paths(rowdata,
                                                         [path], polygons=[polygon], logfile=logfile)
            logmessage = " time: {t} seconds, distance: {m} meters".format(t=secs, m=meters)
            dologging(logfile,logmessage)
            dologging(logfile2,logmessage)

        # send email
        handle_sendemail_coursefail(
            useremail, userfirstname, logfile
        )
        os.remove(logfile)

        return 2

    return 0  # pragma: no cover
|
||
|
||
|
||
@app.task(bind=True)
def handle_getagegrouprecords(self,
                              df,
                              distances, durations,
                              age, sex, weightcategory,
                              **kwargs):
    """Build the age-group record power curve for one age/sex/weight class.

    For every requested distance and duration, estimate the record power via
    ``getagegrouprecord`` and collect (duration, power) points, then persist
    them with ``update_agegroup_db``.
    """
    record_durations = []
    record_powers = []

    debug = kwargs.get('debug', False)

    # The caller ships the record table as JSON; rebuild the DataFrame.
    df = pd.read_json(df)

    if sex == 'not specified':  # pragma: no cover
        return 0

    # Fixed-distance records: translate the record power into a duration.
    for dist in distances:
        record_power = getagegrouprecord(
            age,
            sex=sex,
            distance=dist,
            weightcategory=weightcategory, indf=df,
        )
        speed = (record_power/2.8)**(1./3.)
        if not np.isinf(record_power) and not np.isnan(record_power):
            try:
                record_durations.append(dist/speed)
                record_powers.append(record_power)
            except ZeroDivisionError:  # pragma: no cover
                pass

    # Fixed-duration records (durations arrive in minutes).
    for mins in durations:
        record_power = getagegrouprecord(
            age,
            sex=sex,
            duration=mins,
            weightcategory=weightcategory, indf=df
        )
        if not np.isinf(record_power) and not np.isnan(record_power):
            try:
                speed = (record_power/2.8)**(1./3.)
                _ = int(60*mins*speed)  # may raise ValueError; value unused
                record_durations.append(60.*mins)
                record_powers.append(record_power)
            except ValueError:  # pragma: no cover
                pass

    update_agegroup_db(age, sex, weightcategory, record_durations, record_powers,
                       debug=debug)

    return 1
|
||
|
||
|
||
@app.task
def handle_get_garmin_file(client_id,
                           client_secret,
                           garmintoken,
                           garminrefreshtoken,
                           userid,
                           url,
                           filetype,
                           *args,
                           **kwargs):
    """Download an activity file from Garmin and hand it to the upload service.

    Authenticates with OAuth1, streams the file at ``url`` into a uniquely
    named file under media/, and POSTs its location (plus the parsed Garmin
    activity id) to the upload service.  Always returns 1.
    """
    dologging('garminlog.log','Fetching URL {url}'.format(url=url))

    # Extract the numeric activity id from the ?id=... query parameter.
    # NOTE(review): non-raw pattern relies on deprecated '\?'/'\d' escape
    # pass-through; consider a raw string.
    regex = '.*\?id=(\d+)'
    try:  # pragma: no cover
        m = re.compile(regex).match(url).group(1)
        garminid = int(m)
    except AttributeError:
        garminid = ''

    garmin = OAuth1Session(client_id,
                           client_secret=client_secret,
                           resource_owner_key=garmintoken,
                           resource_owner_secret=garminrefreshtoken,
                           )

    # Unique destination name: random prefix + user id + file extension.
    filename = 'media/{code}_{id}.'.format(
        code=uuid4().hex[:16],
        id=userid
    )+filetype

    response = garmin.get(url, stream=True)
    if response.status_code == 200:
        with open(filename, 'wb') as out_file:
            shutil.copyfileobj(response.raw, out_file)

    del response

    # NOTE(review): on a non-200 response the file is never written, yet the
    # upload request below is still sent — confirm the upload service
    # tolerates a missing file.
    uploadoptions = {
        'secret': UPLOAD_SERVICE_SECRET,
        'user': userid,
        'file': filename,
        'title': '',
        'workouttype': 'water',
        'boattype': '1x',
        'garminid': garminid,
    }
    session = requests.session()
    newHeaders = {'Content-type': 'application/json', 'Accept': 'text/plain'}
    session.headers.update(newHeaders)
    _ = session.post(UPLOAD_SERVICE_URL, json=uploadoptions)

    return 1
|
||
|
||
|
||
@app.task(bind=True)
def long_test_task(self, aantal, debug=False, myjob=None, session_key=None):  # pragma: no cover
    """Kick off the long-running demo task, keyed by this Celery request id.

    The ``myjob`` parameter is accepted for interface compatibility but is
    always replaced by the current task request.
    """
    current_request = self.request
    return longtask.longtask(
        aantal,
        jobid=current_request.id,
        debug=debug,
        session_key=session_key,
    )
|
||
|
||
|
||
@app.task(bind=True)
def long_test_task2(self, aantal, **kwargs):  # pragma: no cover
    """Run the second long-task variant.

    A 'jobkey' in kwargs overrides the Celery request id used as the
    progress key; everything else is passed through to longtask2.
    """
    job_key = kwargs.pop('jobkey', self.request.id)
    kwargs['jobid'] = job_key
    return longtask.longtask2(aantal, **kwargs)
|
||
|
||
|
||
# process and update workouts
|
||
|
||
@app.task(bind=True)
def handle_update_empower(self,
                          useremail,
                          workoutdicts,
                          debug=False, **kwargs):  # pragma: no cover
    """Recalculate Empower Oarlock power/work-per-stroke for a set of workouts.

    For each workout dict (id, inboard, oarlength, boattype, filename) the
    stroke file is re-read and update_empower() applied; progress is reported
    to the progress-cache endpoint after every workout, and the user is
    emailed when the batch completes.

    Returns 1.
    """
    myjob = self.request
    job_id = myjob.id

    if 'jobkey' in kwargs:
        job_id = kwargs.pop('jobkey')

    aantal = len(workoutdicts)
    counter = 0

    # Performance fix: the progress URL, secret and job-id kwarg are
    # loop-invariant; they were previously rebuilt on every iteration.
    progressurl = SITE_URL
    if debug:
        progressurl = SITE_URL_DEV
        # siteurl = SITE_URL_DEV
    secret = PROGRESS_CACHE_SECRET

    kwargs['job_id'] = job_id

    progressurl += "/rowers/record-progress/"
    progressurl += job_id

    for workoutdict in workoutdicts:
        wid = workoutdict['id']
        inboard = workoutdict['inboard']
        oarlength = workoutdict['oarlength']
        boattype = workoutdict['boattype']
        f1 = workoutdict['filename']

        # oarlength consistency checks will be done in view

        # Locate the stroke file; it may be stored bare, as .csv, or gzipped.
        havedata = 1
        try:
            rowdata = rdata(csvfile=f1)
        except IOError:
            try:
                rowdata = rdata(csvfile=f1 + '.csv')
            except IOError:
                try:
                    rowdata = rdata(csvfile=f1 + '.gz')
                except IOError:
                    # Missing file: skipped, but still counted as processed.
                    havedata = 0

        if havedata:
            _ = update_empower(wid, inboard, oarlength, boattype,
                               rowdata.df, f1, debug=debug)

        counter += 1

        progress = 100.*float(counter)/float(aantal)

        post_data = {
            "secret": secret,
            "value": progress,
        }

        _ = requests.post(progressurl, data=post_data)

    subject = "Rowsandall.com Your Old Empower Oarlock data have been corrected"
    message = """
    We have updated Power and Work per Stroke data according to the instructions by Nielsen-Kellerman.
    """

    email = EmailMessage(subject, message,
                         'Rowsandall <info@rowsandall.com>',
                         [useremail])

    if 'emailbounced' in kwargs:
        emailbounced = kwargs['emailbounced']
    else:
        emailbounced = False

    # Skip notification for addresses known to bounce.
    if not emailbounced:
        _ = email.send()
    return 1
|
||
|
||
|
||
@app.task
def handle_calctrimp(id,
                     csvfilename,
                     ftp,
                     sex,
                     hrftp,
                     hrmax,
                     hrmin,
                     wps_avg,
                     debug=False, **kwargs):
    """Compute training-load metrics for a workout via the gRPC metrics
    service and store them on the Workout row.

    Parameters
    ----------
    id : int
        Workout id.
    csvfilename : str
        Stroke-file base name (``.csv``/``.gz`` variants are probed).
    ftp, sex, hrftp, hrmax, hrmin, wps_avg
        Athlete thresholds forwarded to the metrics service.

    Returns 1 on success; 0 when the file/workout cannot be found or the
    metrics service is unreachable.
    """
    tss = 0
    normp = 0
    trimp = 0
    hrtss = 0
    normv = 0
    normw = 0
    spmtss = 0

    # check what the real file name is
    if os.path.exists(csvfilename):
        csvfile = csvfilename
    elif os.path.exists(csvfilename+'.csv'):  # pragma: no cover
        csvfile = csvfilename+'.csv'
    elif os.path.exists(csvfilename+'.gz'):  # pragma: no cover
        csvfile = csvfilename+'.gz'
    else:  # pragma: no cover
        return 0
    csvfile = os.path.abspath(csvfile)

    with grpc.insecure_channel(
            target='localhost:50052',
            options=[('grpc.lb_policy_name', 'pick_first'),
                     ('grpc.enable_retries', 0), ('grpc.keepalive_timeout_ms',
                                                  10000)]
    ) as channel:
        try:
            grpc.channel_ready_future(channel).result(timeout=10)
        except grpc.FutureTimeoutError:  # pragma: no cover
            dologging('metrics.log','grpc channel time out in handle_calctrimp')
            return 0

        stub = metrics_pb2_grpc.MetricsStub(channel)
        req = metrics_pb2.WorkoutMetricsRequest(
            filename=csvfile,
            ftp=ftp,
            sex=sex,
            hrftp=hrftp,
            hrmax=hrmax,
            hrmin=hrmin,
            wpsavg=wps_avg,
        )
        try:
            response = stub.CalcMetrics(req, timeout=60)
        except Exception as e:  # pragma: no cover
            dologging('metrics.log',traceback.format_exc())
            return 0

        tss = response.tss
        normp = response.normp
        trimp = response.trimp
        normv = response.normv
        normw = response.normw
        hrtss = response.hrtss
        spmtss = response.spmtss
        dologging('metrics.log','File {csvfile}. Got tss {tss}, normp {normp} trimp {trimp} normv {normv} normw {normw} hrtss {hrtss} spmtss {spmtss}'.format(
            tss = tss,
            normp = normp,
            trimp = trimp,
            normv = normv,
            normw = normw,
            hrtss = hrtss,
            spmtss = spmtss,
            csvfile=csvfile,
        ))

    # Replace NaN results with 0 so the int() conversions below cannot raise.
    if np.isnan(tss):  # pragma: no cover
        tss = 0

    if np.isnan(normp):  # pragma: no cover
        normp = 0

    if np.isnan(trimp):  # pragma: no cover
        trimp = 0

    if np.isnan(normv):  # pragma: no cover
        normv = 0

    if np.isnan(normw):  # pragma: no cover
        normw = 0

    if np.isnan(hrtss):  # pragma: no cover
        hrtss = 0

    # Bug fix: spmtss was the only metric without a NaN guard, so a NaN
    # response made int(spmtss) below raise ValueError.
    if np.isnan(spmtss):  # pragma: no cover
        spmtss = 0

    # Discard implausibly large values (bad input data).
    if tss > 1000:  # pragma: no cover
        tss = 0

    if trimp > 1000:  # pragma: no cover
        trimp = 0

    if normp > 2000:  # pragma: no cover
        normp = 0

    if normv > 2000:  # pragma: no cover
        normv = 0

    if normw > 10000:  # pragma: no cover
        normw = 0

    if hrtss > 1000:  # pragma: no cover
        hrtss = 0

    if spmtss > 1000:  # pragma: no cover
        spmtss = 0

    try:
        workout = Workout.objects.get(id=id)
    except Workout.DoesNotExist:  # pragma: no cover
        dologging('metrics.log','Could not find workout {id}'.format(id=id))
        return 0

    workout.rscore = int(tss)
    workout.normp = int(normp)
    workout.trimp = int(trimp)
    workout.hrtss = int(hrtss)
    workout.normv = normv
    workout.normw = normw
    workout.spmtss = int(spmtss)
    workout.save()
    dologging('metrics.log','Saving to workout {id} {obscure}'.format(
        id = id,
        obscure = encoder.encode_hex(id)
    ))

    return 1
|
||
|
||
|
||
@app.task
def handle_updatedps(useremail, workoutids, debug=False, **kwargs):
    """Recompute the Distance-per-Stroke metric for a list of workouts and
    email the user when all of them are done.

    ``workoutids`` is a sequence of (workout id, stroke-file base name)
    pairs; files may be stored bare, as .csv, or gzipped.
    """
    for workout_id, basename in workoutids:
        rowdata = None
        # Probe the possible on-disk names for the stroke file.
        for candidate in (basename, basename + '.csv', basename + '.gz'):
            try:
                rowdata = rdata(csvfile=candidate)
            except IOError:
                continue
            break

        if rowdata is not None:
            update_strokedata(workout_id, rowdata.df, debug=debug)

    email = EmailMessage(
        "Rowsandall.com Your Distance per Stroke metric has been updated",
        "All your workouts now have Distance per Stroke",
        'Rowsandall <info@rowsandall.com>',
        [useremail])

    # Skip notification for addresses known to bounce.
    if not kwargs.get('emailbounced', False):
        _ = email.send()

    return 1
|
||
|
||
|
||
def sigdig(value, digits=3):
    """Round *value* to *digits* significant digits and format it as a string.

    Whole numbers (and anything that cannot be measured on a log scale, such
    as 0 or None) are returned unchanged.
    """
    try:
        magnitude = int(math.floor(math.log10(math.fabs(value))))
    except (ValueError, TypeError):  # pragma: no cover
        # 0, None or other non-numeric input: pass through untouched.
        return value

    # return integers as is
    if value % 1 == 0:  # pragma: no cover
        return value

    decimals = digits - magnitude - 1
    template = "%%.%df" % (decimals) if decimals > 0 else "%.0f"
    return template % (round(value, decimals))
|
||
|
||
@app.task
def handle_send_email_noinvoice(
        useremail, userfirstname, userlastname, reason, **kwargs):
    """Notify support that no iDoklad invoice could be created for a payment.

    Parameters
    ----------
    useremail : str
        The paying user's email.  NOTE(review): currently unused — the mail
        goes to support only; confirm whether the user should be CC'd.
    userfirstname, userlastname : str
        Identify the user in the message body.
    reason : str
        Why invoice creation failed.

    Returns 1, consistent with the other email tasks.
    """
    subject = "Idoklad: No invoice created"

    from_email = 'Rowsandall <admin@rowsandall.com>'
    d = {
        'name': userfirstname+' '+userlastname,
        'siteurl': siteurl,
        'reason': reason,
    }

    _ = send_template_email(from_email, ["support@rowsandall.com"], subject,
                            'paymentconfirmationemail_noinvoice.html',
                            d, **kwargs)

    # Consistency fix: every sibling email task returns 1; this one
    # previously returned None implicitly.
    return 1
|
||
|
||
@app.task
def handle_send_email_alert(
        useremail, userfirstname, userlastname, rowerfirstname, rowerlastname,
        alertname, stats, **kwargs):
    """Email an alert report (percentage, workout count, stroke statistics)
    for a rower to the watching user.

    Only the statistics present in ``stats`` are included in the report;
    ``startdate``/``enddate`` are required.
    """
    othertexts = kwargs.get('othertexts', None)

    # Assemble the report from whichever statistics are available.
    report = {}
    if 'percentage' in stats:
        report['Percentage'] = int(stats['percentage'])

    if 'workouts' in stats:
        report['Number of workouts'] = int(stats['workouts'])

    if 'nr_strokes_qualifying' in stats and 'nr_strokes' in stats:
        report['Data set'] = "{a} strokes out of {b}".format(
            a=stats['nr_strokes_qualifying'],
            b=stats['nr_strokes']
        )

    if 'median' in stats:
        report['Median'] = sigdig(stats['median'])

    if 'median_q' in stats:
        report['Median of qualifying strokes'] = sigdig(stats['median_q'])

    subject = "Rowsandall.com: {alertname} ({startdate} to {enddate})".format(
        startdate=stats['startdate'],
        enddate=stats['enddate'],
        alertname=alertname,
    )

    context = {
        'report': report,
        'first_name': userfirstname,
        'last_name': userlastname,
        'startdate': stats['startdate'],
        'enddate': stats['enddate'],
        'siteurl': siteurl,
        'rowerfirstname': rowerfirstname,
        'rowerlastname': rowerlastname,
        'alertname': alertname,
        'othertexts': othertexts,
    }

    _ = send_template_email('Rowsandall <info@rowsandall.com>', [useremail],
                            subject,
                            'alertemail.html',
                            context, **kwargs)

    return 1
|
||
|
||
|
||
@app.task
def handle_send_email_transaction(
        username, useremail, amount, **kwargs):
    """Send a payment confirmation for a one-off transaction to the user."""
    context = {
        'name': username,
        'siteurl': siteurl,
        'amount': amount,
    }

    _ = send_template_email(
        'Rowsandall <admin@rowsandall.com>',
        [useremail],
        "Rowsandall Payment Confirmation",
        'paymentconfirmationemail.html',
        context, **kwargs)

    return 1
|
||
|
||
|
||
@app.task
def handle_send_email_instantplan_notification(
        username, useremail, amount, planname, startdate, enddate, **kwargs
): # pragma: no cover
    """Notify the site owner that an instant training plan was sold."""
    context = {
        'name': username,
        'siteurl': siteurl,
        'amount': amount,
        'planname': planname,
        'startdate': startdate,
        'enddate': enddate,
    }

    # Goes to the owner's inbox, not to the purchasing user.
    _ = send_template_email(
        'Rowsandall <admin@rowsandall.com>',
        ['roosendaalsander@gmail.com'],
        "Rowsandall Instant Plan Notification",
        'instantplansold.html',
        context, **kwargs)

    return 1
|
||
|
||
|
||
@app.task
def handle_send_email_failed_cancel(
        name, email, username, id, **kwargs):
    """Alert support that a subscription cancellation attempt failed."""
    context = {
        'name': name,
        'siteurl': siteurl,
        'email': email,
        'username': username,
        'id': id,
    }

    _ = send_template_email(
        'Rowsandall <admin@rowsandall.com>',
        ["support@rowsandall.com"],
        "Rowsandall Subscription Cancellation Error",
        'cancel_subscription_fail_email.html',
        context, **kwargs)

    return 1
|
||
|
||
|
||
@app.task
def handle_send_email_subscription_update(
        username, useremail, planname, recurring, price, amount,
        end_of_billing_period, method, **kwargs):
    """Confirm a subscription change to the user and notify staff.

    ``method == 'down'`` selects the downgrade templates/subject; anything
    else is treated as an upgrade/renewal.
    """
    sender = 'Rowsandall <admin@rowsandall.com>'

    context = {
        'name': username,
        'siteurl': siteurl,
        'amount': amount,
        'price': price,
        'planname': planname,
        'recurring': recurring,
        'end_of_billing_period': end_of_billing_period,
    }

    if method == 'down':
        subject = "Rowsandall Change Confirmation"
        user_template = 'subscription_downgrade_email.html'
        staff_template = 'subscription_downgrade_notification.html'
    else:
        subject = "Rowsandall Payment Confirmation"
        user_template = 'subscription_update_email.html'
        staff_template = 'subscription_update_notification.html'

    _ = send_template_email(sender, [useremail],
                            subject,
                            user_template,
                            context, **kwargs)

    _ = send_template_email(sender, ['info@rowsandall.com'],
                            'Subscription Update Notification',
                            staff_template,
                            context, **kwargs)

    return 1
|
||
|
||
|
||
@app.task
def handle_send_email_subscription_create(
        username, useremail, planname, recurring, price, amount,
        end_of_billing_period, **kwargs):
    """Confirm a newly created subscription to the user and notify staff."""
    sender = 'Rowsandall <admin@rowsandall.com>'

    context = {
        'name': username,
        'siteurl': siteurl,
        'amount': amount,
        'price': price,
        'planname': planname,
        'end_of_billing_period': end_of_billing_period,
        'recurring': recurring,
    }

    _ = send_template_email(sender, [useremail],
                            "Rowsandall Payment Confirmation",
                            'subscription_create_email.html',
                            context, **kwargs)

    _ = send_template_email(sender, ['info@rowsandall.com'],
                            'Subscription Update Notification',
                            'subscription_create_notification.html',
                            context, **kwargs)
    return 1
|
||
|
||
|
||
@app.task
def handle_sendemail_raceregistration(
        useremail, username, registeredname, racename, raceid, **kwargs):
    """Tell the race owner that a new competitor registered for a virtual challenge."""
    subject = "A new competitor has registered for virtual challenge {n}".format(
        n=racename
    )

    context = {
        'username': username,
        'registeredname': registeredname,
        'siteurl': siteurl,
        'racename': racename,
        'raceid': raceid,
    }

    send_template_email('Rowsandall <info@rowsandall.com>', [useremail],
                        subject,
                        'raceregisteredemail.html',
                        context, **kwargs)

    return 1
def handle_sendemail_coursesucceed(useremail, username, logfile, workoutid, **kwargs):  # pragma: no cover
    """Notify a user that their course validation succeeded, attaching the log file."""
    context = {
        'username': username,
        # Templates link by the obfuscated (hex-encoded) workout id.
        'workoutid': encoder.encode_hex(workoutid),
    }

    send_template_email('Rowsandall <info@rowsandall.com>', [useremail],
                        "The validation of your course has succeeded",
                        'trajectorysuccessemail.html',
                        context,
                        attach_file=logfile,
                        **kwargs)

    return 1
def handle_sendemail_coursefail(
        useremail, username, logfile, **kwargs):
    """Notify a user that course validation failed; cc the site and attach the log."""
    context = {
        'username': username,
    }

    send_template_email('Rowsandall <info@rowsandall.com>', [useremail],
                        "The validation of your course has failed",
                        'trajectoryfailemail.html',
                        context,
                        cc=['info@rowsandall.com'],
                        attach_file=logfile,
                        **kwargs)

    return 1
@app.task
def handle_sendemail_optout(
        useremail, username, registeredname, racename, raceid, **kwargs):
    """Tell the race owner a competitor opted out of social media posts."""
    subject = "{name} has opted out from social media posts around challenge {n}".format(
        n=racename,
        name=registeredname
    )

    context = {
        'username': username,
        'registeredname': registeredname,
        'siteurl': siteurl,
        'racename': racename,
        'raceid': raceid,
    }

    send_template_email('Rowsandall <info@rowsandall.com>', [useremail],
                        subject,
                        'raceoptoutsocialmedia.html',
                        context, **kwargs)

    return 1
@app.task
def handle_sendemail_racesubmission(
        useremail, username, registeredname, racename, raceid, **kwargs):
    """Tell the race owner a new result was submitted for a virtual challenge."""
    subject = "A new result has been submitted for virtual challenge {n}".format(
        n=racename
    )

    context = {
        'username': username,
        'siteurl': siteurl,
        'registeredname': registeredname,
        'racename': racename,
        'raceid': raceid,
    }

    send_template_email('Rowsandall <info@rowsandall.com>', [useremail],
                        subject,
                        'racesubmissionemail.html',
                        context, **kwargs)

    return 1
@app.task
def handle_send_disqualification_email(
        useremail, username, reason, message, racename, **kwargs):
    """Inform a competitor that their race result was disqualified."""
    subject = "Your result for {n} has been disqualified on rowsandall.com".format(
        n=racename
    )

    context = {
        'username': username,
        'reason': reason,
        'siteurl': siteurl,
        # Strip HTML from the moderator's free-text message before templating.
        'message': htmlstrip(message),
        'racename': racename,
    }

    send_template_email('Rowsandall <support@rowsandall.com>', [useremail],
                        subject,
                        'disqualificationemail.html',
                        context, **kwargs)

    return 1
@app.task
def handle_send_withdraw_email(
        useremail, username, reason, message, racename, **kwargs):
    """Inform a competitor that their race result was removed."""
    subject = "Your result for {n} has been removed on rowsandall.com".format(
        n=racename
    )

    context = {
        'username': username,
        'reason': reason,
        'siteurl': siteurl,
        # Strip HTML from the moderator's free-text message before templating.
        'message': htmlstrip(message),
        'racename': racename,
    }

    send_template_email('Rowsandall <support@rowsandall.com>', [useremail],
                        subject,
                        'withdraw_email.html',
                        context, **kwargs)

    return 1
@app.task
def handle_sendemail_expired(useremail, userfirstname, userlastname, expireddate,
                             **kwargs):
    """Notify a user that their paid account expired; cc support."""
    context = {
        'first_name': userfirstname,
        'last_name': userlastname,
        'siteurl': siteurl,
        'expireddate': expireddate,
    }

    send_template_email('Rowsandall <info@rowsandall.com>', [useremail],
                        "Your rowsandall.com paid account has expired",
                        'accountexpiredemail.html',
                        context, cc=['support@rowsandall.com'], **kwargs)
    return 1
@app.task
def handle_sendemail_newftp(rower, power, mode, **kwargs):  # pragma: no cover
    """Suggest an FTP update to a rower whose recent power exceeds the stored FTP.

    ``mode`` is accepted for interface compatibility but not used here.
    """
    new_ftp = int(power)

    context = {
        'first_name': rower.user.first_name,
        'last_name': rower.user.last_name,
        'siteurl': siteurl,
        'ftp': rower.ftp,
        'newftp': new_ftp,
    }

    send_template_email('Rowsandall <info@rowsandall.com>', [rower.user.email],
                        "You may want to update your FTP on rowsandall.com",
                        'newftpemail.html',
                        context, **kwargs)

    return 1
@app.task
def handle_sendemail_breakthrough(workoutid, useremail,
                                  userfirstname, userlastname,
                                  btvalues=pd.DataFrame().to_json(),
                                  surname=False,
                                  **kwargs):
    """Email a user about a breakthrough workout, tabulating the CP records broken.

    ``btvalues`` is a JSON-serialized DataFrame with 'delta', 'cpvalues'
    and 'pwr' columns.  Returns 0 when the frame lacks a 'delta' column
    (nothing to report), 1 otherwise.
    """
    dologging('tasks.log', btvalues)
    btvalues = pd.read_json(btvalues)

    try:
        btvalues.sort_values('delta', axis=0, inplace=True)
    except KeyError:
        dologging('tasks.log', 'KeyError')
        return 0

    # Only include the surname if the user allows it.
    lastname = userlastname if surname else ''  # pragma: no cover

    tablevalues = [
        {'delta': row.delta,
         'time': str(timedelta(seconds=row.delta)),
         'cpvalue': row.cpvalues,
         'pwr': row.pwr,
         } for row in btvalues.itertuples()
    ]

    context = {
        'first_name': userfirstname,
        'last_name': lastname,
        'siteurl': siteurl,
        'workoutid': encoder.encode_hex(workoutid),
        'btvalues': tablevalues,
    }

    dologging('tasks.log', siteurl)
    dologging('tasks.log', json.dumps(tablevalues))

    send_template_email('Rowsandall <info@rowsandall.com>', [useremail],
                        "A breakthrough workout on rowsandall.com",
                        'breakthroughemail.html',
                        context, **kwargs)

    return 1
@app.task
def handle_sendemail_hard(workoutid, useremail,
                          userfirstname, userlastname,
                          btvalues=pd.DataFrame().to_json(),
                          surname=False,
                          debug=False, **kwargs):
    """Email a user that a workout came close to their CP curve ("hard" workout).

    Returns 0 when ``btvalues`` lacks a 'delta' column, 1 otherwise.
    """
    btvalues = pd.read_json(btvalues)
    try:
        btvalues.sort_values('delta', axis=0, inplace=True)
    except KeyError:
        return 0

    tablevalues = [
        {'delta': row.delta,
         'cpvalue': row.cpvalues,
         'pwr': row.pwr,
         } for row in btvalues.itertuples()
    ]

    # Only include the surname if the user allows it.
    lastname = userlastname if surname else ''  # pragma: no cover

    context = {
        'first_name': userfirstname,
        'last_name': lastname,
        'siteurl': siteurl,
        'workoutid': encoder.encode_hex(workoutid),
        'btvalues': tablevalues,
    }

    send_template_email('Rowsandall <info@rowsandall.com>', [useremail],
                        "That was a pretty hard workout on rowsandall.com",
                        'hardemail.html', context, **kwargs)

    return 1
# send email when user deletes account
@app.task
def handle_sendemail_userdeleted(name, email, debug=False, **kwargs):
    """Notify the site admin that a user deleted their account.

    Args:
        name: Display name of the deleted user.
        email: Email address of the deleted user (reported in the message).
        debug: Accepted for interface compatibility; unused.
        **kwargs: May contain 'emailbounced' to suppress actual sending.

    Returns:
        1 always.
    """
    fullemail = 'roosendaalsander@gmail.com'
    subject = 'User account deleted'
    message = 'Sander,\n\n'
    message += 'The user {name} ({email}) has just deleted his account'.format(
        name=name,
        email=email
    )
    # Bug fix: previously this rebound the `email` parameter to the
    # EmailMessage object, shadowing the user's address.
    msg = EmailMessage(subject, message,
                       'Rowsandall <info@rowsandall.com>',
                       [fullemail])

    # Skip sending when the recipient address is known to bounce.
    emailbounced = kwargs.get('emailbounced', False)  # pragma: no cover

    if not emailbounced:
        _ = msg.send()

    return 1
# send email to me when an unrecognized file is uploaded
@app.task
def handle_sendemail_unrecognized(unrecognizedfile, useremail,
                                  debug=False, **kwargs):
    """Forward an unrecognized upload to the admin for inspection, then delete it.

    Args:
        unrecognizedfile: Path to the rejected upload; attached if readable
            and removed afterwards (best effort).
        useremail: Address of the uploading user, quoted in the message body.
        debug: Accepted for interface compatibility; unused.
        **kwargs: May contain 'emailbounced' to suppress actual sending.

    Returns:
        1 always.
    """
    fullemail = 'roosendaalsander@gmail.com'
    subject = "Unrecognized file from Rowsandall.com"
    message = "Dear Sander,\n\n"
    message += "Please find attached a file that someone tried to upload to rowsandall.com."
    message += " The file was not recognized as a valid file type.\n\n"
    message += "User Email " + useremail + "\n\n"
    message += "Best Regards, the Rowsandall Team"

    email = EmailMessage(subject, message,
                         'Rowsandall <info@rowsandall.com>',
                         [fullemail])

    # Attachment is best-effort: the file may already be gone.
    try:
        email.attach_file(unrecognizedfile)
    except IOError:  # pragma: no cover
        pass

    emailbounced = kwargs.get('emailbounced', False)  # pragma: no cover

    if not emailbounced:
        _ = email.send()

    # Remove the uploaded file; narrow except instead of a bare one so
    # programming errors are not silently swallowed.
    try:
        os.remove(unrecognizedfile)
    except OSError:  # pragma: no cover
        pass

    return 1
# send email to owner when an unrecognized file is uploaded
@app.task
def handle_sendemail_unrecognizedowner(useremail, userfirstname,
                                       debug=False, **kwargs):
    """Tell the uploader that their file type was not recognized."""
    context = {
        'first_name': userfirstname,
        'siteurl': siteurl,
    }

    send_template_email('Rowsandall <info@rowsandall.com>', [useremail],
                        "Unrecognized file from Rowsandall.com",
                        'unrecognizedemail.html', context,
                        **kwargs)

    return 1
@app.task
def handle_sendemailics(first_name, last_name, email, icsfile, **kwargs):
    """Email a calendar (.ics) file to a user and delete the temp file.

    Args:
        first_name, last_name: Used for the display name of the recipient.
        email: Recipient address.
        icsfile: Path to the generated .ics file; attached and then removed.

    Returns:
        1 always.
    """
    fullemail = first_name + " " + last_name + " " + "<" + email + ">"
    subject = "Calendar File from Rowsandall.com"

    d = {'first_name': first_name,
         'siteurl': siteurl,
         }

    from_email = 'Rowsandall <info@rowsandall.com>'

    _ = send_template_email(from_email, [fullemail],
                            subject, 'icsemail.html', d,
                            attach_file=icsfile, **kwargs)

    # Guarded cleanup, consistent with handle_sendemail_ical: a missing
    # temp file must not fail the task after the email went out.
    try:
        os.remove(icsfile)
    except OSError:  # pragma: no cover
        pass
    return 1
@app.task
def handle_sendemailkml(first_name, last_name, email, kmlfile, **kwargs):
    """Email a KML file to a user and delete the temp file.

    Args:
        first_name, last_name: Used for the display name of the recipient.
        email: Recipient address.
        kmlfile: Path to the generated KML file; attached and then removed.

    Returns:
        1 always.
    """
    fullemail = first_name + " " + last_name + " " + "<" + email + ">"
    subject = "File from Rowsandall.com"

    d = {'first_name': first_name,
         'siteurl': siteurl,
         }

    from_email = 'Rowsandall <info@rowsandall.com>'

    _ = send_template_email(from_email, [fullemail],
                            subject, 'kmlemail.html', d,
                            attach_file=kmlfile, **kwargs)

    # Guarded cleanup, consistent with the other attachment tasks.
    try:
        os.remove(kmlfile)
    except OSError:  # pragma: no cover
        pass
    return 1
# Send email with TCX attachment
@app.task
def handle_sendemailtcx(first_name, last_name, email, tcxfile, **kwargs):
    """Email a TCX export to a user and delete the temp file.

    Args:
        first_name, last_name: Used for the display name of the recipient.
        email: Recipient address.
        tcxfile: Path to the generated TCX file; attached and then removed.

    Returns:
        1 always.
    """
    fullemail = first_name + " " + last_name + " " + "<" + email + ">"
    subject = "File from Rowsandall.com"

    d = {'first_name': first_name,
         'siteurl': siteurl,
         }

    from_email = 'Rowsandall <info@rowsandall.com>'

    _ = send_template_email(from_email, [fullemail],
                            subject, 'tcxemail.html', d,
                            attach_file=tcxfile, **kwargs)

    # Guarded cleanup, consistent with the other attachment tasks.
    try:
        os.remove(tcxfile)
    except OSError:  # pragma: no cover
        pass
    return 1
@app.task
def handle_zip_file(emailfrom, subject, file, **kwargs):  # pragma: no cover
    """Forward an uploaded zip to workouts@rowsandall.com for processing.

    Args:
        emailfrom: Sender address to use on the forwarded mail.
        subject: Subject line of the forwarded mail.
        file: Path of the zip archive to attach.
        **kwargs: May contain 'debug' to enable progress prints.

    Returns:
        1 always (after a 60s pause that throttles bulk uploads).
    """
    message = "... zip processing ... "
    # Idiomatic default lookup instead of try/except KeyError.
    debug = kwargs.get('debug', False)

    if debug:
        print(message)

    email = EmailMessage(subject, message,
                         emailfrom,
                         ['workouts@rowsandall.com'])
    email.attach_file(file)
    if debug:
        print("attaching")

    _ = email.send()

    if debug:
        print("sent")
    # Deliberate throttle between successive zip forwards.
    time.sleep(60)
    return 1
# Send email with CSV attachment
@app.task
def handle_sendemailsummary(first_name, last_name, email, csvfile, **kwargs):
    """Email a workout summary CSV to a user and delete the temp file.

    Args:
        first_name, last_name: Used for the display name of the recipient.
        email: Recipient address.
        csvfile: Path to the summary CSV; attached and then removed.

    Returns:
        1 always.
    """
    fullemail = first_name + " " + last_name + " " + "<" + email + ">"
    subject = "File from Rowsandall.com"

    d = {'first_name': first_name,
         'siteurl': siteurl,
         }

    from_email = 'Rowsandall <info@rowsandall.com>'

    _ = send_template_email(from_email, [fullemail],
                            subject, 'summarymail.html', d,
                            attach_file=csvfile,
                            **kwargs)

    # Narrow except: only ignore filesystem errors, not everything.
    try:
        os.remove(csvfile)
    except OSError:  # pragma: no cover
        pass

    return 1
@app.task
def handle_sendemailcsv(first_name, last_name, email, csvfile, **kwargs):
    """Email a CSV export to a user (the file is not deleted here)."""
    recipient = first_name + " " + last_name + " " + "<" + email + ">"

    context = {'first_name': first_name,
               'siteurl': siteurl,
               }

    send_template_email('Rowsandall <info@rowsandall.com>', [recipient],
                        "File from Rowsandall.com", 'csvemail.html', context,
                        attach_file=csvfile, **kwargs)

    return 1
@app.task
def handle_sendemail_ical(first_name, last_name, email, url, icsfile, **kwargs):
    """Email a sessions calendar (.ics) plus its subscription URL, then delete the file.

    Args:
        first_name, last_name: Used for the display name of the recipient.
        email: Recipient address.
        url: Calendar subscription URL included in the template context.
        icsfile: Path to the generated .ics file; attached and then removed.
        **kwargs: May contain 'debug' to switch site URLs to the dev host.

    Returns:
        1 always.
    """
    fullemail = first_name + " " + last_name + " " + "<" + email + ">"
    subject = "Calendar File for your sessions from Rowsandall.com"

    # Idiomatic default lookup instead of in-test plus indexing.
    debug = kwargs.get('debug', False)  # pragma: no cover

    siteurl = SITE_URL
    if debug:  # pragma: no cover
        siteurl = SITE_URL_DEV

    d = {'first_name': first_name,
         'siteurl': siteurl,
         'url': url,
         }

    from_email = 'Rowsandall <info@rowsandall.com>'

    _ = send_template_email(from_email, [fullemail],
                            subject, 'icsemail.html', d,
                            attach_file=icsfile, **kwargs)

    # Narrow except: only ignore filesystem errors, not everything.
    try:
        os.remove(icsfile)
    except OSError:  # pragma: no cover
        pass

    return 1
@app.task
def handle_sendemailfile(first_name, last_name, email, csvfile, **kwargs):
    """Email an arbitrary file to a user, optionally deleting it afterwards.

    Args:
        first_name, last_name: Used for the display name of the recipient.
        email: Recipient address.
        csvfile: Path of the file to attach.
        **kwargs: May contain 'delete' (truthy → remove the file after send).

    Returns:
        1 always.
    """
    fullemail = first_name + " " + last_name + " " + "<" + email + ">"
    subject = "File from Rowsandall.com"

    d = {'first_name': first_name,
         'siteurl': siteurl,
         }

    from_email = 'Rowsandall <info@rowsandall.com>'

    _ = send_template_email(from_email, [fullemail],
                            subject, 'fileemail.html', d,
                            attach_file=csvfile, **kwargs)

    # Idiomatic default lookup instead of in-test plus indexing.
    dodelete = kwargs.get('delete', False)  # pragma: no cover

    if dodelete:  # pragma: no cover
        # Narrow except: only ignore filesystem errors.
        try:
            os.remove(csvfile)
        except OSError:
            pass

    return 1
# Calculate wind and stream corrections for OTW rowing
@app.task(bind=True)
def handle_otwsetpower(self, f1, boattype, boatclass, coastalbrand, weightvalue,
                       first_name, last_name, email, workoutid,
                       **kwargs):
    """Run the OTW power calculation for a workout via the gRPC calculator.

    Resolves the stroke CSV for ``f1``, asks the Go calculator service to
    compute power, reloads the corrected data, updates the stored stroke
    data, checks for a breakthrough, and emails the user when done.

    Returns 1 on success, 0 when the file or the calculator is unavailable.
    """
    myjob = self.request
    job_id = myjob.id

    # An explicit jobkey overrides the Celery request id for progress tracking.
    if 'jobkey' in kwargs:
        job_id = kwargs.pop('jobkey')
    ps = kwargs.get('ps', [1, 1, 1, 1])  # pragma: no cover
    ratio = kwargs.get('ratio', 1.0)  # pragma: no cover
    debug = kwargs.get('debug', False)  # pragma: no cover

    kwargs['jobid'] = job_id

    weightvalue = float(weightvalue)

    # Resolve the actual stroke file: bare name, .csv, or gzipped copy.
    if os.path.exists(f1):
        csvfile = f1
    elif os.path.exists(f1 + '.csv'):  # pragma: no cover
        csvfile = f1 + '.csv'
    elif os.path.exists(f1 + '.gz'):  # pragma: no cover
        csvfile = f1 + '.gz'
    else:  # pragma: no cover
        return 0

    csvfile = os.path.abspath(csvfile)

    # Load stroke data; retry up to three times on transient IO errors.
    for attempt in range(3):
        try:
            rowdata = rdata(csvfile=csvfile)
            break
        except IOError:  # pragma: no cover
            if attempt == 2:
                raise

    # Do not overwrite measured (NK Empower) power data: a non-zero 'wash'
    # column indicates a power meter was present.
    powermeasured = False
    try:  # pragma: no cover
        if rowdata.df['wash'].mean() != 0:
            powermeasured = True
    except KeyError:
        pass

    progressurl = SITE_URL
    siteurl = SITE_URL
    if debug:  # pragma: no cover
        progressurl = SITE_URL_DEV
        siteurl = SITE_URL_DEV
    secret = PROGRESS_CACHE_SECRET

    progressurl += "/rowers/record-progress/"
    progressurl += job_id + '/'

    # Hand the heavy lifting to the Go calculator over gRPC.
    with grpc.insecure_channel(
            target='localhost:50051',
            options=[('grpc.lb_policy_name', 'pick_first'),
                     ('grpc.enable_retries', 0),
                     ('grpc.keepalive_timeout_ms', 10000)]
    ) as channel:
        try:
            grpc.channel_ready_future(channel).result(timeout=10)
        except grpc.FutureTimeoutError:  # pragma: no cover
            return 0

        stub = calculator_pb2_grpc.PowerStub(channel)
        response = stub.CalcPower(calculator_pb2.WorkoutPowerRequest(
            filename=csvfile,
            boattype=boattype,
            coastalbrand=coastalbrand,
            crewmass=weightvalue,
            powermeasured=powermeasured,
            progressurl=progressurl,
            secret=secret,
            silent=False,
            boatclass=boatclass,
        ), timeout=1200)
        result = response.result
    if result == 0:  # pragma: no cover
        # send failure email
        return 0

    # Reload the now-corrected stroke data (same retry scheme as above).
    for attempt in range(3):
        try:
            rowdata = rdata(csvfile=csvfile)
            break
        except IOError:  # pragma: no cover
            if attempt == 2:
                raise

    update_strokedata(workoutid, rowdata.df, debug=debug)

    totaltime = rowdata.df['TimeStamp (sec)'].max(
    ) - rowdata.df['TimeStamp (sec)'].min()
    try:
        totaltime = totaltime + rowdata.df.loc[0, ' ElapsedTime (sec)']
    except KeyError:  # pragma: no cover
        pass
    df = getsmallrowdata_pd(
        ['power', 'workoutid', 'time'], ids=[workoutid],
        debug=debug)

    # Critical-power analysis over a log-spaced duration grid.
    maxt = 1.05 * totaltime
    logarr = datautils.getlogarr(maxt)
    dfgrouped = df.groupby(['workoutid'])
    delta, cpvalues, avgpower = datautils.getcp(dfgrouped, logarr)

    res, btvalues, res2 = utils.isbreakthrough(
        delta, cpvalues, ps[0], ps[1], ps[2], ps[3], ratio)
    if res:  # pragma: no cover
        handle_sendemail_breakthrough(
            workoutid, email,
            first_name,
            last_name, btvalues=btvalues.to_json())

    # Completion email to the user.
    fullemail = first_name + " " + last_name + " " + "<" + email + ">"
    context = {
        'first_name': first_name,
        'siteurl': siteurl,
        'workoutid': encoder.encode_hex(workoutid),
    }

    send_template_email('Rowsandall <info@rowsandall.com>', [fullemail],
                        "Your OTW Physics Calculations are ready",
                        'otwpoweremail.html', context,
                        **kwargs)

    return 1
@app.task
def handle_makeplot(f1, f2, t, hrdata, plotnr, imagename,
                    debug=False, **kwargs):
    """Render workout plot number ``plotnr`` to ``static/plots/<imagename>``.

    Builds a rowingdata rower profile from ``hrdata``, loads stroke data
    from ``f2`` (falling back to the gzipped copy), downsamples long
    sessions for time/meters plots, draws the requested figure and writes
    it out as an image.

    Args:
        f1: Accepted for interface compatibility; unused here.
        f2: Path to the stroke CSV ('.gz' suffix tried as fallback).
        t: Plot title (may be None for plotnr 3).
        hrdata: Dict of HR/power zone settings for the rower profile.
        plotnr: Selector for which plot type to produce.
        imagename: Output file name under static/plots/.
        debug: Accepted for interface compatibility; unused here.
        **kwargs: 'oterange'/'otwrange' pace ranges are popped; the rest is
            forwarded to the plotting calls.

    Returns:
        The image name on success, 0 when no figure could be produced
        (including unhandled ``plotnr`` values).
    """
    hrmax = hrdata['hrmax']
    hrut2 = hrdata['hrut2']
    hrut1 = hrdata['hrut1']
    hrat = hrdata['hrat']
    hrtr = hrdata['hrtr']
    hran = hrdata['hran']
    ftp = hrdata['ftp']
    powerzones = deserialize_list(hrdata['powerzones'])
    hrzones = deserialize_list(hrdata['hrzones'])
    powerperc = np.array(deserialize_list(hrdata['powerperc'])).astype(float)

    rr = rowingdata.rower(hrmax=hrmax, hrut2=hrut2,
                          hrut1=hrut1, hrat=hrat,
                          hrtr=hrtr, hran=hran,
                          ftp=ftp, powerperc=powerperc,
                          powerzones=powerzones,
                          hrzones=hrzones)
    try:
        row = rdata(csvfile=f2, rower=rr)
    except IOError:  # pragma: no cover
        row = rdata(csvfile=f2 + '.gz', rower=rr)

    # Treat the session as power-instrumented when mean power is plausible.
    try:
        haspower = row.df[' Power (watts)'].mean() > 50
    except (TypeError, KeyError):
        haspower = False

    oterange = kwargs.pop('oterange', [85, 240])
    otwrange = kwargs.pop('otwrange', [85, 185])

    # Downsample long sessions to ~1200 rows for the line plots.
    nr_rows = len(row.df)
    if (plotnr in [1, 2, 4, 5, 8, 11, 9, 12]) and (nr_rows > 1200):  # pragma: no cover
        binsize = int(nr_rows / 1200.)
        try:
            df = row.df.select_dtypes(['number'])
            # Integer division so consecutive rows share a bucket; the
            # previous float division gave every row a unique key, so no
            # downsampling actually happened (Python 2 -> 3 regression).
            df = df.groupby(lambda x: x // binsize).mean()
            row.df = df
        except Exception:
            pass
        nr_rows = len(row.df)

    # Default so the None-check below also covers unhandled plot numbers
    # (previously an UnboundLocalError).
    fig1 = None
    if (plotnr == 1):
        fig1 = row.get_timeplot_erg(t, pacerange=oterange, **kwargs)
    elif (plotnr == 2):
        fig1 = row.get_metersplot_erg(t, pacerange=oterange, **kwargs)
    elif (plotnr == 3):
        try:
            t += ' - Heart Rate Distribution'
        except TypeError:  # pragma: no cover
            t = 'Heart Rate Distribution'
        fig1 = row.get_piechart(t, **kwargs)
    elif (plotnr == 4):
        if haspower:  # pragma: no cover
            fig1 = row.get_timeplot_otwempower(t, pacerange=otwrange, **kwargs)
        else:
            fig1 = row.get_timeplot_otw(t, pacerange=otwrange, **kwargs)
    elif (plotnr == 5):
        if haspower:  # pragma: no cover
            fig1 = row.get_metersplot_otwempower(
                t, pacerange=otwrange, **kwargs)
        else:
            fig1 = row.get_metersplot_otw(t, pacerange=otwrange, **kwargs)
    elif (plotnr == 6):
        t += ' - Heart Rate Distribution'
        fig1 = row.get_piechart(t, **kwargs)
    elif (plotnr == 7) or (plotnr == 10):
        fig1 = row.get_metersplot_erg2(t, **kwargs)
    elif (plotnr == 8) or (plotnr == 11):
        fig1 = row.get_timeplot_erg2(t, **kwargs)
    elif (plotnr == 9) or (plotnr == 12):
        fig1 = row.get_time_otwpower(t, pacerange=otwrange, **kwargs)
    elif (plotnr == 13) or (plotnr == 16):
        t += ' - Power Distribution'
        fig1 = row.get_power_piechart(t, **kwargs)

    if fig1 is None:  # pragma: no cover
        return 0

    canvas = FigureCanvas(fig1)

    canvas.print_figure('static/plots/' + imagename)
    # Release figure memory explicitly; these tasks run in a long-lived worker.
    plt.close(fig1)
    fig1.clf()
    gc.collect()

    return imagename
# Team related remote tasks
@app.task
def handle_sendemail_coachrequest(email, name, code, coachname,
                                  debug=False, **kwargs):
    """Invite an athlete (by email) to be added to a coach's roster."""
    subject = 'Invitation to add {n} to your athletes'.format(n=name)
    from_email = 'Rowsandall <info@rowsandall.com>'

    siteurl = SITE_URL_DEV if debug else SITE_URL  # pragma: no cover

    context = {
        'name': name,
        'coach': coachname,
        'code': code,
        'siteurl': siteurl
    }

    send_template_email(from_email, [email],
                        subject, 'coachrequestemail.html', context,
                        **kwargs)

    return 1
@app.task
def handle_sendemail_coachoffer_rejected(coachemail, coachname, name,
                                         debug=False, **kwargs):
    """Tell a coach that an athlete rejected their coaching offer."""
    subject = '{n} has rejected your offer to be his coach on rowsandall.com'.format(
        n=name)
    from_email = 'Rowsandall <info@rowsandall.com>'

    siteurl = SITE_URL_DEV if debug else SITE_URL  # pragma: no cover

    context = {
        'name': name,
        'coach': coachname,
        'siteurl': siteurl,
    }

    send_template_email(from_email, [coachemail],
                        subject, 'coachofferrejectedemail.html',
                        context,
                        **kwargs)
    return 1
@app.task
def handle_sendemail_coachrequest_rejected(email, coachname, name,
                                           debug=False, **kwargs):
    """Tell an athlete that a coach rejected their coaching request."""
    subject = '{n} has rejected your coaching request on rowsandall.com'.format(
        n=coachname)
    from_email = 'Rowsandall <info@rowsandall.com>'

    siteurl = SITE_URL_DEV if debug else SITE_URL  # pragma: no cover

    context = {
        'name': name,
        'coach': coachname,
        'siteurl': siteurl,
    }

    send_template_email(from_email, [email],
                        subject, 'coachrequestrejectedemail.html',
                        context,
                        **kwargs)
    return 1
@app.task
def handle_sendemail_coachrequest_accepted(email, coachname, name,
                                           debug=False, **kwargs):
    """Tell an athlete that a coach accepted their coaching request."""
    subject = '{n} has accepted your coaching request on rowsandall.com'.format(
        n=coachname)
    from_email = 'Rowsandall <info@rowsandall.com>'

    siteurl = SITE_URL_DEV if debug else SITE_URL  # pragma: no cover

    context = {
        'name': name,
        'coach': coachname,
        'siteurl': siteurl,
    }

    send_template_email(from_email, [email],
                        subject, 'coachrequestacceptedemail.html',
                        context,
                        **kwargs)
    return 1
@app.task
def handle_sendemail_coachoffer_accepted(coachemail, coachname, name,
                                         debug=False, **kwargs):
    """Tell a coach that an athlete accepted their coaching offer."""
    subject = '{n} has accepted your coaching offer on rowsandall.com'.format(
        n=name)
    from_email = 'Rowsandall <info@rowsandall.com>'

    siteurl = SITE_URL_DEV if debug else SITE_URL  # pragma: no cover

    context = {
        'name': name,
        'coach': coachname,
        'siteurl': siteurl,
    }

    send_template_email(from_email, [coachemail],
                        subject, 'coachofferacceptedemail.html',
                        context,
                        **kwargs)
    return 1
@app.task
def handle_sendemail_coacheerequest(email, name, code, coachname,
                                    debug=False, **kwargs):
    """Tell an athlete that a coach is requesting access to their data."""
    subject = '{n} requests coach access to your data on rowsandall.com'.format(
        n=coachname)
    from_email = 'Rowsandall <info@rowsandall.com>'

    siteurl = SITE_URL_DEV if debug else SITE_URL  # pragma: no cover

    context = {
        'name': name,
        'coach': coachname,
        'code': code,
        'siteurl': siteurl
    }

    send_template_email(from_email, [email],
                        subject, 'coacheerequestemail.html', context,
                        **kwargs)

    return 1
@app.task
def handle_sendemail_invite(email, name, code, teamname, manager,
                            debug=False, **kwargs):
    """Invite a user (by email, with a join code) to join a team."""
    subject = 'Invitation to join team ' + teamname

    siteurl = SITE_URL_DEV if debug else SITE_URL  # pragma: no cover

    context = {
        'name': name,
        'manager': manager,
        'code': code,
        'teamname': teamname,
        'siteurl': siteurl
    }

    send_template_email('Rowsandall <info@rowsandall.com>', [email],
                        subject, 'teaminviteemail.html', context,
                        **kwargs)

    return 1
@app.task
def handle_sendemailnewresponse(first_name, last_name,
                                email,
                                commenter_first_name,
                                commenter_last_name,
                                comment,
                                workoutname, workoutid, commentid,
                                debug=False, **kwargs):
    """Notify a user that someone responded to a comment on their session."""
    from_email = 'Rowsandall <info@rowsandall.com>'
    subject = 'New comment on session ' + workoutname

    comment = u'' + comment

    siteurl = SITE_URL_DEV if debug else SITE_URL  # pragma: no cover

    # Caller may override where the comment lives (e.g. planned sessions).
    sessiontype = 'workout'
    if 'sessiontype' in kwargs:  # pragma: no cover
        sessiontype = kwargs.pop('sessiontype')

    commentlink = '/rowers/workout/{workoutid}/comment/'.format(
        workoutid=encoder.encode_hex(workoutid))
    if 'commentlink' in kwargs:  # pragma: no cover
        commentlink = kwargs.pop('commentlink')

    context = {
        'first_name': first_name,
        'commenter_first_name': commenter_first_name,
        'commenter_last_name': commenter_last_name,
        'comment': comment,
        'workoutname': workoutname,
        'siteurl': siteurl,
        'workoutid': workoutid,
        'commentid': commentid,
        'sessiontype': sessiontype,
        'commentlink': commentlink,
    }

    send_template_email(from_email,
                        [email],
                        subject, 'teamresponseemail.html', context, **kwargs)

    return 1
@app.task
def handle_sendemailnewcomment(first_name,
                               last_name,
                               email,
                               commenter_first_name,
                               commenter_last_name,
                               comment, workoutname,
                               workoutid,
                               debug=False, **kwargs):
    """Notify a user that someone commented on their session."""
    from_email = 'Rowsandall <info@rowsandall.com>'
    subject = 'New comment on session ' + workoutname

    comment = u'' + comment

    siteurl = SITE_URL
    if debug:
        siteurl = SITE_URL_DEV

    # Caller may override where the comment lives (e.g. planned sessions).
    sessiontype = 'workout'
    if 'sessiontype' in kwargs:  # pragma: no cover
        sessiontype = kwargs.pop('sessiontype')

    commentlink = '/rowers/workout/{workoutid}/comment/'.format(
        workoutid=encoder.encode_hex(workoutid))
    if 'commentlink' in kwargs:  # pragma: no cover
        commentlink = kwargs.pop('commentlink')

    context = {
        'first_name': first_name,
        'commenter_first_name': commenter_first_name,
        'commenter_last_name': commenter_last_name,
        'comment': comment,
        'workoutname': workoutname,
        'siteurl': siteurl,
        'workoutid': encoder.encode_hex(workoutid),
        'sessiontype': sessiontype,
        'commentlink': commentlink,
    }

    send_template_email(from_email, [email], subject,
                        'teamresponseemail.html', context, **kwargs)

    return 1
@app.task
def handle_send_template_email(template, email, fromemail, rowername,
                               subject, message, debug=False, **kwargs):
    """Send an arbitrary templated message; bcc the user, cc the sender, copy info@."""
    context = {
        'message': message,
        'rowername': rowername,
    }

    send_template_email('Rowsandall <info@rowsandall.com>',
                        ['info@rowsandall.com'], subject,
                        template, context, cc=[fromemail], bcc=[email], **kwargs)

    return 1
@app.task
def handle_sendemail_message(email, fromemail, rowername, message, teamname, managername,
                             debug=False, **kwargs):
    """Deliver a team broadcast message from a manager to one member."""
    subject = 'New message from team ' + teamname

    context = {
        'rowername': rowername,
        'teamname': teamname,
        'managername': managername,
        'message': message,
    }

    send_template_email('Rowsandall <info@rowsandall.com>', [email], subject,
                        'teammessage.html', context, **kwargs)

    return 1
@app.task
def handle_sendemail_request(email, name, code, teamname, requestor, id,
                             debug=False, **kwargs):
    """Notify a team manager of a membership request (with accept code)."""
    subject = 'Request to join team ' + teamname
    from_email = 'Rowsandall <info@rowsandall.com>'

    siteurl = SITE_URL_DEV if debug else SITE_URL  # pragma: no cover

    context = {
        'requestor': requestor,
        'teamname': teamname,
        'code': code,
        'siteurl': siteurl,
        'id': id,
        'first_name': name,
    }

    send_template_email(from_email, [email], subject,
                        'teamrequestemail.html', context, **kwargs)

    return 1
@app.task
def handle_sendemail_request_accept(email, name, teamname, managername,
                                    debug=False, **kwargs):
    """Welcome a user whose team membership request was accepted."""
    subject = 'Welcome to ' + teamname
    from_email = 'Rowsandall <info@rowsandall.com>'

    siteurl = SITE_URL_DEV if debug else SITE_URL

    context = {
        'first_name': name,
        'managername': managername,
        'teamname': teamname,
        'siteurl': siteurl,
    }
    send_template_email(from_email, [email], subject,
                        'teamwelcomeemail.html', context, **kwargs)

    return 1
@app.task
def handle_sendemail_request_reject(email, name, teamname, managername,
                                    debug=False, **kwargs):
    """Tell an applicant their request to join *teamname* was rejected.

    Renders 'teamrejectemail.html'. Always returns 1.
    """
    subject = 'Your application to ' + teamname + ' was rejected'
    sender = 'Rowsandall <info@rowsandall.com>'

    siteurl = SITE_URL
    if debug:
        siteurl = SITE_URL_DEV

    context = {
        'first_name': name,
        'managername': managername,
        'teamname': teamname,
        'siteurl': siteurl,
    }

    _ = send_template_email(sender, [email], subject,
                            'teamrejectemail.html', context, **kwargs)

    return 1
|
||
|
||
|
||
@app.task
def handle_sendemail_member_dropped(email, name, teamname, managername,
                                    debug=False, **kwargs):
    """Tell a member they were removed from *teamname* (dropped by manager).

    Renders 'teamdropemail.html'. Always returns 1.
    """
    subject = 'You were removed from ' + teamname
    sender = 'Rowsandall <info@rowsandall.com>'

    siteurl = SITE_URL
    if debug:
        siteurl = SITE_URL_DEV

    context = {
        'first_name': name,
        'managername': managername,
        'teamname': teamname,
        'siteurl': siteurl,
    }

    _ = send_template_email(sender, [email], subject,
                            'teamdropemail.html', context, **kwargs)

    return 1
|
||
|
||
|
||
@app.task
def handle_sendemail_team_removed(email, name, teamname, managername,
                                  debug=False, **kwargs):
    """Tell a member they were removed because the team itself was removed.

    Renders 'teamremoveemail.html' (same subject as the member-dropped
    email, different template). Always returns 1.
    """
    subject = 'You were removed from ' + teamname
    sender = 'Rowsandall <info@rowsandall.com>'

    siteurl = SITE_URL
    if debug:
        siteurl = SITE_URL_DEV

    context = {
        'first_name': name,
        'managername': managername,
        'teamname': teamname,
        'siteurl': siteurl,
    }

    _ = send_template_email(sender, [email], subject,
                            'teamremoveemail.html', context, **kwargs)

    return 1
|
||
|
||
|
||
@app.task
def handle_sendemail_invite_reject(email, name, teamname, managername,
                                   debug=False, **kwargs):
    """Notify the inviter that an invitation was rejected.

    Note: the subject interpolates *name* (the invitee), not the team
    name. Renders 'teaminviterejectemail.html'. Always returns 1.
    """
    subject = 'Your invitation to ' + name + ' was rejected'
    sender = 'Rowsandall <info@rowsandall.com>'

    siteurl = SITE_URL
    if debug:
        siteurl = SITE_URL_DEV

    context = {
        'first_name': name,
        'managername': managername,
        'teamname': teamname,
        'siteurl': siteurl,
    }

    _ = send_template_email(sender, [email], subject,
                            'teaminviterejectemail.html', context, **kwargs)

    return 1
|
||
|
||
|
||
|
||
@app.task
def handle_sendemail_invite_accept(email, name, teamname, managername,
                                   debug=False, **kwargs):
    """Notify the inviter that an invitation was accepted.

    Note: the subject interpolates *name* (the invitee), not the team
    name. Renders 'teaminviteacceptemail.html'. Always returns 1.
    """
    subject = 'Your invitation to ' + name + ' was accepted'
    sender = 'Rowsandall <info@rowsandall.com>'

    siteurl = SITE_URL
    if debug:
        siteurl = SITE_URL_DEV

    context = {
        'first_name': name,
        'managername': managername,
        'teamname': teamname,
        'siteurl': siteurl,
    }

    _ = send_template_email(sender, [email], subject,
                            'teaminviteacceptemail.html', context, **kwargs)

    return 1
|
||
|
||
|
||
# Another simple task for debugging purposes
def add2(x, y, debug=False, **kwargs):  # pragma: no cover
    """Return the sum of *x* and *y*; extra arguments are ignored."""
    result = x + y
    return result
|
||
|
||
|
||
# Base endpoint of the RP3 rowing GraphQL API, used by the RP3 import tasks below.
graphql_url = "https://rp3rowing-app.com/graphql"
|
||
|
||
|
||
@app.task
def handle_update_wps(rid, types, ids, mode, debug=False, **kwargs):
    """Recompute and store a rower's median work-per-stroke (WPS).

    Reads 'driveenergy' for the given workout *ids*, takes the median of
    all strokes above 100, and stores the rounded value on the Rower
    record: ``median_wps`` when *mode* is 'water', ``median_wps_erg``
    otherwise. The *types* argument is currently unused.

    Returns the median as an int, or 0 when it could not be computed.
    """
    df = read_data(['time', 'driveenergy'], ids=ids)
    try:
        wps_median = int(df.filter(pl.col("driveenergy") > 100)["driveenergy"].median())
        rower = Rower.objects.get(id=rid)
        if mode == 'water':
            rower.median_wps = wps_median
        else:  # pragma: no cover
            rower.median_wps_erg = wps_median

        rower.save()
    # One clause instead of five identical `except X: wps_median = 0`
    # blocks: missing column, empty/None median, or a non-numeric value
    # all fall back to 0 and leave the Rower record untouched.
    except (ValueError, OverflowError, ColumnNotFoundError,
            TypeError, ComputeError):  # pragma: no cover
        wps_median = 0

    return wps_median
|
||
|
||
|
||
@app.task
def handle_rp3_async_workout(userid, rp3token, rp3id, startdatetime, max_attempts, debug=False, **kwargs):
    """Import one RP3 workout: request a CSV export, download it, upload it.

    Polls the RP3 GraphQL API (up to *max_attempts* times, 3 s apart)
    until the CSV download for workout *rp3id* is ready, writes the file
    to media/, and posts it to the upload service.

    Returns the new workout id, or 0 on any failure.
    """
    # Timezone label forwarded to the upload service (default 'UTC').
    timezone = kwargs.get('timezone', 'UTC')

    headers = {'Authorization': 'Bearer ' + rp3token}

    # GraphQL mutation/query asking RP3 to prepare a CSV export of the workout.
    get_download_link = """{
    download(workout_id: """ + str(rp3id) + """, type:csv){
    id
    status
    link
    }
    }"""

    have_link = False
    download_url = ''
    counter = 0

    # Seconds to wait between polls of the download status.
    waittime = 3
    while not have_link:
        try:
            response = requests.post(
                url=graphql_url,
                headers=headers,
                json={'query': get_download_link}
            )
            dologging('rp3_import.log', response.status_code)

            # A non-200 response stops the polling loop; download_url stays
            # empty, so the function returns 0 below.
            if response.status_code != 200:  # pragma: no cover
                have_link = True

            # Flatten the 'download' object into a one-row frame:
            # columns are (id, status, link) per the query above.
            workout_download_details = pd.json_normalize(
                response.json()['data']['download'])
            dologging('rp3_import.log', response.json())
        except:  # pragma: no cover
            # Network error or unexpected payload shape: give up.
            return 0

        # iat[0, 1] is 'status', iat[0, 2] is 'link'.
        if workout_download_details.iat[0, 1] == 'ready':
            download_url = workout_download_details.iat[0, 2]
            have_link = True

        dologging('rp3_import.log', download_url)

        counter += 1

        dologging('rp3_import.log', counter)

        if counter > max_attempts:  # pragma: no cover
            have_link = True

        # NOTE: this sleep also runs once after the link was found.
        time.sleep(waittime)

    # Polling exhausted without a ready link.
    if download_url == '':  # pragma: no cover
        return 0

    filename = 'media/RP3Import_' + str(rp3id) + '.csv'

    res = requests.get(download_url, headers=headers)
    dologging('rp3_import.log', 'tasks.py ' + str(rp3id))
    dologging('rp3_import.log', startdatetime)

    if not startdatetime:  # pragma: no cover
        # NOTE(review): `timezone` here is the string from kwargs above, which
        # has no .now(); this looks like it intends django.utils.timezone but
        # is shadowed by the local variable — confirm and fix upstream.
        startdatetime = str(timezone.now())

    try:
        startdatetime = str(startdatetime)
    except:  # pragma: no cover
        pass

    if res.status_code != 200:  # pragma: no cover
        return 0

    with open(filename, 'wb') as f:
        # dologging('rp3_import.log',res.text)
        dologging('rp3_import.log', 'Rp3 ID = {id}'.format(id=rp3id))
        f.write(res.content)

    # Payload for the internal upload service, which creates the Workout.
    uploadoptions = {
        'secret': UPLOAD_SERVICE_SECRET,
        'user': userid,
        'file': filename,
        'workouttype': 'rower',
        'boattype': 'rp3',
        'rp3id': int(rp3id),
        'startdatetime': startdatetime,
        'timezone': timezone,
    }

    session = requests.session()
    newHeaders = {'Content-type': 'application/json', 'Accept': 'text/plain'}
    session.headers.update(newHeaders)

    response = session.post(UPLOAD_SERVICE_URL, json=uploadoptions)

    if response.status_code != 200:  # pragma: no cover
        return 0

    workoutid = response.json()['id']

    return workoutid
|
||
|
||
|
||
@app.task
def handle_nk_async_workout(alldata, userid, nktoken, nkid, delaysec, defaulttimezone, debug=False, **kwargs):
    """Import one NK Logbook session after an optional delay.

    Looks up the session summary in *alldata*, fetches the stroke data
    from the NK API, writes it to a gzipped CSV, and creates a Workout
    via ``add_workout_from_data``, then records a sync record.
    *defaulttimezone* is currently unused in this function body.

    Returns the new workout id, or 0 on any failure.
    """
    time.sleep(delaysec)

    s = 'Importing from NK Logbook ID {nkid}'.format(nkid=nkid)
    dologging('nklog.log', s)

    # The session summary may be keyed by string or int id.
    try:
        data = alldata[nkid]
    except KeyError:  # pragma: no cover
        try:
            data = alldata[int(nkid)]
        except KeyError:
            return 0

    params = {
        'sessionIds': nkid,
    }

    authorizationstring = str('Bearer ' + nktoken)
    headers = {'Authorization': authorizationstring,
               'user-agent': 'sanderroosendaal',
               'Content-Type': 'application/json',
               }

    # get strokes
    url = NK_API_LOCATION + "api/v1/sessions/strokes"
    response = requests.get(url, headers=headers, params=params)

    if response.status_code != 200:  # pragma: no cover
        # error handling and logging
        dologging('nklog.log', 'Response status code {code}'.format(
            code=response.status_code))
        return 0

    # Response maps session id (as a string) to that session's strokes.
    jsonData = response.json()
    try:
        strokeData = jsonData[str(nkid)]
    except KeyError:  # pragma: no cover
        dologging('nklog.log', 'Could not find strokeData')
        return 0

    # Seat number selects which oarlock's data to extract; default seat 1.
    seatNumber = 1
    try:
        oarlockSessions = data['oarlockSessions']
        if oarlockSessions:
            oarlocksession = oarlockSessions[0]
            seatNumber = oarlocksession['seatNumber']
    except KeyError:  # pragma: no cover
        pass

    df = strokeDataToDf(strokeData, seatIndex=seatNumber)

    # Random prefix avoids collisions between imports of the same session.
    csvfilename = 'media/{code}_{nkid}.csv.gz'.format(
        nkid=nkid,
        code=uuid4().hex[:16]
    )

    df.to_csv(csvfilename, index_label='index', compression='gzip')

    # Creates the Workout record; returns (0, error) on failure.
    workoutid, error = add_workout_from_data(userid, nkid, data, df)

    # dologging('nklog.log','NK Workout ID {id}'.format(id=workoutid))
    if workoutid == 0:  # pragma: no cover
        return 0

    # Record the NK id on a sync record so the session isn't re-imported.
    try:
        workout = Workout.objects.get(id=workoutid)
        newnkid = workout.uploadedtonk
        sr = create_or_update_syncrecord(workout.user, workout, nkid=newnkid)
    except Workout.DoesNotExist:  # pragma: no cover
        pass

    return workoutid
|
||
|
||
@app.task
def handle_intervals_updateworkout(workout, debug=False, **kwargs):
    """Push a workout's metadata to its linked intervals.icu activity.

    Maps the workout's type/sub-type to intervals.icu vocabulary and PUTs
    name, RPE, commute flag and (when present) the paired planned-session
    event id to the activity identified by ``workout.uploadedtointervals``.

    Returns 0 when intervals.icu rejects the update, None otherwise.
    """
    rower = workout.user
    intervalstoken = rower.intervals_token
    authorizationstring = str('Bearer ' + intervalstoken)

    headers = {
        'authorization': authorizationstring
    }

    thetype = mytypes.intervalsmapping[workout.workouttype]
    jsondict = {'type': thetype}
    subtype = workout.sub_type
    if subtype == "Warming Up":
        jsondict['sub_type'] = "WARMUP"
    elif subtype == "Cooling Down":
        jsondict['sub_type'] = "COOLDOWN"
    elif subtype == "Commute":
        jsondict['commute'] = True
        jsondict['sub_type'] = "COMMUTE"
    else:
        jsondict['sub_type'] = "NONE"

    jsondict['name'] = workout.name

    if workout.rpe is not None and workout.rpe > 0:
        jsondict['icu_rpe'] = workout.rpe
    # Note: this overwrites the commute flag set in the sub-type branch
    # above with the workout's own is_commute value (original behavior).
    jsondict['commute'] = workout.is_commute
    if workout.plannedsession:
        jsondict['paired_event_id'] = workout.plannedsession.intervals_icu_id

    url = "https://intervals.icu/api/v1/activity/{activityid}".format(activityid=workout.uploadedtointervals)
    # Bug fix: the PUT used to be issued twice in a row; one is enough.
    response = requests.put(url, headers=headers, json=jsondict)

    if response.status_code not in [200, 201]:
        dologging('intervals.icu.log', response.reason)
        return 0
|
||
|
||
|
||
@app.task
def handle_intervals_getworkout(rower, intervalstoken, workoutid, debug=False, **kwargs):
    """Import an intervals.icu activity as a new workout.

    Fetches the activity's metadata and FIT file, saves the FIT file to
    media/, posts it to the upload service, then copies sub-type, commute
    and race flags onto the created Workout(s) and links any paired
    PlannedSession.

    Returns 1 on success, 0 on any failure along the way.
    """
    authorizationstring = str('Bearer ' + intervalstoken)
    headers = {
        'authorization': authorizationstring,
    }

    url = "https://intervals.icu/api/v1/activity/{}".format(workoutid)

    response = requests.get(url, headers=headers)
    if response.status_code != 200:
        return 0

    data = response.json()

    # Metadata extraction: each field falls back to a default when missing.
    try:
        workoutsource = data['device_name']
    except KeyError:
        workoutsource = 'intervals.icu'

    try:
        title = data['name']
    except KeyError:
        title = 'Intervals workout'

    # Garmin-sourced activities get a recognisable title prefix.
    if 'garmin' in workoutsource.lower():
        title = 'Garmin: ' + title

    try:
        workouttype = intervalsmappinginv[data['type']]
    except KeyError:
        workouttype = 'water'

    try:
        rpe = data['icu_rpe']
    except KeyError:
        rpe = 0

    try:
        is_commute = data['commute']
        if is_commute is None:
            is_commute = False
    except KeyError:
        is_commute = False

    try:
        subtype = data['sub_type']
        if subtype is not None:
            # e.g. 'WARMUP' -> 'Warmup' to match local sub_type values.
            subtype = subtype.capitalize()
    except KeyError:
        subtype = None

    try:
        is_race = data['race']
        if is_race is None:
            is_race = False
    except KeyError:
        is_race = False

    # Download the raw FIT file for the activity.
    url = "https://intervals.icu/api/v1/activity/{workoutid}/fit-file".format(workoutid=workoutid)

    response = requests.get(url, headers=headers)

    if response.status_code != 200:
        return 0

    try:
        fit_data = response.content
        fit_filename = 'media/' + f'{uuid4().hex[:16]}.fit'
        with open(fit_filename, 'wb') as fit_file:
            fit_file.write(fit_data)
    except Exception as e:
        return 0

    # Sanity-parse the FIT file before handing it to the upload service.
    # NOTE: rowsummary/duration/distance are computed but not used below;
    # the parse acts as a validity check.
    try:
        row = FP(fit_filename)
        rowdata = rowingdata.rowingdata(df=row.df)
        rowsummary = FitSummaryData(fit_filename)
        duration = totaltime_sec_to_string(rowdata.duration)
        distance = rowdata.df[" Horizontal (meters)"].iloc[-1]
    except Exception as e:
        return 0

    # Payload for the internal upload service, which creates the Workout.
    uploadoptions = {
        'secret': UPLOAD_SERVICE_SECRET,
        'user': rower.user.id,
        'boattype': '1x',
        'workouttype': workouttype,
        'workoutsource': workoutsource,
        'file': fit_filename,
        'intervalsid': workoutid,
        'title': title,
        'rpe': rpe,
        'notes': '',
        'offline': False,
    }

    url = UPLOAD_SERVICE_URL
    handle_request_post(url, uploadoptions)

    # Post-processing: copy flags to the created Workout(s) and link the
    # paired planned session. A missing 'paired_event_id' key skips this
    # whole section (KeyError is raised on the first line).
    try:
        paired_event_id = data['paired_event_id']
        ws = Workout.objects.filter(uploadedtointervals=workoutid)
        for w in ws:
            w.sub_type = subtype
            w.save()
        if is_commute:
            for w in ws:
                w.is_commute = True
                w.sub_type = "Commute"
                w.save()
        if is_race:
            for w in ws:
                w.is_race = True
                w.save()
        if ws.count() > 0:
            pss = PlannedSession.objects.filter(rower=rower, intervals_icu_id=paired_event_id)
            if pss.count() > 0:
                for ps in pss:
                    for w in ws:
                        w.plannedsession = ps
                        w.save()
    except KeyError:
        pass
    except Workout.DoesNotExist:
        pass
    except PlannedSession.DoesNotExist:
        pass

    return 1
|
||
|
||
@app.task
def handle_c2_getworkout(userid, c2token, c2id, defaulttimezone, debug=False, **kwargs):
    """Fetch a single Concept2 logbook result and hand it to the importer.

    Retrieves the result summary for *c2id* from the Concept2 API and
    delegates to ``handle_c2_async_workout`` with zero delay. Returns
    that importer's workout id, or 0 when the API call fails.
    """
    request_headers = {
        'Authorization': str('Bearer ' + c2token),
        'user-agent': 'sanderroosendaal',
        'Content-Type': 'application/json',
    }
    result_url = "https://log.concept2.com/api/users/me/results/" + str(c2id)
    api_response = requests.get(result_url, headers=request_headers)

    if api_response.status_code != 200:  # pragma: no cover
        return 0

    summary = api_response.json()['data']
    return handle_c2_async_workout({c2id: summary}, userid, c2token,
                                   c2id, 0, defaulttimezone)
|
||
|
||
|
||
def df_from_summary(data):
    """Build a coarse per-split strokes DataFrame from a Concept2 summary.

    Used when a Concept2 result has no stroke data: reconstructs
    timestamp, distance, heart-rate and stroke-rate series from the
    workout's splits. Any missing or malformed split field falls back
    to 0 for that sample.

    Returns a DataFrame with the rowingdata column names plus an
    ' ElapsedTime (sec)' column relative to the workout start.
    """
    # Only the unix start time is used here; the other returned fields
    # (and the previously computed, unused title) are discarded.
    _, _, _, _, starttimeunix, _ = utils.get_startdatetime_from_c2data(data)

    try:
        splits = data['workout']['splits']
    except (KeyError, TypeError):  # pragma: no cover
        splits = [0]

    time = starttimeunix
    elapsed_distance = 0
    times = [0]
    distances = [0]
    # Seed the rate/HR series with the first split's values (kept from the
    # original implementation so all series stay len(splits)+1 long).
    try:
        spms = [splits[0]['stroke_rate']]
    except (KeyError, TypeError, IndexError):  # pragma: no cover
        spms = [0]
    try:
        hrs = [splits[0]['heart_rate']['average']]
    except (KeyError, TypeError, IndexError):  # pragma: no cover
        hrs = [0]

    for split in splits:
        # Split times are in tenths of a second.
        try:
            time += split['time'] / 10.
            times.append(time)
        except (KeyError, TypeError):  # pragma: no cover
            times.append(0)
        try:
            elapsed_distance += split['distance']
            distances.append(elapsed_distance)
        except (KeyError, TypeError):  # pragma: no cover
            distances.append(0)
        try:
            spms.append(split['stroke_rate'])
        except (KeyError, TypeError):  # pragma: no cover
            spms.append(0)
        try:
            hrs.append(split['heart_rate']['average'])
        except (KeyError, TypeError):  # pragma: no cover
            hrs.append(0)

    df = pd.DataFrame({
        'TimeStamp (sec)': times,
        ' Horizontal (meters)': distances,
        ' HRCur (bpm)': hrs,
        ' Cadence (stokes/min)': spms,
    })

    df[' ElapsedTime (sec)'] = df['TimeStamp (sec)'] - starttimeunix

    return df
|
||
|
||
|
||
@app.task
def handle_c2_async_workout(alldata, userid, c2token, c2id, delaysec,
                            defaulttimezone, debug=False, **kwargs):
    """Import one Concept2 logbook result after an optional delay.

    Builds a strokes CSV (from the strokes API when available, else from
    the split summary), posts it to the upload service, then fixes up the
    created Workout's distance/duration and, when split data exists,
    its interval summary. *defaulttimezone* is unused in this body.

    Returns the new workout id, or 0 on any failure.
    """
    time.sleep(delaysec)
    dologging('c2_import.log', str(c2id) + ' for userid ' + str(userid))
    data = alldata[c2id]
    splitdata = None

    # Total distance includes rest distance when the result has one.
    distance = data['distance']
    try:  # pragma: no cover
        rest_distance = data['rest_distance']
        # rest_time = data['rest_time']/10.
    except KeyError:
        rest_distance = 0
        # rest_time = 0
    distance = distance + rest_distance
    c2id = data['id']
    dologging('c2_import.log', data['type'])
    # Map the Concept2 machine type onto local workouttype/boattype:
    # all rowing-erg variants become 'rower', with boattype recording the
    # variant ('static'/'dynamic'/'slides'); everything else (ski, bike, …)
    # keeps its type and gets boattype 'static'.
    if data['type'] in ['rower', 'dynamic', 'slides']:
        workouttype = 'rower'
        boattype = data['type']
        if data['type'] == 'rower':
            boattype = 'static'
    else:
        workouttype = data['type']
        boattype = 'static'
    # verified = data['verified']

    # weightclass = data['weight_class']

    try:
        has_strokedata = data['stroke_data']
    except KeyError:  # pragma: no cover
        has_strokedata = True

    s = 'User {userid}, C2 ID {c2id}'.format(userid=userid, c2id=c2id)
    dologging('c2_import.log', s)
    dologging('c2_import.log', json.dumps(data))

    # Title: result name plus the first comment line, clipped to 40 chars.
    try:
        title = data['name']
    except KeyError:
        title = ""
    try:
        t = data['comments'].split('\n', 1)[0]
        title += t[:40]
    except:  # pragma: no cover
        title = ''

    # Create CSV file name and save data to CSV file
    csvfilename = 'media/{code}_{c2id}.csv.gz'.format(
        code=uuid4().hex[:16], c2id=c2id)

    startdatetime, starttime, workoutdate, duration, starttimeunix, timezone = utils.get_startdatetime_from_c2data(
        data
    )

    s = 'Time zone {timezone}, startdatetime {startdatetime}, duration {duration}'.format(
        timezone=timezone, startdatetime=startdatetime,
        duration=duration)
    dologging('c2_import.log', s)

    # Fetch per-stroke data for this result.
    authorizationstring = str('Bearer ' + c2token)
    headers = {'Authorization': authorizationstring,
               'user-agent': 'sanderroosendaal',
               'Content-Type': 'application/json'}
    url = "https://log.concept2.com/api/users/me/results/" + str(c2id) + "/strokes"
    try:
        s = requests.get(url, headers=headers)
    except ConnectionError:  # pragma: no cover
        return 0

    if s.status_code != 200:  # pragma: no cover
        dologging('c2_import.log', 'No Stroke Data. Status Code {code}'.format(
            code=s.status_code))
        dologging('c2_import.log', s.text)
        has_strokedata = False

    if not has_strokedata:  # pragma: no cover
        # No stroke data: reconstruct a coarse frame from the splits.
        df = df_from_summary(data)
    else:
        # dologging('debuglog.log',json.dumps(s.json()))
        try:
            strokedata = pd.DataFrame.from_dict(s.json()['data'])
        except AttributeError:  # pragma: no cover
            dologging('c2_import.log', 'No stroke data in stroke data')
            return 0

        # 't' is in tenths of a second; make_cumvalues also yields a lap
        # index that increments whenever the raw series resets.
        try:
            res = make_cumvalues(0.1 * strokedata['t'])
            cum_time = res[0]
            lapidx = res[1]
        except KeyError:  # pragma: no cover
            dologging('c2_import.log', 'No time values in stroke data')
            return 0

        unixtime = cum_time + starttimeunix
        # unixtime[0] = starttimeunix
        seconds = 0.1 * strokedata.loc[:, 't']

        nr_rows = len(unixtime)

        # Optional series default to zeros when absent from the payload.
        try:  # pragma: no cover
            latcoord = strokedata.loc[:, 'lat']
            loncoord = strokedata.loc[:, 'lon']
        except:
            latcoord = np.zeros(nr_rows)
            loncoord = np.zeros(nr_rows)

        try:
            strokelength = strokedata.loc[:, 'strokelength']
        except:  # pragma: no cover
            strokelength = np.zeros(nr_rows)

        # 'd' is distance in decimeters.
        dist2 = 0.1 * strokedata.loc[:, 'd']
        cumdist, intervals = make_cumvalues(dist2)

        try:
            spm = strokedata.loc[:, 'spm']
        except KeyError:  # pragma: no cover
            spm = 0 * dist2

        try:
            hr = strokedata.loc[:, 'hr']
        except KeyError:  # pragma: no cover
            hr = 0 * spm

        # 'p' is pace in tenths of sec/500m; zero pace is treated as a
        # 300 s/500m placeholder to avoid division by zero below.
        pace = strokedata.loc[:, 'p'] / 10.
        pace = np.clip(pace, 0, 1e4)
        pace = pace.replace(0, 300)

        velo = 500. / pace
        # Standard erg power formula: P = 2.8 * v^3.
        power = 2.8 * velo**3
        if workouttype == 'bike':  # pragma: no cover
            # BikeErg pace is per 1000 m, not per 500 m.
            velo = 1000. / pace

        dologging('c2_import.log', 'Unix Time Stamp {s}'.format(s=unixtime[0]))
        # dologging('debuglog.log',json.dumps(s.json()))

        df = pd.DataFrame({'TimeStamp (sec)': unixtime,
                           ' Horizontal (meters)': dist2,
                           ' Cadence (stokes/min)': spm,
                           ' HRCur (bpm)': hr,
                           ' longitude': loncoord,
                           ' latitude': latcoord,
                           ' Stroke500mPace (sec/500m)': pace,
                           ' Power (watts)': power,
                           ' DragFactor': np.zeros(nr_rows),
                           ' DriveLength (meters)': np.zeros(nr_rows),
                           ' StrokeDistance (meters)': strokelength,
                           ' DriveTime (ms)': np.zeros(nr_rows),
                           ' StrokeRecoveryTime (ms)': np.zeros(nr_rows),
                           ' AverageDriveForce (lbs)': np.zeros(nr_rows),
                           ' PeakDriveForce (lbs)': np.zeros(nr_rows),
                           ' lapIdx': lapidx,
                           ' WorkoutState': 4,
                           ' ElapsedTime (sec)': seconds,
                           'cum_dist': cumdist
                           })

        # NOTE(review): sort_values is not in-place and its result is
        # discarded here — confirm whether df was meant to be reassigned.
        df.sort_values(by='TimeStamp (sec)', ascending=True)

    _ = df.to_csv(csvfilename, index_label='index', compression='gzip')

    # Payload for the internal upload service, which creates the Workout.
    uploadoptions = {
        'secret': UPLOAD_SERVICE_SECRET,
        'user': userid,
        'file': csvfilename,
        'title': title,
        'workouttype': workouttype,
        'boattype': boattype,
        'c2id': c2id,
        'startdatetime': startdatetime.isoformat(),
        'timezone': str(timezone)
    }

    session = requests.session()
    newHeaders = {'Content-type': 'application/json', 'Accept': 'text/plain'}
    session.headers.update(newHeaders)

    response = session.post(UPLOAD_SERVICE_URL, json=uploadoptions)

    if response.status_code != 200:  # pragma: no cover
        dologging('c2_import.log',
                  'Upload API returned status code {code}'.format(
                      code=response.status_code))
        return 0

    workoutid = response.json()['id']
    dologging('c2_import.log', 'workout id {id}'.format(id=workoutid))

    # Record the C2 id on a sync record so the result isn't re-imported.
    workout = Workout.objects.get(id=workoutid)
    newc2id = workout.uploadedtoc2

    record = create_or_update_syncrecord(workout.user, workout, c2id=newc2id)

    # set distance, time
    workout = Workout.objects.get(id=workoutid)
    workout.distance = distance
    workout.duration = duration
    workout.save()

    # summary
    if 'workout' in data:
        if 'splits' in data['workout']:  # pragma: no cover
            splitdata = data['workout']['splits']
        elif 'intervals' in data['workout']:  # pragma: no cover
            splitdata = data['workout']['intervals']
        else:  # pragma: no cover
            splitdata = False
    else:
        splitdata = False

    if splitdata:  # pragma: no cover
        # Build and store the interval summary, then push the interval
        # structure back into the stroke CSV.
        summary, sa, results = summaryfromsplitdata(
            splitdata, data, csvfilename, workouttype=workouttype)

        workout = Workout.objects.get(id=workoutid)
        workout.summary = summary
        workout.save()

        from rowingdata.trainingparser import getlist
        if sa:
            values = getlist(sa)
            units = getlist(sa, sel='unit')
            types = getlist(sa, sel='type')

            rowdata = rdata(csvfile=csvfilename)
            if rowdata:
                rowdata.updateintervaldata(values, units, types, results)

                rowdata.write_csv(csvfilename, gzip=True)
                update_strokedata(workoutid, rowdata.df)

    return workoutid
|
||
|
||
@app.task
def handle_split_workout_by_intervals(id, debug=False, **kwargs):
    """Split one workout into separate workouts, one per interval.

    Loads the workout's stroke CSV, splits it on interval boundaries and
    posts each piece to the upload service as a new workout titled
    '<original name> - interval <n>'.

    Returns 1 on success, 0 when the data file is missing or contains no
    interval information.
    """
    row = Workout.objects.get(id=id)
    r = row.user
    rowdata = rdata(csvfile=row.csvfilename)
    if rowdata == 0:
        # Bug fix: this branch previously used the undefined names
        # `request` and `url` (code copied from a Django view), raising a
        # NameError inside the task. Fail cleanly instead.
        dologging('c2_import.log',
                  'No data file found for workout {id}'.format(id=id))
        return 0

    try:
        new_rowdata = rowdata.split_by_intervals()
    except KeyError:
        # No interval information in the stroke data; nothing to split.
        return 0

    interval_i = 1
    for data in new_rowdata:
        filename = 'media/{code}.csv'.format(
            code=uuid4().hex[:16]
        )

        data.write_csv(filename)

        uploadoptions = {
            'secret': UPLOAD_SERVICE_SECRET,
            'user': r.user.id,
            'title': '{title} - interval {i}'.format(title=row.name, i=interval_i),
            'file': filename,
            'boattype': row.boattype,
            'workouttype': row.workouttype,
        }

        session = requests.session()
        # 'text/plain' fixes the 'text/plan' typo, matching the other
        # upload-service calls in this module.
        newHeaders = {'Content-type': 'application/json', 'Accept': 'text/plain'}
        session.headers.update(newHeaders)
        response = session.post(UPLOAD_SERVICE_URL, json=uploadoptions)

        interval_i = interval_i + 1

    return 1
|
||
|
||
|
||
@app.task
def fetch_rojabo_session(id, alldata, userid, rowerid, debug=False, **kwargs):  # pragma: no cover
    """Stub importer for a Rojabo training session.

    Looks the session *id* up in *alldata* and returns 1 when found,
    0 when missing. *userid* and *rowerid* are currently unused.
    """
    try:
        item = alldata[id]
    except KeyError:
        return 0

    return 1
|
||
|
||
|
||
@app.task
def fetch_strava_workout(stravatoken, oauth_data, stravaid, csvfilename, userid, debug=False, **kwargs):
    """Fetch an activity from the Strava API and repost it to the upload service.

    Downloads the activity summary and its data streams (cadence, heart rate,
    time, speed, distance, coordinates, power), assembles them into the
    rowingdata CSV column layout, writes the CSV to ``csvfilename``, and posts
    the file to the upload service for user ``userid``.

    Parameters
    ----------
    stravatoken : str
        Strava OAuth bearer token.
    oauth_data
        Unused in this function body; accepted for task-signature compatibility.
    stravaid : int or str
        Strava activity id to fetch.
    csvfilename : str
        Path where the assembled CSV is written.
    userid : int
        Rowsandall user id the upload is attributed to.
    debug : bool
        Unused here.

    Returns
    -------
    int
        1 on success, 0 on any fetch/parse failure.
    """
    authorizationstring = str('Bearer '+stravatoken)
    headers = {'Authorization': authorizationstring,
               'user-agent': 'sanderroosendaal',
               'Content-Type': 'application/json',
               'resolution': 'medium', }
    url = "https://www.strava.com/api/v3/activities/"+str(stravaid)
    response = requests.get(url, headers=headers)
    if response.status_code != 200:  # pragma: no cover
        # Log the failure (and the response body when it is JSON) and bail.
        dologging('stravalog.log', 'handle_get_strava_file response code {code}\n'.format(
            code=response.status_code))
        try:
            dologging('stravalog.log', 'Response json {json}\n'.format(json=response.json()))
        except:
            pass

        return 0

    # Second GET of the same URL to obtain the parsed summary JSON.
    try:
        workoutsummary = requests.get(url, headers=headers).json()
    except:  # pragma: no cover
        return 0

    # Pull the individual data streams. Each call may return None when the
    # activity lacks that stream; None cases are backfilled with zeros below.
    spm = get_strava_stream(None, 'cadence', stravaid,
                            authorizationstring=authorizationstring)
    hr = get_strava_stream(None, 'heartrate', stravaid,
                           authorizationstring=authorizationstring)
    t = get_strava_stream(None, 'time', stravaid,
                          authorizationstring=authorizationstring)
    velo = get_strava_stream(None, 'velocity_smooth',
                             stravaid, authorizationstring=authorizationstring)
    d = get_strava_stream(None, 'distance', stravaid,
                          authorizationstring=authorizationstring)
    coords = get_strava_stream(
        None, 'latlng', stravaid, authorizationstring=authorizationstring)
    power = get_strava_stream(None, 'watts', stravaid,
                              authorizationstring=authorizationstring)

    # Append the raw summary to the webhook audit log.
    tstamp = time.localtime()
    timestamp = time.strftime('%b-%d-%Y_%H%M', tstamp)
    with open('strava_webhooks.log', 'a') as f:
        f.write('\n')
        f.write(timestamp)
        f.write(' ')
        f.write(url)
        f.write(' ')
        f.write('Response data {data}\n'.format(data=workoutsummary))

    if t is not None:
        nr_rows = len(t)
    else:  # pragma: no cover
        # No time stream: synthesize a 1 Hz time axis from the summary's
        # elapsed_time (0 when that field is also missing).
        try:
            duration = int(workoutsummary['elapsed_time'])
        except KeyError:
            duration = 0
        t = pd.Series(range(duration+1))

        nr_rows = len(t)

    if nr_rows == 0:  # pragma: no cover
        return 0

    # Backfill missing streams with zeros of matching length.
    if d is None:  # pragma: no cover
        d = 0*t

    if spm is None:  # pragma: no cover
        spm = np.zeros(nr_rows)

    if power is None:  # pragma: no cover
        power = np.zeros(nr_rows)

    if hr is None:  # pragma: no cover
        hr = np.zeros(nr_rows)

    if velo is None:  # pragma: no cover
        velo = np.zeros(nr_rows)

    # Smooth velocity over roughly a 5 second window (window size derived
    # from the mean sample interval).
    try:
        dt = np.diff(t).mean()
        wsize = round(5./dt)

        velo2 = ewmovingaverage(velo, wsize)
    except ValueError:  # pragma: no cover
        velo2 = velo

    # Split latlng pairs into separate arrays; fall back to zeros when the
    # stream is absent or not 2-D.
    if coords is not None:
        try:
            lat = coords[:, 0]
            lon = coords[:, 1]
        except IndexError:  # pragma: no cover
            lat = np.zeros(len(t))
            lon = np.zeros(len(t))
    else:  # pragma: no cover
        lat = np.zeros(len(t))
        lon = np.zeros(len(t))

    # Distance per stroke; division by a zero stroke rate yields inf, which
    # is zeroed out.
    try:
        strokelength = velo*60./(spm)
        strokelength[np.isinf(strokelength)] = 0.0
    except ValueError:
        strokelength = np.zeros(len(t))

    # Pace in sec/500m from the smoothed velocity; inf (velocity 0) -> 0.
    pace = 500./(1.0*velo2)
    pace[np.isinf(pace)] = 0.0

    # Intermediate frame in deciseconds/decimeters (x10 fixed-point style;
    # converted back with x0.1 further down).
    try:
        strokedata = pl.DataFrame({'t': 10*t,
                                   'd': 10*d,
                                   'p': 10*pace,
                                   'spm': spm,
                                   'hr': hr,
                                   'lat': lat,
                                   'lon': lon,
                                   'power': power,
                                   'strokelength': strokelength,
                                   })
    except ValueError:  # pragma: no cover
        return 0
    except ShapeError:
        # polars raises ShapeError when the columns have mismatched lengths.
        return 0

    # Map the Strava activity type onto the local workout type taxonomy.
    try:
        workouttype = mytypes.stravamappinginv[workoutsummary['type']]
    except KeyError:  # pragma: no cover
        workouttype = 'other'

    if workouttype.lower() == 'rowing':  # pragma: no cover
        workouttype = 'rower'

    # A route polyline implies on-water rowing rather than an erg session.
    try:
        if 'summary_polyline' in workoutsummary['map'] and workouttype == 'rower':  # pragma: no cover
            workouttype = 'water'
    except (KeyError, TypeError):  # pragma: no cover
        pass

    # Work out the start datetime, trying the summary fields in order of
    # preference: date_utc, start_date, date.
    try:
        rowdatetime = iso8601.parse_date(workoutsummary['date_utc'])
    except KeyError:
        try:
            rowdatetime = iso8601.parse_date(workoutsummary['start_date'])
        except KeyError:
            rowdatetime = iso8601.parse_date(workoutsummary['date'])
    except ParseError:  # pragma: no cover
        rowdatetime = iso8601.parse_date(workoutsummary['date'])

    # Title: activity name, else (presumably) the first line of the comments,
    # truncated to 20 chars — TODO confirm intended nesting; indentation was
    # ambiguous in the source.
    try:
        title = workoutsummary['name']
    except KeyError:  # pragma: no cover
        title = ""
        try:
            t = workoutsummary['comments'].split('\n', 1)[0]
            title += t[:20]
        except:
            title = ''

    starttimeunix = arrow.get(rowdatetime).timestamp()

    # Cumulative time plus a lap index derived from resets in the time series.
    res = make_cumvalues_array(0.1*strokedata['t'].to_numpy())
    cum_time = pl.Series(res[0])
    lapidx = pl.Series(res[1])

    unixtime = cum_time+starttimeunix
    seconds = 0.1*strokedata['t']

    nr_rows = len(unixtime)

    # If GPS coordinates never vary, a 'water' workout was most likely
    # recorded indoors — reclassify as erg ('rower').
    try:
        latcoord = strokedata['lat']
        loncoord = strokedata['lon']
        if latcoord.std() == 0 and loncoord.std() == 0 and workouttype == 'water':  # pragma: no cover
            workouttype = 'rower'
    except:  # pragma: no cover
        latcoord = np.zeros(nr_rows)
        loncoord = np.zeros(nr_rows)
        if workouttype == 'water':
            workouttype = 'rower'

    try:
        strokelength = strokedata['strokelength']
    except:  # pragma: no cover
        strokelength = np.zeros(nr_rows)

    # Convert the x10 fixed-point columns back to their natural units.
    dist2 = 0.1*strokedata['d']

    try:
        spm = strokedata['spm']
    except (KeyError, ColumnNotFoundError):  # pragma: no cover
        spm = 0*dist2

    try:
        hr = strokedata['hr']
    except (KeyError, ColumnNotFoundError):  # pragma: no cover
        hr = 0*spm
    # Pace: clip outliers, then map 0 (no movement) to a 300 s/500m placeholder.
    pace = strokedata['p']/10.
    pace = np.clip(pace, 0, 1e4)
    pace = pl.Series(pace).replace(0, 300)

    velo = 500./pace

    try:
        power = strokedata['power']
    except KeyError:  # pragma: no cover
        # Estimate power from boat speed (P ~ 2.8 * v^3).
        power = 2.8*velo**3

    # if power.std() == 0 and power.mean() == 0:
    #     power = 2.8*velo**3

    # save csv
    # Create data frame with all necessary data to write to csv
    # (column names follow the rowingdata CSV convention, including the
    # leading spaces and the 'stokes/min' spelling — do not "fix" them).
    df = pl.DataFrame({'TimeStamp (sec)': unixtime,
                       ' Horizontal (meters)': dist2,
                       ' Cadence (stokes/min)': spm,
                       ' HRCur (bpm)': hr,
                       ' longitude': loncoord,
                       ' latitude': latcoord,
                       ' Stroke500mPace (sec/500m)': pace,
                       ' Power (watts)': power,
                       ' DragFactor': np.zeros(nr_rows),
                       ' DriveLength (meters)': np.zeros(nr_rows),
                       ' StrokeDistance (meters)': strokelength,
                       ' DriveTime (ms)': np.zeros(nr_rows),
                       ' StrokeRecoveryTime (ms)': np.zeros(nr_rows),
                       ' AverageDriveForce (lbs)': np.zeros(nr_rows),
                       ' PeakDriveForce (lbs)': np.zeros(nr_rows),
                       ' lapIdx': lapidx,
                       ' ElapsedTime (sec)': seconds,
                       'cum_dist': dist2,
                       })

    # NOTE(review): polars DataFrame.sort is not in-place; this return value
    # is discarded, so df is left unsorted here — confirm whether that is
    # intentional.
    df.sort('TimeStamp (sec)')

    row = rowingdata.rowingdata_pl(df=df)
    try:
        row.write_csv(csvfilename, compressed=False)
    except ComputeError:
        # polars backend failed; fall back to the pandas implementation.
        dologging('stravalog.log', 'polars not working')
        row = rowingdata.rowingdata(df=df.to_pandas())
        row.write_csv(csvfilename)

    # summary = row.allstats()
    # maxdist = df['cum_dist'].max()
    duration = row.duration

    # Hand the CSV to the upload service, which creates the workout record.
    uploadoptions = {
        'secret': UPLOAD_SERVICE_SECRET,
        'user': userid,
        'file': csvfilename,
        'title': title,
        'workouttype': workouttype,
        'boattype': '1x',
        'stravaid': stravaid,
    }

    session = requests.session()
    newHeaders = {'Content-type': 'application/json', 'Accept': 'text/plain'}
    session.headers.update(newHeaders)
    response = session.post(UPLOAD_SERVICE_URL, json=uploadoptions)

    # Audit-log the hand-off.
    t = time.localtime()
    timestamp = time.strftime('%b-%d-%Y_%H%M', t)
    with open('strava_webhooks.log', 'a') as f:
        f.write('\n')
        f.write(timestamp)
        f.write(' ')
        f.write('fetch_strava_workout posted file with strava id {stravaid} user id {userid}\n'.format(
            stravaid=stravaid, userid=userid))

    return 1
|