Private
Public Access
1
0

Merge branch 'feature/newdataflow' into develop

This commit is contained in:
2025-11-16 13:11:52 +01:00
32 changed files with 3738 additions and 2402 deletions

View File

@@ -25,13 +25,13 @@ import numpy as np
from rowsandall_app.settings import (
C2_CLIENT_ID, C2_REDIRECT_URI, C2_CLIENT_SECRET,
UPLOAD_SERVICE_URL, UPLOAD_SERVICE_SECRET
)
from rowers.tasks import (
handle_c2_import_stroke_data, handle_c2_sync, handle_c2_async_workout,
handle_c2_getworkout
handle_c2_import_stroke_data, handle_c2_sync,
)
from rowers.upload_tasks import handle_c2_async_workout, handle_c2_getworkout
import django_rq
queue = django_rq.get_queue('default')
queuelow = django_rq.get_queue('low')

723
rowers/dataflow.py Normal file
View File

@@ -0,0 +1,723 @@
from rowers.celery import app
from rowers.utils import myqueue
import zipfile
import os
from rowingdata import get_file_type
from rowingdata import rowingdata as rrdata
import django_rq
from shutil import copyfile
from time import strftime
import numpy as np
from scipy.signal import find_peaks, savgol_filter
import pandas as pd
import datetime
import math
os.environ["DJANGO_ALLOW_ASYNC_UNSAFE"] = "true"
from YamJam import yamjam
CFG = yamjam()['rowsandallapp']
try:
os.environ.setdefault("DJANGO_SETTINGS_MODULE",CFG['settings_name'])
except KeyError: # pragma: no cover
os.environ.setdefault("DJANGO_SETTINGS_MODULE","rowsandall_app.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
queue = django_rq.get_queue('default')
queuelow = django_rq.get_queue('low')
queuehigh = django_rq.get_queue('default')
from django.conf import settings
from django.urls import reverse
from django.utils import timezone as tz
from rowers.forms import DocumentsForm, TeamUploadOptionsForm
from rowers.models import (
TeamInviteForm, Workout, User, Rower, Team,
VirtualRace, IndoorVirtualRaceResult, VirtualRaceResult)
from rowers.opaque import encoder
from rowers import uploads
from rowingdata import rower as rrower
from rowers.dataroutines import (
rdata, get_startdate_time_zone, df_resample, checkduplicates, dataplep,
get_workouttype_from_fit,
get_title_from_fit,
get_notes_from_fit,
)
from rowers.mytypes import otetypes, otwtypes
from rowers.utils import totaltime_sec_to_string
from rowers.dataprep import check_marker, checkbreakthrough, update_wps, handle_nonpainsled
from rowers.emails import send_confirm
from rowers.tasks import handle_sendemail_unrecognized, handle_sendemail_breakthrough, handle_sendemail_hard, handle_calctrimp
from uuid import uuid4
def getrower(user):
    """Return the Rower linked to *user*, creating one if it does not exist.

    Accepts either a User instance or a raw user id; returns None for
    anonymous or missing users.
    """
    try:
        anonymous = user is None or user.is_anonymous
    except AttributeError:  # pragma: no cover
        # *user* is a raw id rather than a User instance.
        anonymous = User.objects.get(id=user).is_anonymous
    if anonymous:  # pragma: no cover
        return None
    try:
        return Rower.objects.get(user=user)
    except Rower.DoesNotExist:  # pragma: no cover
        rower = Rower(user=user)
        rower.save()
        return rower
def generate_job_id():
    """Return a fresh random job identifier (a stringified UUID4)."""
    return "{}".format(uuid4())
def valid_uploadoptions(uploadoptions):
    """Validate upload options against the three upload forms.

    Returns a (is_valid, message) tuple; *message* concatenates the form
    error dicts for debugging.
    """
    fstr = uploadoptions.get('file', None)
    if fstr is None:  # pragma: no cover
        return False, "Missing file in upload options."
    # A string file value must point at an existing file on disk.
    if isinstance(fstr, str) and not os.path.isfile(fstr):  # pragma: no cover
        return False, f"File not found: {fstr}"
    form = DocumentsForm(uploadoptions)
    optionsform = TeamUploadOptionsForm(uploadoptions)
    rowerform = TeamInviteForm(uploadoptions)
    rowerform.fields.pop('email')  # we don't need email here
    ok = form.is_valid() and optionsform.is_valid() and rowerform.is_valid()
    message = "{}, {}, {}".format(
        form.errors, optionsform.errors, rowerform.errors)
    return ok, message
def is_zipfile(file_path):
    """Return True when the detected file type of *file_path* is a zip archive."""
    detected = get_file_type(file_path)
    return detected[0] == 'zip'
def is_invalid_file(file_path, rower=None):
    """Check that *file_path* contains a supported stroke-data format.

    Returns (is_valid, message): despite the function name, the first
    element is True when the file IS valid (callers unpack it as
    ``is_valid, message``).

    Bug fix: the "unknown" branch referenced undefined names (``f2`` and
    ``r``, copied from new_workout_from_file) and raised NameError.  It now
    uses *file_path*, and queues the notification email only when the new
    optional *rower* argument is supplied (the default keeps the existing
    call signature working).
    """
    fileformat = get_file_type(file_path)
    if fileformat == "imageformat":
        return False, "Image files are not supported for upload."
    if fileformat == "json":
        return False, "JSON files are not supported for upload."
    if fileformat == "c2log":  # pragma: no cover
        return False, "Concept2 log files are not supported for upload."
    if fileformat == "nostrokes":  # pragma: no cover
        return False, "No stroke data found in the file."
    if fileformat == "kml":
        return False, "KML files are not supported for upload."
    if fileformat == "notgzip":  # pragma: no cover
        return False, "The gzip file appears to be corrupted."
    if fileformat == "rowprolog":  # pragma: no cover
        return False, "RowPro logbook summary files are not supported for upload."
    if fileformat == "gpx":
        return False, "GPX files are not supported for upload."
    if fileformat == "unknown":  # pragma: no cover
        extension = os.path.splitext(file_path)[1]
        filename = os.path.splitext(file_path)[0]
        if extension == '.gz':
            # NOTE(review): the inner extension is read *after* it has been
            # stripped from filename, so it comes out empty for 'x.tcx.gz'.
            # Preserved from the original — confirm intent.
            filename = os.path.splitext(filename)[0]
            extension2 = os.path.splitext(filename)[1] + extension
            extension = extension2
        # Keep a copy of the unrecognized file for inspection.
        f4 = filename + 'a' + extension
        copyfile(file_path, f4)
        if rower is not None:
            _ = myqueue(queuelow,
                        handle_sendemail_unrecognized,
                        f4,
                        rower.user.email)
        return False, "The file format is not recognized or supported."
    return True, ""
def upload_handler(uploadoptions, filename, createworkout=False, debug=False, **kwargs):
    """Entry point for workout file uploads.

    Validates *uploadoptions* and *filename*, then queues background
    processing: zip archives go to unzip_and_process (one job per member),
    everything else to process_single_file.  Returns a status dict with a
    job id the caller can poll.
    """
    valid, message = valid_uploadoptions(uploadoptions)
    if not valid:  # pragma: no cover
        return {
            "status": "error",
            "job_id": None,
            "message": message
        }
    is_valid, message = is_invalid_file(filename)
    if not is_valid:  # pragma: no cover
        # Unsupported format: discard the file and report why.
        os.remove(filename)
        return {
            "status": "error",
            "job_id": None,
            "message": message
        }
    if is_zipfile(filename):
        # Zip archives fan out into one job per member; the parent id is
        # returned so the caller can track the whole batch.
        parent_job_id = generate_job_id()
        _ = myqueue(
            queuehigh,
            unzip_and_process,
            filename,
            uploadoptions,
            parent_job_id)
        return {
            "status": "processing",
            "job_id": parent_job_id,
            "message": "Your zip file is being processed. You will be notified when it is complete."
        }
    job_id = generate_job_id()
    if 'id' not in uploadoptions and createworkout:
        # Pre-create an empty Workout so the caller gets a stable id up front.
        w = Workout(
            user=get_rower_from_uploadoptions(uploadoptions),
            duration='00:00:00'
        )
        w.save()
        uploadoptions['id'] = w.id
    if 'id' in uploadoptions:
        # Derive the job id from the workout id so it can be decoded later.
        job_id = encoder.encode_hex(uploadoptions['id'])
    _ = myqueue(
        queuehigh,
        process_single_file,
        filename,
        uploadoptions,
        job_id)
    return {
        "status": "processing",
        "job_id": job_id,
        "message": "Your file is being processed. You will be notified when it is complete."
    }
@app.task
def unzip_and_process(zip_filepath, uploadoptions, parent_job_id, debug=False, **kwargs):
    """Extract every member of a zip archive and queue each one for processing.

    Members after the first get " Part <n>" appended to the upload title.

    Bug fix: the part suffix is now derived from the original title for
    every member; previously the already-mutated title was reused each
    iteration, so suffixes accumulated (" Part 1 Part 2 ...").
    """
    base_title = uploadoptions.get('title') or ''
    with zipfile.ZipFile(zip_filepath, 'r') as zip_ref:
        for index, member in enumerate(zip_ref.namelist()):
            datafile = zip_ref.extract(member, path='media/')
            if index > 0:
                uploadoptions['title'] = "{title} Part {index}".format(
                    title=base_title, index=index)
            uploadoptions['file'] = datafile
            job_id = generate_job_id()
            _ = myqueue(
                queuehigh,
                process_single_file,
                datafile,
                uploadoptions,
                job_id)
    return {
        "status": "completed",
        "job_id": parent_job_id,
        "message": "All files from the zip have been processed."
    }
def get_rower_from_uploadoptions(uploadoptions):
    """Resolve the Rower an upload belongs to.

    Tries, in order: the validated 'user' field of TeamInviteForm, a User
    matched by the 'useremail' option, and finally any Rower listing that
    email among its emailalternatives.  Returns None when no rower can be
    determined.

    Bug fix: ``r`` was left unbound (UnboundLocalError at ``return``) when
    the form had no 'user' key and no 'useremail' was supplied; it now
    defaults to None.
    """
    rowerform = TeamInviteForm(uploadoptions)
    if not rowerform.is_valid():  # pragma: no cover
        return None
    r = None
    try:
        u = rowerform.cleaned_data['user']
        r = getrower(u)
    except KeyError:
        if 'useremail' in uploadoptions:
            us = User.objects.filter(email=uploadoptions['useremail'])
            if len(us):
                u = us[0]
                r = getrower(u)
            else:  # pragma: no cover
                # Fall back to a linear scan over alternative addresses.
                for rwr in Rower.objects.all():
                    if rwr.emailalternatives is not None:
                        if uploadoptions['useremail'] in rwr.emailalternatives:
                            r = rwr
                            break
    return r
def check_and_fix_samplerate(row, file_path):
    """Resample stroke data recorded faster than 1 Hz to one-second intervals.

    When resampling is applied, the gzipped CSV at *file_path* is refreshed
    (old copy removed, then rewritten).  Returns the (possibly new) row
    object together with the path.
    """
    mean_dt = row.df['TimeStamp (sec)'].diff().mean()
    if mean_dt < 1:
        resampled = df_resample(row.df)
        try:
            # Drop the stale on-disk copy before rewriting it.
            os.remove(file_path)
        except Exception:
            pass
        row = rrdata(df=resampled)
        row.write_csv(file_path, gzip=True)
    return row, file_path
def is_water_rowing(df):
    """Return True when *df* contains real GPS latitude data.

    A latitude column that is all-zero or constant (no GPS fix) does not
    count as water rowing.

    Bug fix: the original implicitly returned None when the latitude data
    looked invalid; every path now returns a bool.
    """
    try:
        lat = df[' latitude']
    except KeyError:
        return False
    return bool(lat.mean() != 0 and lat.std() != 0)
def remove_negative_power_peaks(row):
    """Replace isolated negative spikes in the power column with the preceding sample.

    Two passes are made because removing one spike can expose a neighbouring
    one.  find_peaks never reports the first sample as a peak, so the
    ``index - 1`` lookup is always valid.

    Bug fix: writes now go through a plain numpy array instead of pandas
    chained indexing (``df[col][idx] = ...``), which is unreliable under
    copy-on-write pandas.
    """
    column = ' Power (watts)'
    for _pass in range(2):
        values = row.df[column].to_numpy(copy=True)
        # Peaks of the negated signal above 0 are the negative power spikes.
        neg_peaks, _props = find_peaks(-values, height=0)
        values[neg_peaks] = values[neg_peaks - 1]
        row.df[column] = values
    return row
def do_smooth(row, f2):
    """Smooth the pace column with a Savitzky-Golay filter spanning ~10 seconds.

    The unsmoothed velocity is preserved in an 'originalvelo' column and the
    gzipped CSV at *f2* is rewritten with the smoothed data.

    Bug fix: the refreshed file was previously written and then immediately
    deleted; the remove now happens *before* the rewrite, matching
    check_and_fix_samplerate, so the file survives for downstream consumers.
    Also replaces the deprecated ``fillna(method='ffill')`` with ``ffill()``
    and narrows the bare except.
    """
    pace = row.df[' Stroke500mPace (sec/500m)'].values
    velo = 500. / pace
    # Mean sample interval determines the (odd) filter window covering ~10 s.
    f = row.df['TimeStamp (sec)'].diff().mean()
    if f != 0 and not np.isnan(f):
        windowsize = 2 * (int(10. / (f))) + 1
    else:  # pragma: no cover
        windowsize = 1
    if 'originalvelo' not in row.df:
        row.df['originalvelo'] = velo
    if windowsize > 3 and windowsize < len(velo):
        velo2 = savgol_filter(velo, windowsize, 3)
    else:  # pragma: no cover
        velo2 = velo
    velo3 = pd.Series(velo2, dtype='float')
    velo3 = velo3.replace([-np.inf, np.inf], np.nan)
    velo3 = velo3.ffill()
    pace2 = 500. / abs(velo3)
    row.df[' Stroke500mPace (sec/500m)'] = pace2
    row.df = row.df.fillna(0)
    try:
        os.remove(f2)
    except OSError:
        pass
    row.write_csv(f2, gzip=True)
    return row
def update_workout_attributes(w, row, file_path, uploadoptions,
                              startdatetime='',
                              timezone='', forceunit='lbs'):
    """Populate workout *w* from parsed stroke data and upload options,
    save it, and optionally attach it to a virtual race registration.

    Returns the saved Workout.

    Bug fixes: the on-the-water race branch previously filtered and indexed
    the *indoor* queryset; the AttributeError fallback re-ran the failing
    datetime addition instead of using the re-parsed value; a redundant
    duplicate 'stravaid' lookup and an unused 'rankingpiece' local were
    removed.
    """
    # Derive start date/time, timezone and part of day from the stroke data.
    startdatetime, startdate, starttime, timezone_str, partofday = get_startdate_time_zone(
        w.user, row, startdatetime=startdatetime, timezone=timezone
    )
    boattype = uploadoptions.get('boattype', '1x')
    workoutsource = uploadoptions.get('workoutsource', 'unknown')
    stravaid = uploadoptions.get('stravaid', 0)
    rpe = uploadoptions.get('rpe', 0)
    notes = uploadoptions.get('notes', '')
    inboard = uploadoptions.get('inboard', 0.88)
    oarlength = uploadoptions.get('oarlength', 2.89)
    useImpeller = uploadoptions.get('useImpeller', False)
    seatnumber = uploadoptions.get('seatNumber', 1)
    boatname = uploadoptions.get('boatName', '')
    portStarboard = uploadoptions.get('portStarboard', 1)
    empowerside = 'port'
    raceid = uploadoptions.get('raceid', 0)
    registrationid = uploadoptions.get('submitrace', 0)
    if portStarboard == 1:
        empowerside = 'starboard'
    if stravaid != 0:  # pragma: no cover
        workoutsource = 'strava'
        w.uploadedtostrava = stravaid
    workouttype = uploadoptions.get('workouttype', 'rower')
    title = uploadoptions.get('title', '')
    if title is None or title == '':
        # No user-supplied title: synthesize one from the time of day.
        title = 'Workout'
        if partofday is not None:
            title = '{partofday} {workouttype}'.format(
                partofday=partofday,
                workouttype=workouttype,
            )
    averagehr = row.df[' HRCur (bpm)'].mean()
    maxhr = row.df[' HRCur (bpm)'].max()
    totaldist = uploadoptions.get('distance', 0)
    if totaldist == 0:
        totaldist = row.df['cum_dist'].max()
    totaltime = uploadoptions.get('duration', 0)
    if totaltime == 0:
        totaltime = row.df['TimeStamp (sec)'].max() - row.df['TimeStamp (sec)'].min()
        try:
            # Account for time elapsed before the first recorded sample.
            totaltime = totaltime + row.df.loc[:, ' ElapsedTime (sec)'].iloc[0]
        except KeyError:  # pragma: no cover
            pass
    if np.isnan(totaltime):  # pragma: no cover
        totaltime = 0
    if uploadoptions.get('summary', '') == '':
        summary = row.allstats()
    else:
        summary = uploadoptions.get('summary', '')
    if uploadoptions.get('makeprivate', False):  # pragma: no cover
        privacy = 'hidden'
    elif workoutsource != 'strava':
        privacy = 'visible'
    else:  # pragma: no cover
        privacy = 'hidden'
    # Guard against NaN values before writing them to the model.
    totaldist = np.nan_to_num(totaldist)
    maxhr = np.nan_to_num(maxhr)
    averagehr = np.nan_to_num(averagehr)
    dragfactor = 0
    if workouttype in otetypes:
        dragfactor = row.dragfactor
    delta = datetime.timedelta(seconds=totaltime)
    try:
        workoutenddatetime = startdatetime + delta
    except AttributeError:  # pragma: no cover
        # startdatetime was not a datetime; re-parse it and use the parsed
        # value for the addition (the original re-ran the failing expression).
        import pendulum  # referenced but not imported at module level
        startdatetime = pendulum.parse(str(startdatetime))
        workoutenddatetime = startdatetime + delta
    # Flag workouts that overlap an existing one for this user.
    duplicate = checkduplicates(
        w.user, startdate, startdatetime, workoutenddatetime)
    # Cap at the model's 140 character title limit.
    if title is not None and len(title) > 140:  # pragma: no cover
        title = title[0:140]
    timezone_str = str(startdatetime.tzinfo)
    duration = totaltime_sec_to_string(totaltime)
    w.name = title
    w.date = startdate
    w.workouttype = workouttype
    w.boattype = boattype
    w.dragfactor = dragfactor
    w.duration = duration
    w.distance = totaldist
    w.weightcategory = w.user.weightcategory
    w.adaptiveclass = w.user.adaptiveclass
    w.starttime = starttime
    w.duplicate = duplicate
    w.workoutsource = workoutsource
    w.rankingpiece = False
    w.forceunit = forceunit
    w.rpe = rpe
    w.csvfilename = file_path
    w.notes = notes
    w.summary = summary
    w.maxhr = maxhr
    w.averagehr = averagehr
    w.startdatetime = startdatetime
    w.inboard = inboard
    w.oarlength = oarlength
    w.seatnumber = seatnumber
    w.boatname = boatname
    w.empowerside = empowerside
    w.timezone = timezone_str
    w.privacy = privacy
    w.impeller = useImpeller
    w.save()
    # Attach the workout to an open virtual race registration, if requested.
    if registrationid != 0:  # pragma: no cover
        races = VirtualRace.objects.filter(
            registration_closure__gt=tz.now(),
            id=raceid,
        )
        registrations = IndoorVirtualRaceResult.objects.filter(
            race__in=races,
            id=registrationid,
            userid=w.user.id
        )
        registrations2 = VirtualRaceResult.objects.filter(
            race__in=races,
            id=registrationid,
            userid=w.user.id)
        if registrationid in [reg.id for reg in registrations]:
            # Indoor race registration.
            registrations = registrations.filter(id=registrationid)
            if registrations:
                race = registrations[0].race
                if race.sessiontype == 'indoorrace':
                    result, comments, errors, jobid = add_workout_indoorrace(
                        [w], race, w.user, recordid=registrations[0].id
                    )
                elif race.sessiontype in ['fastest_time', 'fastest_distance']:
                    result, comments, errors, jobid = add_workout_fastestrace(
                        [w], race, w.user, recordid=registrations[0].id
                    )
        if registrationid in [reg.id for reg in registrations2]:
            # On-the-water race registration.
            # Bug fix: this branch previously filtered/indexed the *indoor*
            # queryset; it now consistently uses registrations2.
            registrations2 = registrations2.filter(id=registrationid)
            if registrations2:
                race = registrations2[0].race
                if race.sessiontype == 'race':
                    result, comments, errors, jobid = add_workout_race(
                        [w], race, w.user, recordid=registrations2[0].id
                    )
                elif race.sessiontype in ['fastest_time', 'fastest_distance']:
                    result, comments, errors, jobid = add_workout_fastestrace(
                        [w], race, w.user, recordid=registrations2[0].id
                    )
        # NOTE(review): add_workout_indoorrace / add_workout_race /
        # add_workout_fastestrace are not imported in this module — confirm
        # they are in scope at runtime.
    return w
def send_upload_confirmation_email(rower, workout):
    """Email the rower a link to the freshly uploaded workout.

    Skipped when the rower opted out of notifications or their address has
    bounced before.
    """
    if not rower.getemailnotifications or rower.emailbounced:
        return
    # pragma: no cover — exercised only with a live mail backend
    link = settings.SITE_URL + reverse(
        rower.defaultlandingpage,
        kwargs={'id': encoder.encode_hex(workout.id)},
    )
    _ = send_confirm(rower.user, workout.name, link, '')
def update_running_wps(r, w, row):
    """Update the rower's rolling 42-day average work-per-stroke figures.

    Maintains r.running_wps_erg for erg workouts and r.running_wps for
    on-the-water workouts; duplicate workouts are excluded.
    """
    if not w.duplicate and w.workouttype in otetypes:
        # Number of erg workouts in the trailing 42 days (excluding duplicates).
        cntr = Workout.objects.filter(user=r, workouttype__in=otetypes,
                                      startdatetime__gt=tz.now()-tz.timedelta(days=42),
                                      duplicate=False).count()
        # Incremental running mean over cntr+1 workouts.
        new_value = (cntr*r.running_wps_erg + row.df['driveenergy'].mean())/(cntr+1.0)
        # if new_value is not zero or infinite or -inf, r.running_wps can be set to value
        if not (math.isnan(new_value) or math.isinf(new_value) or new_value == 0):  # pragma: no cover
            r.running_wps_erg = new_value
        elif not (math.isnan(r.running_wps_erg) or math.isinf(r.running_wps_erg) or r.running_wps_erg == 0):
            # Existing stored value is sane; keep it.
            pass
        else:  # pragma: no cover
            # No usable data at all: fall back to a default erg WPS.
            r.running_wps_erg = 600.
        r.save()
    if not w.duplicate and w.workouttype in otwtypes:
        cntr = Workout.objects.filter(user=r, workouttype__in=otwtypes,
                                      startdatetime__gt=tz.now()-tz.timedelta(days=42),
                                      duplicate=False).count()
        try:
            # NOTE(review): this reads running_wps_erg in the on-the-water
            # branch while the fallback below uses running_wps — confirm
            # which attribute is intended here.
            new_value = (cntr*r.running_wps_erg + row.df['driveenergy'].mean())/(cntr+1.0)
        except TypeError:  # pragma: no cover
            new_value = r.running_wps
        if not (math.isnan(new_value) or math.isinf(new_value) or new_value == 0):
            r.running_wps = new_value
        elif not (math.isnan(r.running_wps) or math.isinf(r.running_wps) or r.running_wps == 0):
            pass
        else:  # pragma: no cover
            # No usable data at all: fall back to a default OTW WPS.
            r.running_wps = 400.
        r.save()
@app.task
def process_single_file(file_path, uploadoptions, job_id, debug=False, **kwargs):
    """Process one uploaded workout file end to end.

    Copies the file into media/ under a unique name, parses it, cleans the
    stroke data, creates/updates the Workout, stores stroke data, sends
    notifications and queues follow-up jobs.  Returns an error status dict
    on failure, or (True, csv_path) on success.
    """
    # copy file to a unique name in media folder
    f2 = file_path
    try:
        nn, ext = os.path.splitext(f2)
        if ext == '.gz':
            # Keep compound extensions such as '.csv.gz' intact.
            nn, ext2 = os.path.splitext(nn)
            ext = ext2 + ext
        f1 = uuid4().hex[:10]+'-'+strftime('%Y%m%d-%H%M%S')+ext
        f2 = 'media/'+f1
        copyfile(file_path, f2)
    except FileNotFoundError:  # pragma: no cover
        return {
            "status": "error",
            "job_id": job_id,
            "message": "File not found during processing."
        }
    # determine the user
    r = get_rower_from_uploadoptions(uploadoptions)
    if r is None:  # pragma: no cover
        os.remove(f2)
        return {
            "status": "error",
            "job_id": job_id,
            "message": "Rower not found for the provided upload options."
        }
    try:
        fileformat = get_file_type(f2)
    except Exception as e:  # pragma: no cover
        os.remove(f2)
        return {
            "status": "error",
            "job_id": job_id,
            "message": "Error determining file format: {error}".format(error=str(e))
        }
    # Get fileformat from fit & tcx
    if "fit" in fileformat:
        # FIT files carry their own workout type, title and notes; let them
        # override the submitted options.
        workouttype = get_workouttype_from_fit(f2)
        uploadoptions['workouttype'] = workouttype
        new_title = get_title_from_fit(f2)
        if new_title:  # pragma: no cover
            uploadoptions['title'] = new_title
        new_notes = get_notes_from_fit(f2)
        if new_notes:  # pragma: no cover
            uploadoptions['notes'] = new_notes
    # handle non-Painsled
    if fileformat != 'csv':
        # Convert other formats to the canonical gzipped CSV layout.
        f2, summary, oarlength, inboard, fileformat, impeller = handle_nonpainsled(
            f2,
            fileformat,
        )
        uploadoptions['summary'] = summary
        uploadoptions['oarlength'] = oarlength
        uploadoptions['inboard'] = inboard
        uploadoptions['useImpeller'] = impeller
        # NOTE(review): 'strave' looks like a typo for 'strava'; as written
        # this comparison is effectively always True — confirm intent.
        if uploadoptions['workouttype'] != 'strave':
            uploadoptions['workoutsource'] = fileformat
        if not f2:  # pragma: no cover
            return {
                "status": "error",
                "job_id": job_id,
                "message": "Error processing non-Painsled file."
            }
    # create raw row data object
    # Power zone boundaries expressed as a percentage of FTP.
    powerperc = 100 * np.array([r.pw_ut2,
                                r.pw_ut1,
                                r.pw_at,
                                r.pw_tr, r.pw_an]) / r.ftp
    rr = rrower(hrmax=r.max, hrut2=r.ut2,
                hrut1=r.ut1, hrat=r.at,
                hrtr=r.tr, hran=r.an, ftp=r.ftp,
                powerperc=powerperc, powerzones=r.powerzones)
    row = rdata(f2, rower=rr)
    if row.df.empty:  # pragma: no cover
        os.remove(f2)
        return {
            "status": "error",
            "job_id": job_id,
            "message": "No valid data found in the uploaded file."
        }
    # NOTE(review): likely unreachable — rdata() returned an object above;
    # row == 0 can only hold if rdata defines __eq__. Confirm.
    if row == 0:  # pragma: no cover
        os.remove(f2)
        return {
            "status": "error",
            "job_id": job_id,
            "message": "Error creating row data from the file."
        }
    # check and fix sample rate
    row, f2 = check_and_fix_samplerate(row, f2)
    # change rower type to water if GPS data is present
    if is_water_rowing(row.df):
        uploadoptions['workouttype'] = 'water'
    # remove negative power peaks
    row = remove_negative_power_peaks(row)
    # optional auto smoothing
    row = do_smooth(row, f2)
    # recalculate power data
    if uploadoptions['workouttype'] in otetypes:
        try:
            if r.erg_recalculatepower:
                row.erg_recalculatepower()
                row.write_csv(f2, gzip=True)
        except Exception as e:
            # Best-effort recalculation; failures are non-fatal.
            pass
    workoutid = uploadoptions.get('id', None)
    if workoutid is not None:  # pragma: no cover
        # Reuse the pre-created workout when an id was supplied.
        try:
            w = Workout.objects.get(id=workoutid)
        except Workout.DoesNotExist:
            w = Workout(user=r, duration='00:00:00')
            w.save()
    else:
        w = Workout(user=r, duration='00:00:00')
        w.save()
    # set workout attributes from uploadoptions and calculated values
    w = update_workout_attributes(w, row, f2, uploadoptions)
    # add teams
    if w.privacy == 'visible':
        ts = Team.objects.filter(rower=r
                                 )
        for t in ts:  # pragma: no cover
            w.team.add(t)
    # put stroke data in file store through "dataplep"
    try:
        # NOTE(review): rrdata_pl and pl are not defined/imported in this
        # module, so this always raises NameError and is swallowed by the
        # bare except — confirm whether the polars path should be wired up
        # ('form_pandas' also looks like a typo for 'from_pandas').
        row = rrdata_pl(df=pl.form_pandas(row.df))
    except:
        pass
    _ = dataplep(row.df, id=w.id, bands=True,
                 barchart=True, otwpower=True, empower=True, inboard=w.inboard)
    # send confirmation email
    send_upload_confirmation_email(r, w)
    # check for breakthroughs
    # NOTE(review): isbreakthrough/ishard are computed but never used here.
    isbreakthrough, ishard = checkbreakthrough(w, r)
    _ = check_marker(w)
    _ = update_wps(r, otwtypes)
    _ = update_wps(r, otetypes)
    # update running_wps
    update_running_wps(r, w, row)
    # calculate TRIMP
    if w.workouttype in otwtypes:
        wps_avg = r.median_wps
    elif w.workouttype in otetypes:
        wps_avg = r.median_wps_erg
    else:  # pragma: no cover
        wps_avg = 0
    _ = myqueue(queuehigh, handle_calctrimp, w.id, f2,
                r.ftp, r.sex, r.hrftp, r.max, r.rest, wps_avg)
    # make plots
    if uploadoptions.get('makeplot', False):  # pragma: no cover
        plottype = uploadoptions.get('plottype', 'timeplot')
        res, jobid = uploads.make_plot(r, w, f1, f2, plottype, w.name)
    elif r.staticchartonupload != 'None':  # pragma: no cover
        plottype = r.staticchartonupload
        res, jobid = uploads.make_plot(r, w, f1, f2, plottype, w.name)
    # sync workouts to connected services
    uploads.do_sync(w, uploadoptions, quick=True)
    return True, f2

View File

@@ -1658,18 +1658,6 @@ def new_workout_from_file(r, f2,
if fileformat == 'unknown': # pragma: no cover
message = "We couldn't recognize the file type"
extension = os.path.splitext(f2)[1]
filename = os.path.splitext(f2)[0]
if extension == '.gz':
filename = os.path.splitext(filename)[0]
extension2 = os.path.splitext(filename)[1]+extension
extension = extension2
f4 = filename+'a'+extension
copyfile(f2, f4)
_ = myqueue(queuehigh,
handle_sendemail_unrecognized,
f4,
r.user.email)
return (0, message, f2)

View File

@@ -425,8 +425,6 @@ class DocumentsForm(forms.Form):
notes = forms.CharField(required=False,
widget=forms.Textarea)
offline = forms.BooleanField(initial=False, required=False,
label='Process in Background')
class Meta:
fields = ['title', 'file', 'workouttype',
@@ -580,9 +578,6 @@ class UploadOptionsForm(forms.Form):
label='Submit as challenge Result',
required=False)
landingpage = forms.ChoiceField(choices=nextpages,
initial='workout_edit_view',
label='After Upload, go to')
raceid = forms.IntegerField(initial=0, widget=HiddenInput())

View File

@@ -15,13 +15,13 @@ import pytz
from rowsandall_app.settings import (
C2_CLIENT_ID, C2_REDIRECT_URI, C2_CLIENT_SECRET,
UPLOAD_SERVICE_URL, UPLOAD_SERVICE_SECRET
)
from rowers.tasks import (
handle_c2_import_stroke_data, handle_c2_sync, handle_c2_async_workout,
handle_c2_getworkout
handle_c2_import_stroke_data, handle_c2_sync,
)
from rowers.upload_tasks import handle_c2_async_workout, handle_c2_getworkout
import django_rq
queue = django_rq.get_queue('default')
queuelow = django_rq.get_queue('low')

View File

@@ -10,7 +10,7 @@ from rowers import mytypes
import shutil
from rowers.rower_rules import is_workout_user, ispromember
from rowers.utils import myqueue, dologging, custom_exception_handler
from rowers.tasks import handle_intervals_getworkout, handle_request_post
from rowers.upload_tasks import handle_intervals_getworkout
import urllib
import gzip
@@ -26,7 +26,6 @@ from rowers.opaque import encoder
from rowsandall_app.settings import (
INTERVALS_CLIENT_ID, INTERVALS_REDIRECT_URI, INTERVALS_CLIENT_SECRET, SITE_URL,
UPLOAD_SERVICE_SECRET, UPLOAD_SERVICE_URL
)
import django_rq
@@ -57,6 +56,7 @@ intervals_token_url = 'https://intervals.icu/api/oauth/token'
webhookverification = 'JA9Vt6RNH10'
class IntervalsIntegration(SyncIntegration):
def __init__(self, *args, **kwargs):
super(IntervalsIntegration, self).__init__(*args, **kwargs)
self.oauth_data = {
@@ -315,6 +315,7 @@ class IntervalsIntegration(SyncIntegration):
return workouts
def update_workout(self, id, *args, **kwargs) -> int:
from rowers.dataflow import upload_handler
try:
_ = self.open()
except NoTokenError:
@@ -419,7 +420,6 @@ class IntervalsIntegration(SyncIntegration):
uploadoptions = {
'secret': UPLOAD_SERVICE_SECRET,
'user': self.rower.user.id,
'boattype': '1x',
'workouttype': w.workouttype,
@@ -427,8 +427,8 @@ class IntervalsIntegration(SyncIntegration):
'intervalsid': id,
'id': w.id,
}
url = UPLOAD_SERVICE_URL
response = requests.post(url, data=uploadoptions)
response = upload_handler(uploadoptions, temp_filename)
except FileNotFoundError:
return 0
except Exception as e:
@@ -443,6 +443,7 @@ class IntervalsIntegration(SyncIntegration):
return 1
def get_workout(self, id, *args, **kwargs) -> int:
from rowers.dataflow import upload_handler
try:
_ = self.open()
except NoTokenError:
@@ -542,8 +543,17 @@ class IntervalsIntegration(SyncIntegration):
except:
return 0
w = Workout(
user=r,
name=title,
workoutsource='intervals.icu',
workouttype=workouttype,
duration=duration,
distance=distance,
intervalsid=id,
)
uploadoptions = {
'secret': UPLOAD_SERVICE_SECRET,
'user': r.user.id,
'boattype': '1x',
'workouttype': workouttype,
@@ -555,28 +565,23 @@ class IntervalsIntegration(SyncIntegration):
'offline': False,
}
url = UPLOAD_SERVICE_URL
handle_request_post(url, uploadoptions)
response = upload_handler(uploadoptions, fit_filename)
try:
pair_id = data['paired_event_id']
pss = PlannedSession.objects.filter(intervals_icu_id=pair_id, rower=r)
ws = Workout.objects.filter(uploadedtointervals=id)
for w in ws:
w.sub_type = subtype
w.save()
if is_commute:
for w in ws:
w.is_commute = True
w.sub_type = "Commute"
w.save()
if is_race:
for w in ws:
w.is_race = True
w.save()
if pss.count() > 0:
for ps in pss:
for w in ws:
w.plannedsession = ps
w.save()
except KeyError:

View File

@@ -103,6 +103,8 @@ class PolarIntegration(SyncIntegration):
return 1
def get_polar_workouts(self, user):
from rowers.dataflow import upload_handler
r = Rower.objects.get(user=user)
exercise_list = []
@@ -191,28 +193,9 @@ class PolarIntegration(SyncIntegration):
'title': '',
}
url = settings.UPLOAD_SERVICE_URL
dologging('polar.log', uploadoptions)
dologging('polar.log', url)
_ = myqueue(
queuehigh,
handle_request_post,
url,
uploadoptions
)
dologging('polar.log', response.status_code)
if response.status_code != 200: # pragma: no cover
try:
dologging('polar.log', response.text)
except:
pass
try:
dologging('polar.log', response.json())
except:
pass
response = upload_handler(uploadoptions, filename)
if response['status'] != 'processing':
return 0
exercise_dict['filename'] = filename
else: # pragma: no cover

View File

@@ -1,10 +1,9 @@
from .integrations import SyncIntegration, NoTokenError, create_or_update_syncrecord, get_known_ids
from rowers.models import User, Rower, Workout, TombStone
from rowers.tasks import handle_rp3_async_workout
from rowers.upload_tasks import handle_rp3_async_workout
from rowsandall_app.settings import (
RP3_CLIENT_ID, RP3_CLIENT_KEY, RP3_REDIRECT_URI, RP3_CLIENT_SECRET,
UPLOAD_SERVICE_URL, UPLOAD_SERVICE_SECRET
)
from rowers.utils import myqueue, NoTokenError, dologging, uniqify

View File

@@ -3,7 +3,8 @@ from rowers.models import User, Rower, Workout, TombStone
from rowingdata import rowingdata
from rowers.tasks import handle_sporttracks_sync, handle_sporttracks_workout_from_data
from rowers.tasks import handle_sporttracks_sync
from rowers.upload_tasks import handle_sporttracks_workout_from_data
from rowers.rower_rules import is_workout_user
import rowers.mytypes as mytypes
from rowsandall_app.settings import (

View File

@@ -4,10 +4,11 @@ from rowingdata import rowingdata
from rowers import mytypes
from rowers.tasks import handle_strava_sync, fetch_strava_workout
from rowers.tasks import handle_strava_sync
from stravalib.exc import ActivityUploadFailed, TimeoutExceeded
from rowers.rower_rules import is_workout_user, ispromember
from rowers.utils import get_strava_stream, custom_exception_handler
from rowers.upload_tasks import fetch_strava_workout
from rowers.utils import myqueue, dologging
#from rowers.imports import *

View File

@@ -5,7 +5,6 @@ from datetime import timedelta
from uuid import uuid4
import traceback
from rowsandall_app.settings import UPLOAD_SERVICE_SECRET, UPLOAD_SERVICE_URL
from rowsandall_app.settings import NK_API_LOCATION
from rowers.utils import dologging
@@ -106,7 +105,6 @@ def add_workout_from_data(userid, nkid, data, strokedata, source='nk', splitdata
boattype = "1x"
uploadoptions = {
'secret': UPLOAD_SERVICE_SECRET,
'user': userid,
'file': csvfilename,
'title': title,
@@ -128,26 +126,16 @@ def add_workout_from_data(userid, nkid, data, strokedata, source='nk', splitdata
dologging('nklog.log',json.dumps(uploadoptions))
dologging('metrics.log','NK ID {nkid}'.format(nkid=nkid))
session = requests.session()
newHeaders = {'Content-type': 'application/json', 'Accept': 'text/plain'}
session.headers.update(newHeaders)
from rowers.dataflow import upload_handler
response = session.post(UPLOAD_SERVICE_URL, json=uploadoptions)
response = upload_handler(uploadoptions, csvfilename)
if response.status_code != 200: # pragma: no cover
return 0, response.text
if response["status"] == "processing": # pragma: no cover
return 1, ""
else:
dologging('nklog.log','Upload response: {resp}'.format(resp=json.dumps(response)))
try:
workoutid = response.json()['id']
except KeyError: # pragma: no cover
workoutid = 0
# dologging('nklog.log','Workout ID {id}'.format(id=workoutid))
# evt update workout summary
# return
return workoutid, ""
return 0, response
def get_nk_intervalstats(workoutdata, strokedata):
@@ -353,3 +341,5 @@ def readlogs_summaries(logfile, dosave=0): # pragma: no cover
except Exception:
print(traceback.format_exc())
print("error")

View File

@@ -8,7 +8,6 @@ from datetime import timedelta
from rowsandall_app.settings import (
ROJABO_CLIENT_ID, ROJABO_REDIRECT_URI, ROJABO_CLIENT_SECRET,
SITE_URL, ROJABO_OAUTH_LOCATION,
UPLOAD_SERVICE_URL, UPLOAD_SERVICE_SECRET,
)
import gzip
import rowers.mytypes as mytypes

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,343 @@
{% extends "newbase.html" %}
{% load static %}
{% load rowerfilters %}
{% block title %}File loading{% endblock %}
{% block meta %}
<script type='text/javascript'
src='https://ajax.googleapis.com/ajax/libs/jquery/2.1.4/jquery.min.js'>
</script>
<script type='text/javascript'
src='https://ajax.aspnetcdn.com/ajax/jquery.validate/1.14.0/jquery.validate.min.js'>
</script>
{% endblock %}
{% block main %}
<div id="id_main">
<ul class="main-content">
<li class="grid_2">
<div id="id_dropregion" class="watermark invisible">
<p>Drag and drop files here </p>
</div>
<div id="id_drop-files" class="drop-files">
<form id="file_form" enctype="multipart/form-data" action="{{ formloc }}" method="post">
<h1>Upload Workout File</h1>
{% if user.is_authenticated and user|coach_rowers %}
<p>Looking for <a href="/rowers/workout/upload/team/">Team Manager
Upload?</a></p>
{% endif %}
{% if form.errors %}
<p style="color: red;">
Please correct the error{{ form.errors|pluralize }} below.
</p>
{% endif %}
<table>
{{ form.as_table }}
</table>
{% csrf_token %}
<p>
&nbsp;<input type="submit" value="Submit">
</p>
</div>
</li>
<li class="grid_2">
<h1>Optional extra actions</h1>
<p>
<table>
{{ optionsform.as_table }}
</table>
</p>
<p>
You can select one static plot to be generated immediately for
this workout. You can select to export to major fitness
platforms automatically.
If you check "make private", this workout will not be visible to your followers and will not show up in your teams' workouts list. With the Landing Page option, you can select to which (workout related) page you will be
taken after a successful upload.
</p>
<p>
If you don't have a workout file but have written down the splits,
you can create a workout file yourself from <a href="/static/dummy_workout_template.xls">this template</a>
</p>
<p><b>Select Files with the File button or drag them on the marked area</b></p>
</li>
</form>
</ul>
</div>
{% endblock %}
{% block scripts %}
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.4/jquery.min.js"></script>
<script>
$( document ).ready(function() {
    // Display labels for on-the-water boat classes.
    var boattypes = {
        '1x': '1x (single)',
        '2x': '2x (double)',
        '2x+': '2x+ (coxed double)',
        '2-': '2- (pair)',
        '2+': '2+ (coxed pair)',
        '3x+': '3x+ (coxed triple)',
        '3x-': '3x- (triple)',
        '4x': '4x (quad)',
        '4x+': '4x+ (coxed quad)',
        '4-': '4- (four)',
        '4+': '4+ (coxed four)',
        '8+': '8+ (eight)',
        '8x+': '8x+ (octuple scull)',
    }
    // Display labels for indoor machine types.
    var ergtypes = {
        'static': 'Concept2 static',
        'dynamic': 'Concept2 dynamic',
        'slides': 'Concept2 slides',
        'rp3': 'RP3',
        'waterrower': 'Water Rower',
        'other': 'Other Indoor Rower',
    }
    // Swap the boattype choices (and dragfactor visibility) to match the
    // selected workout type.
    // Bug fix: element-creation strings were missing the closing '>'
    // ("<option></option") — now well-formed.
    $('#id_workouttype').on('change', function(){
        if (
            $(this).val() == 'water'
            || $(this).val() == 'coastal'
            || $(this).val() == 'c-boat'
            || $(this).val() == 'churchboat'
        ) {
            var $el = $('#id_boattype');
            $el.empty();
            $.each(boattypes, function(key,value) {
                if ( key == '{{ workout.boattype }}') {
                    $el.append($("<option></option>").attr("value", key).attr("selected", "selected").text(value));
                } else {
                    $el.append($("<option></option>").attr("value", key).text(value));
                }
            });
            $el.toggle(true);
        }
        else if (
            $(this).val() == 'rower'
        ) {
            var $el = $('#id_boattype');
            $el.empty();
            $.each(ergtypes, function(key,value) {
                if ( key == '{{ workout.boattype }}') {
                    $el.append($("<option></option>").attr("value", key).attr("selected", "selected").text(value));
                } else {
                    $el.append($("<option></option>").attr("value", key).text(value));
                }
            });
            $el.toggle(true);
        }
        else {
            $('#id_boattype').toggle(false);
            $('#id_boattype').val('1x');
        }
        // Drag factor only makes sense for erg-style workouts.
        if (
            $(this).val() == 'rower'
            || $(this).val() == 'dynamic'
            || $(this).val() == 'slides'
        ) {
            $('#id_dragfactor').toggle(true);
        } else {
            $('#id_dragfactor').toggle(false);
            $('#id_dragfactor').val('0');
        }
    });
    // Apply the rules once on page load for the pre-selected type.
    $('#id_workouttype').change();
});
</script>
<script>
// Drag & drop upload support. A FormData object is kept in sync with the
// form fields and submitted instead of the raw form so dropped files can
// be attached. Feature-detect FormData.set() first: browsers without it
// cannot support this flow, so the drop region is removed and only the
// plain file input remains.
var td = new FormData();
var formdatasetok = false;
try {
    td.set('aap','noot');
    formdatasetok = true;
    console.log('FormData.set OK');
}
catch(err) {
    console.log('FormData.set not OK');
    formdatasetok = false;
}
if (!formdatasetok) {
    $("#id_dropregion").remove();
}
if (formdatasetok) {
    $(document).ready(function() {
        // Send Django's CSRF token on every non-safe same-origin AJAX request.
        var csrftoken = jQuery("[name=csrfmiddlewaretoken]").val();
        console.log("CSRF token",csrftoken);
        function csrfSafeMethod(method) {
            // these HTTP methods do not require CSRF protection
            return (/^(GET|HEAD|OPTIONS|TRACE)$/.test(method));
        }
        $.ajaxSetup({
            beforeSend: function(xhr, settings) {
                if (!csrfSafeMethod(settings.type) && !this.crossDomain) {
                    xhr.setRequestHeader("X-CSRFToken", csrftoken);
                }
            }
        });
        console.log("Loading dropper");
        // Expose event.dataTransfer on jQuery event objects (needed by the
        // drop handler). NOTE(review): jQuery.event.props was removed in
        // jQuery 3.x; this relies on the 2.1.4 build loaded above.
        jQuery.event.props.push('dataTransfer');
        // Highlight the drop region while a file is dragged over the window.
        $(window).on('dragenter', function() {
            $("#id_drop-files").css("background-color","#E9E9E4");
            $("#id_dropregion").addClass("watermark").removeClass("invisible");})
        $(window).on('dragleave', function() {
            $("#id_drop-files").css("background-color","#FFFFFF");
            $("#id_dropregion").removeClass("watermark").addClass("invisible");})
        var frm = $("#file_form");
        if( window.FormData === undefined ) {
            console.log('no formdata');
            alert("No FormData");
        } else {
            console.log('we have formdata');
        }
        var data = new FormData(frm[0]);
        // File chosen via the regular file input: enforce the 10 MB hard
        // limit and suggest background processing above 2 MB. Both limits
        // are divided by 5 for gzipped TCX, which expands considerably.
        $('#id_file').on('change', function(evt) {
            var f = this.files[0];
            console.log(f);
            var istcx = false;
            var isgzip = false;
            var size1 = 10485760;   // hard limit (10 MB)
            var size2 = 2097152;    // "recommend offline" threshold (2 MB)
            if ((/\.(tcx|TCX)/i).test(f.name)) {
                istcx = true;
                console.log('tcx');
                if ((/\.(gz|GZ)/i).test(f.name)) {
                    isgzip = true;
                    console.log('gzip');
                    size1 /= 5;
                    size2 /= 5;
                }
            }
            console.log(size1)
            console.log(size2)
            if (f.size > size1) {
                alert("File Size must be smaller than 10 MB");
                this.value = null;
            } else {
                if (f.size > size2) {
                    // Pre-select background (offline) processing for big files.
                    $('#id_offline').val('True');
                    $('#id_offline').prop('checked','True');
                    data.set($('#id_offline').attr('name'),$('#id_offline').prop('checked'));
                    console.log("Set offline to True");
                    $('#extra_message').text('Because of the large size, we recommend to use background processing. You will receive email when it is done.');
                    $('#extra_message').addClass('message');
                }
            }
        });
        // Mirror every form field change into the FormData object.
        $('input').each(function( i ) {
            $(this).change(function() {
                if ($(this).attr('type') == 'checkbox') {
                    data.set($(this).attr('name'),$(this).prop('checked'));
                    console.log($(this).attr('id'),$(this).attr('name'),$(this).prop('checked'));
                } else {
                    data.set($(this).attr('name'),$(this).val());
                    if ($(this).attr('id') == 'id_file') {
                        data.set("file",this.files[0]);
                    }
                    console.log($(this).attr('id'),$(this).attr('name'),$(this).val());
                };
            });});
        $('textarea').each(function( i ) {
            $(this).change(function() {
                data.set($(this).attr('name'),$(this).val());
                console.log($(this).attr('id'),$(this).attr('name'),$(this).val());
            });});
        $('select').each(function( i ) {
            console.log($(this).attr('name'),$(this).val());
            $(this).change(function() {
                data.set($(this).attr('name'),$(this).val());
                console.log($(this).attr('id'),$(this).attr('name'),$(this).val());
            });
        });
        $('#id_drop-files').bind({
            drop: function(e) {
                e.preventDefault();
                console.log("you dropped something");
                var files = e.dataTransfer.files;
                console.log(files[0]);
                var f = files[0];
                var istcx = false;
                var isgzip = false;
                var size1 = 10485760;
                // NOTE(review): this path warns at 1 MB while the file-input
                // path above warns at 2 MB — presumably one of the two
                // constants is stale; confirm the intended threshold.
                var size2 = 1048576;
                if ((/\.(tcx|TCX)/i).test(f.name)) {
                    istcx = true;
                    console.log('tcx');
                    if ((/\.(gz|GZ)/i).test(f.name)) {
                        isgzip = true;
                        console.log('gzip');
                        size1 /= 5;
                        size2 /= 5;
                    }
                }
                console.log(f);
                console.log(size1)
                console.log(size2)
                if (f.size > size1) {
                    alert("File Size must be smaller than 10 MB");
                    // FIX: was `$("#id_file").value = 0;`, a no-op because
                    // jQuery objects have no .value property — the oversize
                    // file was never cleared from the input.
                    $("#id_file").val('');
                    return false;
                }
                if (f.size > size2) {
                    $('#id_offline').val('True');
                    $('#id_offline').prop('checked','True');
                    data.set($('#id_offline').attr('name'),$('#id_offline').prop('checked'));
                    console.log("Set offline to True");
                    $('#extra_message').text('Because of the large size, we recommend to use background processing. You will receive email when it is done.');
                    $('#extra_message').addClass('message');
                }
                data.set("file",f);
                // data.append("file",f);
                // Replace the file input with the dropped file's name plus a
                // remove link (handled by the delegated click handler below).
                $("#id_file").replaceWith('<div id="id_file">'+files[0].name+'&nbsp; <a class="remove" href="javascript:void(0);"><b><font color="red">X</font></b></a></div>');
            },
            mouseenter:function(){$("#id_drop-files").css("background-color","#E9E9E4");},
            mouseleave:function(){$("#id_drop-files").css("background-color","#FFFFFF");},
            dragover:function(e){
                e.preventDefault();
                $("#id_drop-files").css("background-color","#E9E9E4");},
            dragleave:function(e){ e.preventDefault();},
        });
        // Clicking the remove link restores a fresh file input.
        $(document).on("click", "a.remove", function() {
            $(this).parent().replaceWith('<td><input id="id_file" name="file" type="file" /></td>');
        });
    });
}
</script>
{% endblock %}
{% block sidebar %}
{% include 'menu_workouts.html' %}
{% endblock %}

View File

@@ -9,6 +9,9 @@ nu = datetime.datetime.now()
from django.db import transaction
from rowers.views import add_defaultfavorites
from rowers.dataflow import process_single_file, upload_handler
from django.core.files.uploadedfile import SimpleUploadedFile
from django.conf import settings
#@pytest.mark.django_db
@override_settings(TESTING=True)
@@ -28,6 +31,55 @@ class ViewTest(TestCase):
self.nu = datetime.datetime.now()
file_list = ['rowers/tests/testdata/testdata.csv',
'rowers/tests/testdata/testdata.csv',
]
@parameterized.expand(file_list)
def test_upload_view(self, filename):
# simple test to see if upload view works. Submits a DocumentsForm to /rowers/workout/upload/
login = self.c.login(username='john',password='koeinsloot')
self.assertTrue(login)
with open(filename, 'rb') as f:
file_content = f.read()
uploaded_file = SimpleUploadedFile(
"testdata.csv",
file_content,
content_type="text/csv"
)
form_data = {
'title':'test',
'workouttype':'rower',
'boattype':'1x',
'notes':'aap noot mies',
'make_plot':False,
'rpe':6,
'upload_to_c2':False,
'plottype':'timeplot',
'landingpage':'workout_edit_view',
'raceid':0,
'file': filename,
}
request = RequestFactory()
request.user = self.u
form = DocumentsForm(data = form_data,files={'file': uploaded_file})
self.assertTrue(form.is_valid())
optionsform = UploadOptionsForm(form_data,request=request)
self.assertTrue(optionsform.is_valid())
response = self.c.post('/rowers/workout/upload/', data = form_data,
files = {'file': uploaded_file}, follow=True)
uploadoptions = form.cleaned_data.copy()
uploadoptions.update(optionsform.cleaned_data)
result = upload_handler(uploadoptions, filename)
self.assertEqual(result["status"], "processing")
@patch('rowers.dataprep.create_engine')
@patch('rowers.dataprep.read_data',side_effect=mocked_read_data)
def test_upload_view_sled(self, mocked_sqlalchemy,mocked_read_data):
@@ -35,57 +87,45 @@ class ViewTest(TestCase):
self.assertTrue(login)
filename = 'rowers/tests/testdata/testdata.csv'
f = open(filename,'rb')
with open(filename,'rb') as f:
file_data = {'file': f}
form_data = {
'title':'test',
'workouttype':'rower',
'boattype':'1x',
'notes':'aap noot mies',
'rpe':4,
'make_plot':False,
'rpe':6,
'upload_to_c2':False,
'plottype':'timeplot',
'landingpage':'workout_edit_view',
'raceid':0,
'file': f,
}
request = RequestFactory()
request.user = self.u
form = DocumentsForm(form_data,file_data)
form = DocumentsForm(data = form_data,files=file_data)
if not form.is_valid():
print(form.errors)
self.assertTrue(form.is_valid())
optionsform = UploadOptionsForm(form_data,request=request)
self.assertTrue(optionsform.is_valid())
response = self.c.post('/rowers/workout/upload/', form_data, follow=True)
response = self.c.post('/rowers/workout/upload/', data = form_data,
files = file_data, follow=True)
self.assertRedirects(response, expected_url='/rowers/workout/'+encoded1+'/edit/',
self.assertRedirects(response, expected_url='/rowers/list-workouts/',
status_code=302,target_status_code=200)
self.assertEqual(response.status_code, 200)
response = self.c.get('/rowers/workout/'+encoded1+'/', form_data, follow=True)
self.assertEqual(response.status_code, 200)
uploadoptions = form.cleaned_data.copy()
uploadoptions.update(optionsform.cleaned_data)
result = process_single_file(f, uploadoptions, 1)
self.assertEqual(result, True)
response = self.c.get('/rowers/workout/'+encoded1+'/edit/', form_data, follow=True)
self.assertEqual(response.status_code, 200)
f.close()
response = self.c.get('/rowers/workout/'+encoded1+'/workflow/',
follow=True)
self.assertEqual(response.status_code, 200)
response = self.c.get('/rowers/workout/'+encoded1+'/get-thumbnails/',
follow=True)
self.assertEqual(response.status_code, 200)
form_data = {
'name':'aap',
@@ -206,7 +246,7 @@ class ViewTest(TestCase):
response = self.c.post('/rowers/workout/upload/', form_data, follow=True)
self.assertRedirects(response, expected_url='/rowers/workout/'+encoded1+'/edit/',
self.assertRedirects(response, expected_url='/rowers/list-workouts/',
status_code=302,target_status_code=200)
self.assertEqual(response.status_code, 200)
@@ -252,7 +292,7 @@ class ViewTest(TestCase):
f.close()
self.assertRedirects(response, expected_url='/rowers/workout/'+encoded1+'/edit/',
self.assertRedirects(response, expected_url='/rowers/list-workouts/',
status_code=302,target_status_code=200)
self.assertEqual(response.status_code, 200)
@@ -326,7 +366,7 @@ class ViewTest(TestCase):
response = self.c.post('/rowers/workout/upload/', form_data, follow=True)
f.close()
self.assertRedirects(response, expected_url='/rowers/workout/'+encoded1+'/edit/',
self.assertRedirects(response, expected_url='/rowers/list-workouts/',
status_code=302,target_status_code=200)
self.assertEqual(response.status_code, 200)
@@ -366,7 +406,7 @@ class ViewTest(TestCase):
response = self.c.post('/rowers/workout/upload/', form_data, follow=True)
f.close()
self.assertRedirects(response, expected_url='/rowers/workout/'+encoded1+'/edit/',
self.assertRedirects(response, expected_url='/rowers/list-workouts/',
status_code=302,target_status_code=200)
self.assertEqual(response.status_code, 200)
@@ -406,7 +446,7 @@ class ViewTest(TestCase):
response = self.c.post('/rowers/workout/upload/', form_data, follow=True)
f.close()
self.assertRedirects(response, expected_url='/rowers/workout/'+encoded1+'/edit/',
self.assertRedirects(response, expected_url='/rowers/list-workouts/',
status_code=302,target_status_code=200)
self.assertEqual(response.status_code, 200)
@@ -446,7 +486,7 @@ class ViewTest(TestCase):
response = self.c.post('/rowers/workout/upload/', form_data, follow=True)
f.close()
self.assertRedirects(response, expected_url='/rowers/workout/'+encoded1+'/edit/',
self.assertRedirects(response, expected_url='/rowers/list-workouts/',
status_code=302,target_status_code=200)
self.assertEqual(response.status_code, 200)
@@ -454,13 +494,13 @@ class ViewTest(TestCase):
url = reverse('otw_use_gps',kwargs={'id':encoded1})
response = self.c.get(url,follow=True)
self.assertRedirects(response, expected_url='/rowers/workout/'+encoded1+'/edit/',
self.assertRedirects(response, expected_url='/rowers/list-workouts/',
status_code=302,target_status_code=200)
url = reverse('otw_use_impeller',kwargs={'id':encoded1})
response = self.c.get(url,follow=True)
self.assertRedirects(response, expected_url='/rowers/workout/'+encoded1+'/edit/',
self.assertRedirects(response, expected_url='/rowers/list-workouts/',
status_code=302,target_status_code=200)
@@ -502,7 +542,7 @@ class ViewTest(TestCase):
response = self.c.post('/rowers/workout/upload/', form_data, follow=True)
f.close()
self.assertRedirects(response, expected_url='/rowers/workout/'+encoded1+'/edit/',
self.assertRedirects(response, expected_url='/rowers/list-workouts/',
status_code=302,target_status_code=200)
self.assertEqual(response.status_code, 200)
@@ -541,7 +581,7 @@ class ViewTest(TestCase):
response = self.c.post('/rowers/workout/upload/', form_data, follow=True)
f.close()
self.assertRedirects(response, expected_url='/rowers/workout/'+encoded1+'/edit/',
self.assertRedirects(response, expected_url='/rowers/list-workouts/',
status_code=302,target_status_code=200)
self.assertEqual(response.status_code, 200)
@@ -581,7 +621,7 @@ class ViewTest(TestCase):
response = self.c.post('/rowers/workout/upload/', form_data, follow=True)
f.close()
self.assertRedirects(response, expected_url='/rowers/workout/'+encoded1+'/edit/',
self.assertRedirects(response, expected_url='/rowers/list-workouts/',
status_code=302,target_status_code=200)
self.assertEqual(response.status_code, 200)
@@ -622,7 +662,7 @@ class ViewTest(TestCase):
response = self.c.post('/rowers/workout/upload/', form_data, follow=True)
f.close()
self.assertRedirects(response, expected_url='/rowers/workout/'+encoded1+'/edit/',
self.assertRedirects(response, expected_url='/rowers/list-workouts/',
status_code=302,target_status_code=200)
self.assertEqual(response.status_code, 200)
@@ -662,7 +702,7 @@ class ViewTest(TestCase):
response = self.c.post('/rowers/workout/upload/', form_data, follow=True)
f.close()
self.assertRedirects(response, expected_url='/rowers/workout/'+encoded1+'/edit/',
self.assertRedirects(response, expected_url='/rowers/list-workouts/',
status_code=302,target_status_code=200)
self.assertEqual(response.status_code, 200)
@@ -699,7 +739,7 @@ class ViewTest(TestCase):
response = self.c.post('/rowers/workout/upload/', form_data, follow=True)
f.close()
self.assertRedirects(response, expected_url='/rowers/workout/'+encoded1+'/edit/',
self.assertRedirects(response, expected_url='/rowers/list-workouts/',
status_code=302,target_status_code=200)
self.assertEqual(response.status_code, 200)
@@ -735,7 +775,7 @@ class ViewTest(TestCase):
response = self.c.post('/rowers/workout/upload/', form_data, follow=True)
f.close()
self.assertRedirects(response, expected_url='/rowers/workout/'+encoded1+'/edit/',
self.assertRedirects(response, expected_url='/rowers/list-workouts/',
status_code=302,target_status_code=200)
self.assertEqual(response.status_code, 200)
@@ -773,7 +813,7 @@ class ViewTest(TestCase):
response = self.c.post('/rowers/workout/upload/', form_data, follow=True)
f.close()
self.assertRedirects(response, expected_url='/rowers/workout/'+encoded1+'/edit/',
self.assertRedirects(response, expected_url='/rowers/list-workouts/',
status_code=302,target_status_code=200)
self.assertEqual(response.status_code, 200)
@@ -811,7 +851,7 @@ class ViewTest(TestCase):
response = self.c.post('/rowers/workout/upload/', form_data, follow=True)
f.close()
self.assertRedirects(response, expected_url='/rowers/workout/'+encoded1+'/edit/',
self.assertRedirects(response, expected_url='/rowers/list-workouts/',
status_code=302,target_status_code=200)
self.assertEqual(response.status_code, 200)
@@ -847,7 +887,7 @@ class ViewTest(TestCase):
response = self.c.post('/rowers/workout/upload/', form_data, follow=True)
f.close()
self.assertRedirects(response, expected_url='/rowers/workout/'+encoded1+'/edit/',
self.assertRedirects(response, expected_url='/rowers/list-workouts/',
status_code=302,target_status_code=200)
self.assertEqual(response.status_code, 200)
@@ -885,7 +925,7 @@ class ViewTest(TestCase):
response = self.c.post('/rowers/workout/upload/', form_data, follow=True)
f.close()
self.assertRedirects(response, expected_url='/rowers/workout/'+encoded1+'/edit/',
self.assertRedirects(response, expected_url='/rowers/list-workouts/',
status_code=302,target_status_code=200)
self.assertEqual(response.status_code, 200)
@@ -923,7 +963,7 @@ class ViewTest(TestCase):
response = self.c.post('/rowers/workout/upload/', form_data, follow=True)
f.close()
self.assertRedirects(response, expected_url='/rowers/workout/'+encoded1+'/edit/',
self.assertRedirects(response, expected_url='/rowers/list-workouts/',
status_code=302,target_status_code=200)
self.assertEqual(response.status_code, 200)
@@ -958,7 +998,7 @@ class ViewTest(TestCase):
form = DocumentsForm(form_data,file_data)
response = self.c.post('/rowers/workout/upload/', form_data, follow=True)
self.assertRedirects(response, expected_url='/rowers/workout/'+encoded1+'/edit/',
self.assertRedirects(response, expected_url='/rowers/list-workouts/',
status_code=302,target_status_code=200)
self.assertEqual(response.status_code, 200)

View File

@@ -9,6 +9,7 @@ import pandas as pd
nu = datetime.datetime.now()
from rowers import tasks
from rowers import upload_tasks
import rowers.courses as courses
from rowers.integrations.sporttracks import default as stdefault
@@ -160,7 +161,7 @@ class AsyncTaskTests(TestCase):
]
}
}
res = tasks.summaryfromsplitdata(splitdata,data,'test.csv')
res = upload_tasks.summaryfromsplitdata(splitdata,data,'test.csv')
self.assertEqual(len(res[0]),478)
@@ -182,7 +183,7 @@ class AsyncTaskTests(TestCase):
@patch('rowers.tasks.requests.post',side_effect=mocked_requests)
@patch('rowers.tasks.requests.session',side_effect=mocked_requests)
def test_fetch_strava_workout(self, mock_get, mock_post, mock_Session):
res = tasks.fetch_strava_workout('aap',None,12,'rowers/tests/testdata/temp/tesmp.csv',
res = upload_tasks.fetch_strava_workout('aap',None,12,'rowers/tests/testdata/temp/tesmp.csv',
self.u.id)
self.assertEqual(res,1)

View File

@@ -13,7 +13,7 @@ import numpy as np
import rowers
from rowers import dataprep
from rowers import tasks
from rowers import tasks, upload_tasks
import urllib
import json
@@ -703,7 +703,7 @@ class C2Objects(DjangoTestCase):
response = self.c.get('/rowers/workout/c2import/31/',follow=True)
expected_url = '/rowers/workout/c2import/'
result = tasks.handle_c2_getworkout(self.r.user.id,self.r.c2token,31,self.r.defaulttimezone)
result = upload_tasks.handle_c2_getworkout(self.r.user.id,self.r.c2token,31,self.r.defaulttimezone)
self.assertRedirects(response,
expected_url=expected_url,
@@ -733,7 +733,7 @@ class C2Objects(DjangoTestCase):
for item in c2workoutdata['data']:
alldata[item['id']] = item
res = tasks.handle_c2_async_workout(alldata,self.u.id,self.r.c2token,33991243,0,self.r.defaulttimezone)
res = upload_tasks.handle_c2_async_workout(alldata,self.u.id,self.r.c2token,33991243,0,self.r.defaulttimezone)
self.assertEqual(res, 1)
@@ -1309,7 +1309,7 @@ class RP3Objects(DjangoTestCase):
startdatetime = timezone.now()-datetime.timedelta(days=30)
max_attempts = 2
res = tasks.handle_rp3_async_workout(userid,rp3token,rp3id,startdatetime,max_attempts)
res = upload_tasks.handle_rp3_async_workout(userid,rp3token,rp3id,startdatetime,max_attempts)
self.assertEqual(res,1)
@patch('rowers.integrations.rp3.requests.post', side_effect=mocked_requests)

View File

@@ -714,8 +714,6 @@ class PermissionsViewTests(TestCase):
url = reverse('team_workout_upload_view')
aantal = len(Workout.objects.filter(user=self.rbasic))
response = self.c.get(url)
self.assertEqual(response.status_code,200)
@@ -743,9 +741,6 @@ class PermissionsViewTests(TestCase):
expected_url = url,
status_code=302,target_status_code=200)
aantal2 = len(Workout.objects.filter(user=self.rbasic))
self.assertEqual(aantal2,aantal+1)
## Coach can upload on behalf of athlete - if team allows
@patch('rowers.dataprep.create_engine')

View File

@@ -19,6 +19,7 @@ import polars as pl
from rowers import interactiveplots
from rowers import dataprep
from rowers import tasks
from rowers import upload_tasks
from rowers import plannedsessions
from rowers.views.workoutviews import get_video_id
@@ -124,7 +125,7 @@ class OtherUnitTests(TestCase):
s = f.read()
data = json.loads(s)
splitdata = data['workout']['intervals']
summary = tasks.summaryfromsplitdata(splitdata,data,'aap.txt')
summary = upload_tasks.summaryfromsplitdata(splitdata,data,'aap.txt')
self.assertEqual(len(summary),3)
sums = summary[0]
@@ -445,6 +446,7 @@ class DataPrepTests(TestCase):
result = get_random_file(filename='rowers/tests/testdata/uherskehradiste_otw.csv')
self.wuh_otw = WorkoutFactory(user=self.r,
csvfilename=result['filename'],
starttime=result['starttime'],
@@ -477,7 +479,6 @@ class DataPrepTests(TestCase):
pass
def test_timezones(self):
#row = rowingdata.rowingdata(csvfile='rowers.tests/testdata/testdata_210616_075409.csv')
row = rowingdata.rowingdata(csvfile='rowers/tests/testdata/testdata_210616_075409.csv')
aware = datetime.datetime(2021,6,16,7,54,9,999000,tzinfo=pytz.timezone('Europe/Amsterdam'))
row.rowdatetime = aware
@@ -496,7 +497,6 @@ class DataPrepTests(TestCase):
def test_timezones2(self):
#row = rowingdata.rowingdata(csvfile='rowers.tests/testdata/testdata_210616_075409.csv')
row = rowingdata.rowingdata(csvfile='rowers/tests/testdata/testdata_210616_075409.csv')
naive = datetime.datetime(2021,6,16,7,54,9,999000)
timezone = pytz.timezone('Europe/Prague')
@@ -517,7 +517,6 @@ class DataPrepTests(TestCase):
self.assertEqual(startdate,'2021-06-16')
def test_timezones3(self):
#row = rowingdata.rowingdata(csvfile='rowers.tests/testdata/testdata_210616_075409.csv')
row = rowingdata.rowingdata(csvfile='rowers/tests/testdata/testdata_210616_075409.csv')
naive = datetime.datetime(2021,6,16,7,54,9,999000)
row.rowdatetime = naive
@@ -527,7 +526,6 @@ class DataPrepTests(TestCase):
self.assertEqual(timezone_str,'Europe/Amsterdam')
def test_timezones4(self):
#row = rowingdata.rowingdata(csvfile='rowers.tests/testdata/testdata_210616_075409.csv')
row = rowingdata.rowingdata(csvfile='rowers/tests/testdata/testdata_210616_075409.csv')
naive = datetime.datetime(2021,6,15,19,55,13,400000)
timezone = pytz.timezone('America/Los_Angeles')
@@ -553,7 +551,6 @@ class DataPrepTests(TestCase):
self.assertEqual(startdate,'2021-06-15')
def test_timezones5(self):
#row = rowingdata.rowingdata(csvfile='rowers.tests/testdata/testdata_210616_075409.csv')
row = rowingdata.rowingdata(csvfile='rowers/tests/testdata/testdata_210616_075409.csv')
naive = datetime.datetime(2021,6,15,19,55,13,400000)
timezone = pytz.timezone('America/Los_Angeles')

View File

@@ -39,128 +39,7 @@ class ForceUnits(TestCase):
def tearDown(self):
dataprep.delete_strokedata(1)
def test_upload_painsled_lbs(self):
login = self.c.login(username=self.u.username, password=self.password)
self.assertTrue(login)
filename = 'rowers/tests/testdata/PainsledForce.csv'
f = open(filename,'rb')
file_data = {'file': f}
form_data = {
'title':'test',
'workouttype':'rower',
'boattype':'1x',
'notes':'aap noot mies',
'make_plot':False,
'upload_to_c2':False,
'plottype':'timeplot',
'rpe': 1,
'file': f,
}
form = DocumentsForm(form_data,file_data)
response = self.c.post('/rowers/workout/upload/', form_data, follow=True)
self.assertRedirects(response, expected_url='/rowers/workout/'+encoded13+'/edit/',
status_code=302,target_status_code=200)
self.assertEqual(response.status_code, 200)
f.close()
w = Workout.objects.get(id=1)
self.assertEqual(w.forceunit,'lbs')
df = dataprep.read_data(['averageforce'],ids=[13])
df = dataprep.remove_nulls_pl(df)
average_N = int(df['averageforce'].mean())
self.assertEqual(average_N,400)
data = dataprep.read_df_sql(13)
average_N = int(data['averageforce'].mean())
self.assertEqual(average_N,398)
df,row = dataprep.getrowdata_db(id=13)
average_N = int(df['averageforce'].mean())
self.assertEqual(average_N,398)
df = dataprep.clean_df_stats(df,ignoreadvanced=False)
average_N = int(df['averageforce'].mean())
self.assertEqual(average_N,398)
def test_upload_speedcoach_N(self):
login = self.c.login(username=self.u.username, password=self.password)
self.assertTrue(login)
filename = 'rowers/tests/testdata/EmpowerSpeedCoachForce.csv'
f = open(filename,'rb')
file_data = {'file': f}
form_data = {
'title':'test',
'workouttype':'rower',
'boattype':'1x',
'notes':'aap noot mies',
'make_plot':False,
'rpe': 1,
'upload_to_c2':False,
'plottype':'timeplot',
'file': f,
}
form = DocumentsForm(form_data,file_data)
response = self.c.post('/rowers/workout/upload/', form_data, follow=True)
self.assertRedirects(response, expected_url='/rowers/workout/'+encoded13+'/edit/',
status_code=302,target_status_code=200)
self.assertEqual(response.status_code, 200)
f.close()
w = Workout.objects.get(id=13)
self.assertEqual(w.forceunit,'N')
df = dataprep.read_data(['averageforce'],ids=[13])
df = dataprep.remove_nulls_pl(df)
average_N = int(df['averageforce'].mean())
self.assertEqual(average_N,271)
def test_upload_speedcoach_colin(self):
login = self.c.login(username=self.u.username, password=self.password)
self.assertTrue(login)
filename = 'rowers/tests/testdata/colinforce.csv'
f = open(filename,'rb')
file_data = {'file': f}
form_data = {
'title':'test',
'rpe':1,
'workouttype':'rower',
'boattype':'1x',
'notes':'aap noot mies',
'make_plot':False,
'upload_to_c2':False,
'plottype':'timeplot',
'file': f,
}
form = DocumentsForm(form_data,file_data)
response = self.c.post('/rowers/workout/upload/', form_data, follow=True)
self.assertRedirects(response, expected_url='/rowers/workout/'+encoded13+'/edit/',
status_code=302,target_status_code=200)
self.assertEqual(response.status_code, 200)
f.close()
w = Workout.objects.get(id=13)
self.assertEqual(w.forceunit,'N')
df = dataprep.read_data(['averageforce'],ids=[13])
df = dataprep.remove_nulls_pl(df)
average_N = int(df['averageforce'].mean())
self.assertEqual(average_N,120)
@override_settings(TESTING=True)
class TestForceUnit(TestCase):

View File

@@ -0,0 +1,299 @@
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
#from __future__ import print_function
from .statements import *
nu = datetime.datetime.now()
from django.db import transaction
import shutil
from rowers.views import add_defaultfavorites
from rowers.dataflow import process_single_file, upload_handler, unzip_and_process
from django.core.files.uploadedfile import SimpleUploadedFile
from django.conf import settings
from rowingdata import get_file_type
file_list = [
'rowers/tests/testdata/testdata.csv',
'rowers/tests/testdata/testdata.csv.gz',
'rowers/tests/testdata/tim.csv',
'rowers/tests/testdata/crewnerddata.tcx',
'rowers/tests/testdata/Speedcoach2example.csv',
'rowers/tests/testdata/Impeller.csv',
'rowers/tests/testdata/speedcoach3test3.csv',
'rowers/tests/testdata/SpeedCoach2Linkv1.27.csv',
'rowers/tests/testdata/SpeedCoach2Link_interval.csv',
'rowers/tests/testdata/NoHR.tcx',
'rowers/tests/testdata/rowinginmotionexample.tcx',
'rowers/tests/testdata/RP_testdata.csv',
'rowers/tests/testdata/mystery.csv',
'rowers/tests/testdata/RP_interval.csv',
'rowers/tests/testdata/3x250m.fit',
'rowers/tests/testdata/painsled_desktop_example.csv',
'rowers/tests/testdata/ergdata_example.csv',
'rowers/tests/testdata/boatcoach_2021-09-09__18-15-53.csv',
'rowers/tests/testdata/colinforce.csv',
'rowers/tests/testdata/PainsledForce.csv',
'rowers/tests/testdata/EmpowerSpeedCoachForce.csv',
'rowers/tests/testdata/boatcoach.csv',
'rowers/tests/testdata/ergstick.csv',
]
fail_list = [
'rowers/tests/testdata/lofoten.jpg',
'rowers/tests/testdata/c2records.json',
'rowers/tests/testdata/alphen.kml',
'rowers/tests/testdata/testdata.gpx'
]
#@pytest.mark.django_db
@override_settings(TESTING=True)
class ViewTest(TestCase):
def setUp(self):
# Per-test fixture: a logged-in-capable pro-plan user/rower, plus a
# backup copy of the fail_list fixtures so tearDown can restore them
# if the upload pipeline moves or rewrites them.
# Presumably signals background workers on the 'tasks' channel to
# stop before the test runs — TODO confirm.
redis_connection.publish('tasks','KILL')
self.c = Client()
self.u = User.objects.create_user('john',
'sander@ds.ds',
'koeinsloot')
self.r = Rower.objects.create(user=self.u,gdproptin=True, ftpset=True,surveydone=True,
gdproptindate=timezone.now(),
rowerplan='pro',
)
add_defaultfavorites(self.r)
self.nu = datetime.datetime.now()
# copy every file in fail_list to rowers/tests/testdata/backup folder
# Make sure the backup directory exists
backup_dir = 'rowers/tests/testdata/backup'
os.makedirs(backup_dir, exist_ok=True)
# Copy each file in fail_list to the backup directory
for file_path in fail_list:
if os.path.exists(file_path):
shutil.copy(file_path, backup_dir)
else:
print(f"Bestand niet gevonden: {file_path}")
def tearDown(self):
# Restore the fail_list fixtures from the backup made in setUp.
backup_dir = 'rowers/tests/testdata/backup'
for file_path in fail_list:
backup_file = os.path.join(backup_dir, os.path.basename(file_path))
if os.path.exists(backup_file):
shutil.copy(backup_file, os.path.dirname(file_path))
else:
print(f"Backup-bestand niet gevonden: {backup_file}")
@parameterized.expand(file_list)
@patch('rowers.dataflow.myqueue')
def test_upload_view(self, filename, mocked_myqueue):
# simple test to see if upload view works. Submits a DocumentsForm to /rowers/workout/upload/
login = self.c.login(username='john',password='koeinsloot')
self.assertTrue(login)
with open(filename, 'rb') as f:
file_content = f.read()
uploaded_file = SimpleUploadedFile(
"testdata.csv",
file_content,
content_type="text/csv"
)
form_data = {
'title':'test',
'workouttype':'rower',
'boattype':'1x',
'notes':'aap noot mies',
'make_plot':False,
'rpe':6,
'upload_to_c2':False,
'plottype':'timeplot',
'landingpage':'workout_edit_view',
'raceid':0,
'file': filename,
}
request = RequestFactory()
request.user = self.u
form = DocumentsForm(data = form_data,files={'file': uploaded_file})
self.assertTrue(form.is_valid())
optionsform = UploadOptionsForm(form_data,request=request)
self.assertTrue(optionsform.is_valid())
response = self.c.post('/rowers/workout/upload/', data = form_data,
files = {'file': uploaded_file}, follow=True)
self.assertEqual(response.status_code, 200)
uploadoptions = form.cleaned_data.copy()
uploadoptions.update(optionsform.cleaned_data)
result = upload_handler(uploadoptions, filename)
self.assertEqual(result["status"], "processing")
@parameterized.expand(fail_list)
@patch('rowers.dataflow.myqueue')
def test_upload_view_fail(self, filename, mocked_myqueue):
    """Unparseable files upload via the view but upload_handler reports an error.

    NOTE(review): this was named ``test_upload_view``, identical to the
    file_list-parameterized test above — the second definition shadowed
    the first, so the success-path test never ran. Renamed so both run.
    """
    login = self.c.login(username='john', password='koeinsloot')
    self.assertTrue(login)
    with open(filename, 'rb') as f:
        file_content = f.read()
    uploaded_file = SimpleUploadedFile(
        "testdata.csv",
        file_content,
        content_type="text/csv"
    )
    form_data = {
        'title': 'test',
        'workouttype': 'rower',
        'boattype': '1x',
        'notes': 'aap noot mies',
        'make_plot': False,
        'rpe': 6,
        'upload_to_c2': False,
        'plottype': 'timeplot',
        'landingpage': 'workout_edit_view',
        'raceid': 0,
        'file': filename,
    }
    request = RequestFactory()
    request.user = self.u
    # The form itself accepts the file; the failure must surface in the
    # handler, not in form validation.
    form = DocumentsForm(data=form_data, files={'file': uploaded_file})
    self.assertTrue(form.is_valid())
    optionsform = UploadOptionsForm(form_data, request=request)
    self.assertTrue(optionsform.is_valid())
    response = self.c.post('/rowers/workout/upload/', data=form_data,
                           files={'file': uploaded_file}, follow=True)
    self.assertEqual(response.status_code, 200)
    uploadoptions = form.cleaned_data.copy()
    uploadoptions.update(optionsform.cleaned_data)
    result = upload_handler(uploadoptions, filename)
    self.assertEqual(result["status"], "error")
@parameterized.expand(file_list)
@patch('rowers.dataflow.myqueue')
def test_process_single_file(self, filename, mocked_myqueue):
uploadoptions = {
'title':'test',
'workouttype':'rower',
'boattype':'1x',
'notes':'aap noot mies',
'make_plot':False,
'rpe':6,
'upload_to_c2':False,
'plottype':'timeplot',
'landingpage':'workout_edit_view',
'raceid':0,
'user': self.u,
'file': filename,
}
result, f2 = process_single_file(filename, uploadoptions, 1)
self.assertEqual(result, True)
os.remove(f2+'.gz')
# process a single file without 'user'
@patch('rowers.dataflow.myqueue')
def test_process_single_file_nouser(self, mocked_myqueue):
filename = 'rowers/tests/testdata/testdata.csv'
uploadoptions = {
'title':'test',
'workouttype':'rower',
'boattype':'1x',
'notes':'aap noot mies',
'make_plot':False,
'rpe':6,
'upload_to_c2':False,
'plottype':'timeplot',
'landingpage':'workout_edit_view',
'raceid':0,
'useremail': self.u.email,
'file': filename,
}
result, f2 = process_single_file(filename, uploadoptions, 1)
self.assertEqual(result, True)
os.remove(f2+'.gz')
# Feeding a zip archive to the single-file path: it must refuse, while the
# generic upload handler accepts it for (asynchronous) processing.
@patch('rowers.dataflow.myqueue')
def test_process_single_zipfile(self, mocked_myqueue):
    """A zip is an error for process_single_file but 'processing' for upload_handler."""
    archive = 'rowers/tests/testdata/zipfile.zip'
    options = dict(
        title='test',
        workouttype='rower',
        boattype='1x',
        notes='aap noot mies',
        make_plot=False,
        rpe=6,
        upload_to_c2=False,
        plottype='timeplot',
        landingpage='workout_edit_view',
        raceid=0,
        user=self.u,
        file=archive,
    )
    # The single-file processor cannot handle an archive directly.
    outcome = process_single_file(archive, options, 1)
    self.assertEqual(outcome["status"], "error")
    # The top-level handler dispatches the archive for processing instead.
    outcome = upload_handler(options, archive)
    self.assertEqual(outcome["status"], "processing")
# process a single file without 'title'
@patch('rowers.dataflow.myqueue')
def test_process_single_file_notitle(self, mocked_myqueue):
    """Processing must succeed when the upload options omit 'title'.

    Bug fix: this method was previously also named
    ``test_process_single_file_nouser``, duplicating the earlier test in
    the class. Python keeps only the last definition of a name, so the
    real "no user" test was silently shadowed and never executed.
    Renamed to match its comment and its actual scenario (missing title).
    """
    filename = 'rowers/tests/testdata/testdata.csv'
    # Deliberately no 'title' key — that is the scenario under test.
    uploadoptions = {
        'workouttype': 'rower',
        'boattype': '1x',
        'notes': 'aap noot mies',
        'make_plot': False,
        'rpe': 6,
        'upload_to_c2': False,
        'plottype': 'timeplot',
        'landingpage': 'workout_edit_view',
        'raceid': 0,
        'user': self.u,
        'file': filename,
    }
    result, f2 = process_single_file(filename, uploadoptions, 1)
    self.assertEqual(result, True)
    # Clean up the gzipped artifact written by the processing step.
    os.remove(f2 + '.gz')
@patch('rowers.dataflow.myqueue')
def test_process_zip_file(self, mocked_myqueue):
    """Unzipping and processing a test archive must report 'completed'."""
    archive = 'rowers/tests/testdata/zipfile.zip'
    options = dict(
        title='test',
        workouttype='rower',
        boattype='1x',
        notes='aap noot mies',
        make_plot=False,
        rpe=6,
        upload_to_c2=False,
        plottype='timeplot',
        landingpage='workout_edit_view',
        raceid=0,
        user=self.u,
        file=archive,
    )
    outcome = unzip_and_process(archive, options, 1)
    self.assertEqual(outcome['status'], "completed")

178
rowers/tests/testdata/backup/alphen.kml vendored Normal file
View File

@@ -0,0 +1,178 @@
<?xml version="1.0" ?>
<kml xmlns:gx="http://www.google.com/kml/ext/2.2" xmlns:kml="http://www.google.com/kml/ext/2.2" xmlns:atom="http://www.w3.org/2005/Atom" xmlns="http://www.opengis.net/kml/2.2">
<Document>
<name>Courses.kml</name>
<Folder>
<name>Courses</name>
<Folder>
<name>Alphen - Alphen aan den Rijn</name>
<open>1</open>
<Placemark>
<name>Start</name>
<Polygon>
<tessellate>1</tessellate>
<outerBoundaryIs>
<LinearRing>
<coordinates>4.704149601313898,52.14611068342334,0 4.704648516706039,52.14606840788696,0 4.704642182077736,52.14626893773362,0 4.704151599747837,52.14628828501986,0 4.704149601313898,52.14611068342334,0 4.704149601313898,52.14611068342334,0</coordinates>
</LinearRing>
</outerBoundaryIs>
</Polygon>
</Placemark>
<Placemark>
<name>Gate 1</name>
<Polygon>
<tessellate>1</tessellate>
<outerBoundaryIs>
<LinearRing>
<coordinates>4.704040567073562,52.14772365703576,0 4.704544185247905,52.14767250842382,0 4.704570221164488,52.14791407188889,0 4.704130359234369,52.14797079566858,0 4.704040567073562,52.14772365703576,0 4.704040567073562,52.14772365703576,0</coordinates>
</LinearRing>
</outerBoundaryIs>
</Polygon>
</Placemark>
<Placemark>
<name>Gate 2</name>
<Polygon>
<tessellate>1</tessellate>
<outerBoundaryIs>
<LinearRing>
<coordinates>4.707120374629225,52.15459940303027,0 4.707573702026327,52.15460568431943,0 4.70761596147063,52.15486728249238,0 4.707159504658982,52.15489881627455,0 4.707120374629225,52.15459940303027,0 4.707120374629225,52.15459940303027,0</coordinates>
</LinearRing>
</outerBoundaryIs>
</Polygon>
</Placemark>
<Placemark>
<name>Gate 3</name>
<Polygon>
<tessellate>1</tessellate>
<outerBoundaryIs>
<LinearRing>
<coordinates>4.709028668490356,52.1646474322453,0 4.70984931790314,52.16449178436365,0 4.709978566943311,52.16488586779201,0 4.709244456319242,52.16499245615274,0 4.709028668490356,52.1646474322453,0 4.709028668490356,52.1646474322453,0</coordinates>
</LinearRing>
</outerBoundaryIs>
</Polygon>
</Placemark>
<Placemark>
<name>Gate 4</name>
<Polygon>
<tessellate>1</tessellate>
<outerBoundaryIs>
<LinearRing>
<coordinates>4.718138359290078,52.17865355742074,0 4.718653235056161,52.17830639665007,0 4.719134204848634,52.17862031168055,0 4.71867160984541,52.17894003397144,0 4.718138359290078,52.17865355742074,0 4.718138359290078,52.17865355742074,0</coordinates>
</LinearRing>
</outerBoundaryIs>
</Polygon>
</Placemark>
<Placemark>
<name>Gate 5</name>
<Polygon>
<tessellate>1</tessellate>
<outerBoundaryIs>
<LinearRing>
<coordinates>4.727641648412835,52.18284846695732,0 4.728273789904367,52.18251973845241,0 4.728577606945771,52.1827641768111,0 4.7279847617705,52.1830837392454,0 4.727641648412835,52.18284846695732,0 4.727641648412835,52.18284846695732,0</coordinates>
</LinearRing>
</outerBoundaryIs>
</Polygon>
</Placemark>
<Placemark>
<name>Gate 6</name>
<Polygon>
<tessellate>1</tessellate>
<outerBoundaryIs>
<LinearRing>
<coordinates>4.738716857017891,52.19396028458393,0 4.739294818571407,52.19389560588872,0 4.739411118817641,52.19428660874426,0 4.738864571028594,52.19431307372239,0 4.738716857017891,52.19396028458393,0 4.738716857017891,52.19396028458393,0</coordinates>
</LinearRing>
</outerBoundaryIs>
</Polygon>
</Placemark>
<Placemark>
<name>Gate 7</name>
<Polygon>
<tessellate>1</tessellate>
<outerBoundaryIs>
<LinearRing>
<coordinates>4.734183821236371,52.20620514880871,0 4.734924962205387,52.20637199686158,0 4.734802543714663,52.20688025274802,0 4.733601274999542,52.20663721340052,0 4.734183821236371,52.20620514880871,0 4.734183821236371,52.20620514880871,0</coordinates>
</LinearRing>
</outerBoundaryIs>
</Polygon>
</Placemark>
<Placemark>
<name>Gate 8</name>
<Polygon>
<tessellate>1</tessellate>
<outerBoundaryIs>
<LinearRing>
<coordinates>4.738785303605908,52.19457123452171,0 4.739333350356509,52.19459196501802,0 4.739304304831564,52.19482691469288,0 4.73885420703549,52.19479878738656,0 4.738785303605908,52.19457123452171,0 4.738785303605908,52.19457123452171,0</coordinates>
</LinearRing>
</outerBoundaryIs>
</Polygon>
</Placemark>
<Placemark>
<name>Gate 9</name>
<Polygon>
<tessellate>1</tessellate>
<outerBoundaryIs>
<LinearRing>
<coordinates>4.728292586661338,52.18327969510192,0 4.728884338045631,52.18302182842039,0 4.729083849790216,52.1833152834237,0 4.728606271720666,52.18355598784883,0 4.728292586661338,52.18327969510192,0 4.728292586661338,52.18327969510192,0</coordinates>
</LinearRing>
</outerBoundaryIs>
</Polygon>
</Placemark>
<Placemark>
<name>Gate 10</name>
<Polygon>
<tessellate>1</tessellate>
<outerBoundaryIs>
<LinearRing>
<coordinates>4.717008631662971,52.17788756203277,0 4.717714777374475,52.17758571819474,0 4.718168595226933,52.17803093936305,0 4.717634575621297,52.17832999894938,0 4.717008631662971,52.17788756203277,0 4.717008631662971,52.17788756203277,0</coordinates>
</LinearRing>
</outerBoundaryIs>
</Polygon>
</Placemark>
<Placemark>
<name>Gate 11</name>
<Polygon>
<tessellate>1</tessellate>
<outerBoundaryIs>
<LinearRing>
<coordinates>4.708580146922809,52.16405851453961,0 4.709467162927956,52.16392338577828,0 4.709761923185198,52.16427786809471,0 4.708922971852094,52.16448915385681,0 4.708580146922809,52.16405851453961,0 4.708580146922809,52.16405851453961,0</coordinates>
</LinearRing>
</outerBoundaryIs>
</Polygon>
</Placemark>
<Placemark>
<name>Gate 12</name>
<Polygon>
<tessellate>1</tessellate>
<outerBoundaryIs>
<LinearRing>
<coordinates>4.70716800510311,52.15500418035832,0 4.707671825192278,52.15498496004398,0 4.707743878685751,52.15525628533189,0 4.707149393888881,52.1553218720998,0 4.70716800510311,52.15500418035832,0 4.70716800510311,52.15500418035832,0</coordinates>
</LinearRing>
</outerBoundaryIs>
</Polygon>
</Placemark>
<Placemark>
<name>Gate 13</name>
<Polygon>
<tessellate>1</tessellate>
<outerBoundaryIs>
<LinearRing>
<coordinates>4.704140681716737,52.14813498986593,0 4.704864196194787,52.1479883822655,0 4.705153909432487,52.14838874308533,0 4.704223464041033,52.14854260247372,0 4.704140681716737,52.14813498986593,0 4.704140681716737,52.14813498986593,0</coordinates>
</LinearRing>
</outerBoundaryIs>
</Polygon>
</Placemark>
<Placemark>
<name>Finish</name>
<Polygon>
<tessellate>1</tessellate>
<outerBoundaryIs>
<LinearRing>
<coordinates>4.70414987291546,52.1461319705247,0 4.704561170436561,52.14607111930849,0 4.704642182077736,52.14626893773362,0 4.70415735390207,52.14628831020436,0 4.70414987291546,52.1461319705247,0 4.70414987291546,52.1461319705247,0</coordinates>
</LinearRing>
</outerBoundaryIs>
</Polygon>
</Placemark>
</Folder>
</Folder>
</Document>
</kml>

BIN
rowers/tests/testdata/backup/lofoten.jpg vendored Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 263 KiB

View File

@@ -0,0 +1,574 @@
<?xml version="1.0" encoding="UTF-8" standalone="no" ?><gpx xmlns="http://www.topografix.com/GPX/1/1" xmlns:gpxx="http://www.garmin.com/xmlschemas/GpxExtensions/v3" xmlns:gpxtpx="http://www.garmin.com/xmlschemas/TrackPointExtension/v1" creator="Oregon 400t" version="1.1" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.topografix.com/GPX/1/1 http://www.topografix.com/GPX/1/1/gpx.xsd http://www.garmin.com/xmlschemas/GpxExtensions/v3 http://www.garmin.com/xmlschemas/GpxExtensionsv3.xsd http://www.garmin.com/xmlschemas/TrackPointExtension/v1 http://www.garmin.com/xmlschemas/TrackPointExtensionv1.xsd"><metadata><link href="http://www.garmin.com"><text>Garmin International</text></link><time>2016-05-20T15:41:26</time></metadata><trk><name>Export by rowingdata</name><trkseg> <trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:41:26+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:41:29.238150+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:41:32.148290+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:41:35.269000+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:41:38.152180+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:41:41.148270+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:41:44.148910+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:41:46.908250+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:41:49.819010+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:41:52.942510+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:41:55.639670+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:41:58.370000+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:42:01.188270+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:42:04.008300+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:42:06.888990+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:42:09.678900+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:42:12.469140+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:42:15.199010+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:42:17.963080+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:42:20.658340+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:42:23.538800+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:42:26.269790+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:42:28.848350+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:42:31.729550+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:42:34.398400+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:42:37.038360+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:42:39.499250+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:42:42.349070+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:42:45.079070+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:42:47.752890+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:42:50.452350+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:42:53.182630+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:42:55.789410+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:42:58.671890+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:43:01.338860+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:43:04.068490+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:43:06.862620+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:43:09.618500+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:43:12.379160+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:43:15.229200+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:43:17.963150+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:43:20.692490+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:43:23.628520+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:43:26.329210+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:43:29.148960+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:43:31.668570+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:43:34.490920+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:43:37.369250+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:43:40.189230+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:43:42.798860+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:43:45.708750+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:43:48.318590+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:43:51.199500+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:43:53.869290+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:43:56.572490+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:43:59.212410+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:44:01.912890+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:44:04.459350+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:44:07.249360+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:44:09.949930+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:44:12.619870+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:44:15.378800+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:44:18.049420+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:44:20.719440+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:44:23.298970+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:44:26.178820+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:44:28.669980+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:44:31.429270+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:44:34.042790+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:44:36.589070+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:44:39.412800+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:44:42.078870+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:44:44.783760+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:44:47.450710+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:44:50.149400+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:44:52.789720+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:44:55.429750+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:44:58.069700+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:45:00.742790+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:45:03.442700+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:45:06.139610+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:45:08.689490+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:45:11.479530+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:45:14.119610+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:45:16.792860+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:45:19.368950+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:45:22.158960+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:45:24.889580+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:45:27.558940+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:45:30.469760+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:45:33.259860+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:45:36.079590+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:45:38.899560+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:45:41.689980+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:45:44.568940+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:45:47.329670+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:45:50.149560+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:45:52.969660+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:45:55.879910+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:45:58.789690+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:46:01.729660+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:46:04.669610+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:46:07.549730+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:46:10.458930+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:46:13.488980+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:46:16.429320+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:46:19.519650+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:46:22.459630+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:46:25.338880+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:46:28.459530+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:46:31.401590+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:46:34.339560+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:46:37.309450+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:46:40.098920+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:46:43.039950+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:46:46.039490+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:46:48.979630+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:46:51.949590+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:46:54.709590+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:46:57.589710+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:47:00.503120+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:47:03.408950+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:47:06.323410+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:47:09.229670+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:47:12.198960+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:47:15.079930+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:47:17.989660+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:47:20.959680+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:47:23.869730+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:47:26.782970+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:47:29.688910+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:47:32.539570+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:47:35.449720+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:47:38.329080+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:47:41.148960+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:47:44.088880+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:47:47.150600+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:47:50.029750+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:47:52.998850+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:47:55.880360+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:47:58.789400+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:48:01.639760+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:48:04.492770+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:48:07.429530+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:48:10.373270+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:48:13.309500+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:48:16.279570+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:48:19.160740+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:48:21.948820+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:48:25.039520+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:48:27.949340+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:48:30.890880+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:48:33.648790+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:48:36.770050+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:48:39.499600+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:48:42.559140+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:48:45.439020+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:48:48.439810+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:48:51.379570+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:48:54.259600+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:48:57.139300+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:49:00.049550+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:49:02.838790+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:49:05.839540+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:49:08.749400+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:49:11.689540+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:49:14.538900+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:49:17.389440+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:49:20.058880+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:49:23.059530+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:49:25.880610+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:49:28.608730+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:49:31.582600+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:49:34.278700+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:49:37.068660+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:49:40.039460+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:49:42.889790+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:49:45.772580+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:49:48.708690+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:49:51.679450+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:49:54.499470+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:49:57.409440+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:50:00.439330+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:50:03.408680+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:50:06.378680+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:50:09.168860+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:50:12.229650+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:50:15.138650+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:50:18.049470+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:50:20.959460+00:00</time>
</trkpt>
<trkpt lat="0.0" lon="0.0">
<time>2016-05-20T13:50:23.242360+00:00</time>
</trkpt>
</trkseg></trk></gpx>

View File

@@ -1,255 +0,0 @@
Concept2 Utility - Version 7.06.15,,,,,,,,,,,,,,,,,,
,,,,,,,,,,,,,,,,,,
Log Data for:,F,,,,,,,,,,,,,,,,,
,,,,,Total Workout Results,,,,Split or Work Interval Results,,,,Results Calculated by Formulas,,,Interval Rest Results,,
,Name,Date,Time of Day,Workout Name,Time,Meters,Avg SPM,Avg Heart Rate,Time,Meters,SPM,Heart Rate,/500m,Cal/hr,Watt,Time,Meters,Heart Rate
,,,,,,,,,,,,,,,,,,
,F,2/3/2017,7:48,0:25:03,25:03.6,6326,23,163,,,,,01:58.8,1017,209,,,
,F,2/3/2017,7:48,0:25:03,,,,,06:00.0,1484,23,150,02:01.2,975,196,,,
,F,2/3/2017,7:48,0:25:03,,,,,12:00.0,1532,24,161,01:57.4,1042,216,,,
,F,2/3/2017,7:48,0:25:03,,,,,18:00.0,1504,23,163,01:59.6,1002,204,,,
,F,2/3/2017,7:48,0:25:03,,,,,24:00.0,1535,23,171,01:57.2,1047,217,,,
,F,2/3/2017,7:48,0:25:03,,,,,25:03.6,271,23,170,01:57.3,1045,217,,,
,,,,,,,,,,,,,,,,,,
,F,2/3/2017,7:21,0:01:23,01:23.7,312,20,116,,,,,02:14.1,799,145,,,
,F,2/3/2017,7:21,0:01:23,,,,,01:23.7,313,20,116,02:13.7,803,146,,,
,,,,,,,,,,,,,,,,,,
,F,1/13/2017,8:42,0:45:00,45:00.0,11437,23,168,,,,,01:58.0,1032,213,,,
,F,1/13/2017,8:42,0:45:00,,,,,03:00.0,773,24,155,01:56.4,1063,222,,,
,F,1/13/2017,8:42,0:45:00,,,,,06:00.0,769,24,161,01:57.0,1051,218,,,
,F,1/13/2017,8:42,0:45:00,,,,,09:00.0,769,24,164,01:57.0,1051,218,,,
,F,1/13/2017,8:42,0:45:00,,,,,12:00.0,770,24,165,01:56.8,1054,219,,,
,F,1/13/2017,8:42,0:45:00,,,,,15:00.0,765,24,168,01:57.6,1039,215,,,
,F,1/13/2017,8:42,0:45:00,,,,,18:00.0,753,23,165,01:59.5,1005,205,,,
,F,1/13/2017,8:42,0:45:00,,,,,21:00.0,770,24,171,01:56.8,1054,219,,,
,F,1/13/2017,8:42,0:45:00,,,,,24:00.0,764,24,167,01:57.8,1036,214,,,
,F,1/13/2017,8:42,0:45:00,,,,,27:00.0,763,24,169,01:57.9,1033,213,,,
,F,1/13/2017,8:42,0:45:00,,,,,30:00.0,770,24,173,01:56.8,1054,219,,,
,F,1/13/2017,8:42,0:45:00,,,,,33:00.0,764,23,173,01:57.8,1036,214,,,
,F,1/13/2017,8:42,0:45:00,,,,,36:00.0,739,23,172,02:01.7,966,194,,,
,F,1/13/2017,8:42,0:45:00,,,,,39:00.0,723,22,171,02:04.4,924,181,,,
,F,1/13/2017,8:42,0:45:00,,,,,42:00.0,759,23,175,01:58.5,1022,210,,,
,F,1/13/2017,8:42,0:45:00,,,,,45:00.0,787,23,179,01:54.3,1105,234,,,
,,,,,,,,,,,,,,,,,,
,F,1/13/2017,7:57,0:04:53,04:53.6,1080,20,71,,,,,02:15.9,779,139,,,
,F,1/13/2017,7:57,0:04:53,,,,,04:53.6,1081,20,71,02:15.8,780,140,,,
,,,,,,,,,,,,,,,,,,
,F,1/10/2017,8:29,0:45:00,45:00.0,11260,21,170,,,,,01:59.8,998,203,,,
,F,1/10/2017,8:29,0:45:00,,,,,09:00.0,2320,22,168,01:56.3,1064,222,,,
,F,1/10/2017,8:29,0:45:00,,,,,18:00.0,2275,22,168,01:58.6,1020,209,,,
,F,1/10/2017,8:29,0:45:00,,,,,27:00.0,2142,21,167,02:06.0,901,175,,,
,F,1/10/2017,8:29,0:45:00,,,,,36:00.0,2243,21,173,02:00.3,990,201,,,
,F,1/10/2017,8:29,0:45:00,,,,,45:00.0,2281,22,177,01:58.3,1026,211,,,
,,,,,,,,,,,,,,,,,,
,F,1/10/2017,7:43,0:04:36,04:36.1,1048,21,113,,,,,02:11.7,826,153,,,
,F,1/10/2017,7:43,0:04:36,,,,,04:36.1,1049,21,113,02:11.6,828,154,,,
,,,,,,,,,,,,,,,,,,
,F,1/8/2017,8:29,0:45:00,45:00.0,10960,21,167,,,,,02:03.1,944,187,,,
,F,1/8/2017,8:29,0:45:00,,,,,09:00.0,2033,20,160,02:12.8,814,149,,,
,F,1/8/2017,8:29,0:45:00,,,,,18:00.0,2182,21,168,02:03.7,935,185,,,
,F,1/8/2017,8:29,0:45:00,,,,,27:00.0,2251,22,167,01:59.9,998,203,,,
,F,1/8/2017,8:29,0:45:00,,,,,36:00.0,2221,21,168,02:01.5,970,195,,,
,F,1/8/2017,8:29,0:45:00,,,,,45:00.0,2273,22,176,01:58.7,1018,209,,,
,,,,,,,,,,,,,,,,,,
,F,1/8/2017,7:43,0:05:01,05:01.3,1119,20,106,,,,,02:14.6,793,143,,,
,F,1/8/2017,7:43,0:05:01,,,,,05:00.0,1117,20,106,02:14.2,797,145,,,
,F,1/8/2017,7:43,0:05:01,,,,,05:01.3,3,0,106,03:36.6,418,34,,,
,,,,,,,,,,,,,,,,,,
,F,1/4/2017,9:03,v5:00/2:00r...3,55:00.0,12799,18,157,,,,,02:08.9,862,163,,,
,F,1/4/2017,9:03,v5:00/2:00r...3,,,,,05:00.0,879,16,130,02:50.6,542,70,,,
,F,1/4/2017,9:03,v5:00/2:00r...3,,,,,,,,,,,,02:00.0,128,124
,F,1/4/2017,9:03,v5:00/2:00r...3,,,,,45:00.0,10953,22,176,02:03.2,943,187,,,
,F,1/4/2017,9:03,v5:00/2:00r...3,,,,,,,,,,,,00:00.0,0,0
,F,1/4/2017,9:03,v5:00/2:00r...3,,,,,05:00.0,971,18,165,02:34.4,626,95,,,
,F,1/4/2017,9:03,v5:00/2:00r...3,,,,,,,,,,,,00:00.0,0,0
,,,,,,,,,,,,,,,,,,
,F,1/2/2017,7:56,v5:00/1:00r...2,22:14.2,5280,22,152,,,,,02:06.3,897,174,,,
,F,1/2/2017,7:56,v5:00/1:00r...2,,,,,05:00.0,1224,23,147,02:02.5,954,190,,,
,F,1/2/2017,7:56,v5:00/1:00r...2,,,,,,,,,,,,01:00.0,23,132
,F,1/2/2017,7:56,v5:00/1:00r...2,,,,,17:14.2,4060,21,158,02:07.3,883,169,,,
,F,1/2/2017,7:56,v5:00/1:00r...2,,,,,,,,,,,,01:00.0,0,0
,,,,,,,,,,,,,,,,,,
,F,10/6/2016,8:53,10000m,40:30.8,10000,21,173,,,,,02:01.5,970,195,,,
,F,10/6/2016,8:53,10000m,,,,,07:52.1,2000,23,169,01:58.0,1032,213,,,
,F,10/6/2016,8:53,10000m,,,,,08:03.3,4000,22,164,02:00.8,982,198,,,
,F,10/6/2016,8:53,10000m,,,,,08:19.4,6000,21,175,02:04.8,918,180,,,
,F,10/6/2016,8:53,10000m,,,,,08:10.7,8000,21,176,02:02.6,952,190,,,
,F,10/6/2016,8:53,10000m,,,,,08:05.3,10000,21,181,02:01.3,974,196,,,
,,,,,,,,,,,,,,,,,,
,F,10/6/2016,8:12,0:03:02,03:02.3,713,21,142,,,,,02:07.8,876,168,,,
,F,10/6/2016,8:12,0:03:02,,,,,03:02.3,714,21,142,02:07.6,878,168,,,
,,,,,,,,,,,,,,,,,,
,F,9/27/2016,16:39,0:10:19,10:19.2,2110,22,0,,,,,02:26.7,681,111,,,
,F,9/27/2016,16:39,0:10:19,,,,,05:00.0,1029,24,0,02:25.7,688,113,,,
,F,9/27/2016,16:39,0:10:19,,,,,10:00.0,1020,25,0,02:27.0,678,110,,,
,F,9/27/2016,16:39,0:10:19,,,,,10:19.2,61,19,0,02:37.3,609,90,,,
,,,,,,,,,,,,,,,,,,
,F,9/27/2016,16:26,0:01:20,01:20.8,259,22,0,,,,,02:35.9,617,92,,,
,F,9/27/2016,16:26,0:01:20,,,,,01:20.8,259,22,0,02:35.9,617,92,,,
,,,,,,,,,,,,,,,,,,
,F,9/23/2016,7:59,0:30:00,30:00.0,7142,21,163,,,,,02:06.0,901,175,,,
,F,9/23/2016,7:59,0:30:00,,,,,06:00.0,1473,22,160,02:02.1,960,192,,,
,F,9/23/2016,7:59,0:30:00,,,,,12:00.0,1409,20,162,02:07.7,877,168,,,
,F,9/23/2016,7:59,0:30:00,,,,,18:00.0,1393,21,163,02:09.2,858,162,,,
,F,9/23/2016,7:59,0:30:00,,,,,24:00.0,1429,22,158,02:05.9,902,175,,,
,F,9/23/2016,7:59,0:30:00,,,,,30:00.0,1439,21,173,02:05.0,915,179,,,
,,,,,,,,,,,,,,,,,,
,F,9/23/2016,7:29,0:03:08,03:08.0,744,21,147,,,,,02:06.3,897,174,,,
,F,9/23/2016,7:29,0:03:08,,,,,03:08.0,744,21,147,02:06.3,897,174,,,
,,,,,,,,,,,,,,,,,,
,F,9/21/2016,16:09,0:34:16,34:16.9,6512,20,0,,,,,02:37.9,605,89,,,
,F,9/21/2016,16:09,0:34:16,,,,,05:00.0,980,22,0,02:33.0,635,98,,,
,F,9/21/2016,16:09,0:34:16,,,,,10:00.0,989,20,0,02:31.6,645,100,,,
,F,9/21/2016,16:09,0:34:16,,,,,15:00.0,966,21,0,02:35.2,621,93,,,
,F,9/21/2016,16:09,0:34:16,,,,,20:00.0,938,20,0,02:39.9,594,86,,,
,F,9/21/2016,16:09,0:34:16,,,,,25:00.0,946,21,0,02:38.5,602,88,,,
,F,9/21/2016,16:09,0:34:16,,,,,30:00.0,909,21,0,02:45.0,568,78,,,
,F,9/21/2016,16:09,0:34:16,,,,,34:16.9,785,20,0,02:43.6,574,80,,,
,,,,,,,,,,,,,,,,,,
,F,9/20/2016,17:11,0:33:19,33:19.3,6519,21,0,,,,,02:33.3,634,97,,,
,F,9/20/2016,17:11,0:33:19,,,,,05:00.0,991,22,0,02:31.3,647,101,,,
,F,9/20/2016,17:11,0:33:19,,,,,10:00.0,994,21,0,02:30.9,650,102,,,
,F,9/20/2016,17:11,0:33:19,,,,,15:00.0,977,21,0,02:33.5,632,97,,,
,F,9/20/2016,17:11,0:33:19,,,,,20:00.0,968,21,0,02:34.9,623,94,,,
,F,9/20/2016,17:11,0:33:19,,,,,25:00.0,982,21,0,02:32.7,637,98,,,
,F,9/20/2016,17:11,0:33:19,,,,,30:00.0,971,21,0,02:34.4,626,95,,,
,F,9/20/2016,17:11,0:33:19,,,,,33:19.3,637,20,0,02:36.4,614,91,,,
,,,,,,,,,,,,,,,,,,
,F,9/20/2016,8:12,0:22:32,22:32.5,4731,18,158,,,,,02:22.9,712,120,,,
,F,9/20/2016,8:12,0:22:32,,,,,09:00.0,2197,22,171,02:02.8,948,189,,,
,F,9/20/2016,8:12,0:22:32,,,,,18:00.0,1781,18,136,02:31.6,645,100,,,
,F,9/20/2016,8:12,0:22:32,,,,,22:32.5,753,15,168,03:00.9,503,59,,,
,,,,,,,,,,,,,,,,,,
,F,9/20/2016,7:48,v5:00...1,05:00.0,758,14,115,,,,,03:17.8,455,45,,,
,F,9/20/2016,7:48,v5:00...1,,,,,05:00.0,759,14,115,03:17.6,456,45,,,
,F,9/20/2016,7:48,v5:00...1,,,,,,,,,,,,00:00.0,0,0
,,,,,,,,,,,,,,,,,,
,F,9/19/2016,15:28,0:31:15,31:15.4,6511,22,0,,,,,02:24.0,703,117,,,
,F,9/19/2016,15:28,0:31:15,,,,,05:00.0,1040,24,0,02:24.2,701,117,,,
,F,9/19/2016,15:28,0:31:15,,,,,10:00.0,1037,23,0,02:24.6,698,116,,,
,F,9/19/2016,15:28,0:31:15,,,,,15:00.0,1067,23,0,02:20.5,733,126,,,
,F,9/19/2016,15:28,0:31:15,,,,,20:00.0,1046,23,0,02:23.4,708,119,,,
,F,9/19/2016,15:28,0:31:15,,,,,25:00.0,1025,21,0,02:26.3,684,112,,,
,F,9/19/2016,15:28,0:31:15,,,,,30:00.0,1045,22,0,02:23.5,707,118,,,
,F,9/19/2016,15:28,0:31:15,,,,,31:15.4,252,19,0,02:29.6,659,105,,,
,,,,,,,,,,,,,,,,,,
,F,9/15/2016,18:01,0:32:53,32:53.1,6694,22,0,,,,,02:27.3,676,109,,,
,F,9/15/2016,18:01,0:32:53,,,,,05:00.0,1055,24,0,02:22.1,719,122,,,
,F,9/15/2016,18:01,0:32:53,,,,,10:00.0,1042,23,0,02:23.9,703,117,,,
,F,9/15/2016,18:01,0:32:53,,,,,15:00.0,1017,22,0,02:27.4,675,109,,,
,F,9/15/2016,18:01,0:32:53,,,,,20:00.0,1030,23,0,02:25.6,690,113,,,
,F,9/15/2016,18:01,0:32:53,,,,,25:00.0,996,23,0,02:30.6,652,102,,,
,F,9/15/2016,18:01,0:32:53,,,,,30:00.0,983,22,0,02:32.5,639,99,,,
,F,9/15/2016,18:01,0:32:53,,,,,32:53.1,572,22,0,02:31.3,647,101,,,
,,,,,,,,,,,,,,,,,,
,F,9/13/2016,16:52,0:35:12,35:12.4,6740,23,0,,,,,02:36.7,613,91,,,
,F,9/13/2016,16:52,0:35:12,,,,,10:00.0,1928,28,0,02:35.6,619,93,,,
,F,9/13/2016,16:52,0:35:12,,,,,20:00.0,1955,26,0,02:33.4,633,97,,,
,F,9/13/2016,16:52,0:35:12,,,,,30:00.0,1958,23,0,02:33.2,634,97,,,
,F,9/13/2016,16:52,0:35:12,,,,,35:12.4,900,18,0,02:53.5,530,67,,,
,,,,,,,,,,,,,,,,,,
,F,9/3/2016,11:33,0:01:26,01:26.7,113,28,0,,,,,06:23.6,321,6,,,
,F,9/3/2016,11:33,0:01:26,,,,,01:26.7,114,28,0,06:20.2,321,6,,,
,,,,,,,,,,,,,,,,,,
,F,8/8/2016,7:45,0:24:18,24:18.4,4438,15,136,,,,,02:44.3,571,79,,,
,F,8/8/2016,7:45,0:24:18,,,,,11:00.0,2322,19,147,02:22.1,719,122,,,
,F,8/8/2016,7:45,0:24:18,,,,,22:00.0,1830,15,0,03:00.3,505,60,,,
,F,8/8/2016,7:45,0:24:18,,,,,24:18.4,287,11,126,04:01.1,385,25,,,
,,,,,,,,,,,,,,,,,,
,F,7/6/2016,7:49,0:45:00,45:00.0,10872,21,164,,,,,02:04.1,929,183,,,
,F,7/6/2016,7:49,0:45:00,,,,,09:00.0,2186,22,151,02:03.5,939,186,,,
,F,7/6/2016,7:49,0:45:00,,,,,18:00.0,2222,22,163,02:01.5,971,195,,,
,F,7/6/2016,7:49,0:45:00,,,,,27:00.0,2048,20,158,02:11.8,825,153,,,
,F,7/6/2016,7:49,0:45:00,,,,,36:00.0,2146,21,169,02:05.8,904,176,,,
,F,7/6/2016,7:49,0:45:00,,,,,45:00.0,2271,22,179,01:58.8,1016,208,,,
,,,,,,,,,,,,,,,,,,
,F,7/5/2016,8:18,0:45:00,45:00.0,10900,21,168,,,,,02:03.8,934,184,,,
,F,7/5/2016,8:18,0:45:00,,,,,09:00.0,2285,23,166,01:58.1,1030,212,,,
,F,7/5/2016,8:18,0:45:00,,,,,18:00.0,2256,22,168,01:59.6,1002,204,,,
,F,7/5/2016,8:18,0:45:00,,,,,27:00.0,2156,21,174,02:05.2,913,178,,,
,F,7/5/2016,8:18,0:45:00,,,,,36:00.0,2016,20,155,02:13.9,801,146,,,
,F,7/5/2016,8:18,0:45:00,,,,,45:00.0,2188,21,177,02:03.4,941,186,,,
,,,,,,,,,,,,,,,,,,
,F,7/2/2016,7:14,0:35:17,35:17.2,8855,22,167,,,,,01:59.5,1005,205,,,
,F,7/2/2016,7:14,0:35:17,,,,,11:01.6,2800,24,161,01:58.1,1030,212,,,
,F,7/2/2016,7:14,0:35:17,,,,,10:57.7,5600,23,171,01:57.4,1043,216,,,
,F,7/2/2016,7:14,0:35:17,,,,,11:24.5,8400,22,169,02:02.2,959,192,,,
,F,7/2/2016,7:14,0:35:17,,,,,01:53.4,8855,21,167,02:04.6,922,181,,,
,,,,,,,,,,,,,,,,,,
,F,7/1/2016,7:32,10000m,40:15.0,10000,23,176,,,,,02:00.7,984,199,,,
,F,7/1/2016,7:32,10000m,,,,,07:54.1,2000,24,168,01:58.5,1023,210,,,
,F,7/1/2016,7:32,10000m,,,,,07:50.3,4000,24,174,01:57.5,1041,215,,,
,F,7/1/2016,7:32,10000m,,,,,08:24.4,6000,22,177,02:06.1,900,175,,,
,F,7/1/2016,7:32,10000m,,,,,08:02.3,8000,23,180,02:00.5,987,200,,,
,F,7/1/2016,7:32,10000m,,,,,08:03.9,10000,22,182,02:00.9,980,198,,,
,,,,,,,,,,,,,,,,,,
,F,7/1/2016,6:51,0:02:29,02:29.5,529,20,130,,,,,02:21.3,726,124,,,
,F,7/1/2016,6:51,0:02:29,,,,,02:29.5,529,20,130,02:21.3,726,124,,,
,,,,,,,,,,,,,,,,,,
,F,6/28/2016,6:45,0:30:00,30:00.0,7434,22,173,,,,,02:01.0,978,197,,,
,F,6/28/2016,6:45,0:30:00,,,,,06:00.0,1534,23,0,01:57.3,1045,217,,,
,F,6/28/2016,6:45,0:30:00,,,,,12:00.0,1342,21,166,02:14.1,799,145,,,
,F,6/28/2016,6:45,0:30:00,,,,,18:00.0,1523,24,172,01:58.1,1029,212,,,
,F,6/28/2016,6:45,0:30:00,,,,,24:00.0,1484,22,173,02:01.2,975,196,,,
,F,6/28/2016,6:45,0:30:00,,,,,30:00.0,1552,23,184,01:55.9,1072,224,,,
,,,,,,,,,,,,,,,,,,
,F,6/25/2016,7:10,0:30:00,30:00.0,7675,23,171,,,,,01:57.2,1047,217,,,
,F,6/25/2016,7:10,0:30:00,,,,,06:00.0,1542,24,159,01:56.7,1057,220,,,
,F,6/25/2016,7:10,0:30:00,,,,,12:00.0,1527,24,165,01:57.8,1035,214,,,
,F,6/25/2016,7:10,0:30:00,,,,,18:00.0,1550,24,174,01:56.1,1069,223,,,
,F,6/25/2016,7:10,0:30:00,,,,,24:00.0,1502,23,179,01:59.8,999,203,,,
,F,6/25/2016,7:10,0:30:00,,,,,30:00.0,1556,23,181,01:55.6,1078,226,,,
,,,,,,,,,,,,,,,,,,
,F,6/9/2016,7:17,0:30:00,30:00.0,7299,22,166,,,,,02:03.3,942,187,,,
,F,6/9/2016,7:17,0:30:00,,,,,06:00.0,1563,24,168,01:55.1,1088,229,,,
,F,6/9/2016,7:17,0:30:00,,,,,12:00.0,1463,22,166,02:03.0,946,188,,,
,F,6/9/2016,7:17,0:30:00,,,,,18:00.0,1371,21,160,02:11.2,832,155,,,
,F,6/9/2016,7:17,0:30:00,,,,,24:00.0,1434,22,165,02:05.5,909,177,,,
,F,6/9/2016,7:17,0:30:00,,,,,30:00.0,1468,22,174,02:02.6,953,190,,,
,,,,,,,,,,,,,,,,,,
,F,5/24/2016,7:42,0:30:00,30:00.0,7612,23,168,,,,,01:58.2,1028,212,,,
,F,5/24/2016,7:42,0:30:00,,,,,06:00.0,1534,24,158,01:57.3,1045,217,,,
,F,5/24/2016,7:42,0:30:00,,,,,12:00.0,1491,23,167,02:00.7,984,199,,,
,F,5/24/2016,7:42,0:30:00,,,,,18:00.0,1507,23,169,01:59.4,1006,205,,,
,F,5/24/2016,7:42,0:30:00,,,,,24:00.0,1532,23,171,01:57.4,1042,216,,,
,F,5/24/2016,7:42,0:30:00,,,,,30:00.0,1548,23,175,01:56.2,1066,223,,,
,,,,,,,,,,,,,,,,,,
,F,5/7/2016,9:14,0:30:00,30:00.0,7540,23,164,,,,,01:59.3,1008,206,,,
,F,5/7/2016,9:14,0:30:00,,,,,06:00.0,1493,23,162,02:00.5,987,200,,,
,F,5/7/2016,9:14,0:30:00,,,,,12:00.0,1517,23,161,01:58.6,1021,210,,,
,F,5/7/2016,9:14,0:30:00,,,,,18:00.0,1498,23,165,02:00.1,994,202,,,
,F,5/7/2016,9:14,0:30:00,,,,,24:00.0,1506,23,164,01:59.5,1005,205,,,
,F,5/7/2016,9:14,0:30:00,,,,,30:00.0,1526,23,169,01:57.9,1033,213,,,
,,,,,,,,,,,,,,,,,,
,F,4/23/2016,8:17,10000m,39:24.5,10000,23,166,,,,,01:58.2,1028,212,,,
,F,4/23/2016,8:17,10000m,,,,,07:47.0,2000,24,161,01:56.7,1056,220,,,
,F,4/23/2016,8:17,10000m,,,,,07:44.2,4000,24,171,01:56.0,1070,224,,,
,F,4/23/2016,8:17,10000m,,,,,07:50.7,6000,23,165,01:57.6,1039,215,,,
,F,4/23/2016,8:17,10000m,,,,,08:09.4,8000,22,165,02:02.3,957,191,,,
,F,4/23/2016,8:17,10000m,,,,,07:53.1,10000,23,170,01:58.2,1028,212,,,
,,,,,,,,,,,,,,,,,,
,F,4/20/2016,8:12,0:30:00,30:00.0,7961,24,170,,,,,01:53.0,1133,242,,,
,F,4/20/2016,8:12,0:30:00,,,,,06:00.0,1583,24,160,01:53.7,1119,238,,,
,F,4/20/2016,8:12,0:30:00,,,,,12:00.0,1590,24,163,01:53.2,1130,241,,,
,F,4/20/2016,8:12,0:30:00,,,,,18:00.0,1594,24,173,01:52.9,1136,243,,,
,F,4/20/2016,8:12,0:30:00,,,,,24:00.0,1589,25,174,01:53.2,1128,241,,,
,F,4/20/2016,8:12,0:30:00,,,,,30:00.0,1607,24,182,01:52.0,1157,249,,,
,,,,,,,,,,,,,,,,,,
,F,4/18/2016,8:24,0:30:00,30:00.0,7608,22,167,,,,,01:58.2,1027,211,,,
,F,4/18/2016,8:24,0:30:00,,,,,06:00.0,1508,22,154,01:59.3,1008,206,,,
,F,4/18/2016,8:24,0:30:00,,,,,12:00.0,1468,21,161,02:02.6,953,190,,,
,F,4/18/2016,8:24,0:30:00,,,,,18:00.0,1542,23,171,01:56.7,1057,220,,,
,F,4/18/2016,8:24,0:30:00,,,,,24:00.0,1544,23,172,01:56.5,1060,221,,,
,F,4/18/2016,8:24,0:30:00,,,,,30:00.0,1546,23,179,01:56.4,1063,222,,,
,,,,,,,,,,,,,,,,,,
,F,4/17/2016,9:32,0:35:33,35:33.4,8406,21,152,,,,,02:06.8,889,171,,,
,F,4/17/2016,9:32,0:35:33,,,,,11:25.3,2800,22,162,02:02.3,957,191,,,
,F,4/17/2016,9:32,0:35:33,,,,,11:02.1,5600,22,171,01:58.2,1028,212,,,
,F,4/17/2016,9:32,0:35:33,,,,,13:03.3,8400,19,138,02:19.8,740,128,,,
,F,4/17/2016,9:32,0:35:33,,,,,00:02.7,8406,0,138,03:45.0,405,31,,,
,,,,,,,,,,,,,,,,,,
,F,4/11/2016,8:29,0:37:24,37:24.9,9267,22,170,,,,,02:01.1,977,197,,,
,F,4/11/2016,8:29,0:37:24,,,,,11:03.9,2800,23,169,01:58.5,1022,210,,,
,F,4/11/2016,8:29,0:37:24,,,,,11:21.7,5600,22,171,02:01.7,967,194,,,
,F,4/11/2016,8:29,0:37:24,,,,,11:30.7,8400,22,170,02:03.3,941,187,,,
,F,4/11/2016,8:29,0:37:24,,,,,03:28.8,9267,22,170,02:00.4,989,200,,,
1 Concept2 Utility - Version 7.06.15
2
3 Log Data for: F
4 Total Workout Results Split or Work Interval Results Results Calculated by Formulas Interval Rest Results
5 Name Date Time of Day Workout Name Time Meters Avg SPM Avg Heart Rate Time Meters SPM Heart Rate /500m Cal/hr Watt Time Meters Heart Rate
6
7 F 2/3/2017 7:48 0:25:03 25:03.6 6326 23 163 01:58.8 1017 209
8 F 2/3/2017 7:48 0:25:03 06:00.0 1484 23 150 02:01.2 975 196
9 F 2/3/2017 7:48 0:25:03 12:00.0 1532 24 161 01:57.4 1042 216
10 F 2/3/2017 7:48 0:25:03 18:00.0 1504 23 163 01:59.6 1002 204
11 F 2/3/2017 7:48 0:25:03 24:00.0 1535 23 171 01:57.2 1047 217
12 F 2/3/2017 7:48 0:25:03 25:03.6 271 23 170 01:57.3 1045 217
13
14 F 2/3/2017 7:21 0:01:23 01:23.7 312 20 116 02:14.1 799 145
15 F 2/3/2017 7:21 0:01:23 01:23.7 313 20 116 02:13.7 803 146
16
17 F 1/13/2017 8:42 0:45:00 45:00.0 11437 23 168 01:58.0 1032 213
18 F 1/13/2017 8:42 0:45:00 03:00.0 773 24 155 01:56.4 1063 222
19 F 1/13/2017 8:42 0:45:00 06:00.0 769 24 161 01:57.0 1051 218
20 F 1/13/2017 8:42 0:45:00 09:00.0 769 24 164 01:57.0 1051 218
21 F 1/13/2017 8:42 0:45:00 12:00.0 770 24 165 01:56.8 1054 219
22 F 1/13/2017 8:42 0:45:00 15:00.0 765 24 168 01:57.6 1039 215
23 F 1/13/2017 8:42 0:45:00 18:00.0 753 23 165 01:59.5 1005 205
24 F 1/13/2017 8:42 0:45:00 21:00.0 770 24 171 01:56.8 1054 219
25 F 1/13/2017 8:42 0:45:00 24:00.0 764 24 167 01:57.8 1036 214
26 F 1/13/2017 8:42 0:45:00 27:00.0 763 24 169 01:57.9 1033 213
27 F 1/13/2017 8:42 0:45:00 30:00.0 770 24 173 01:56.8 1054 219
28 F 1/13/2017 8:42 0:45:00 33:00.0 764 23 173 01:57.8 1036 214
29 F 1/13/2017 8:42 0:45:00 36:00.0 739 23 172 02:01.7 966 194
30 F 1/13/2017 8:42 0:45:00 39:00.0 723 22 171 02:04.4 924 181
31 F 1/13/2017 8:42 0:45:00 42:00.0 759 23 175 01:58.5 1022 210
32 F 1/13/2017 8:42 0:45:00 45:00.0 787 23 179 01:54.3 1105 234
33
34 F 1/13/2017 7:57 0:04:53 04:53.6 1080 20 71 02:15.9 779 139
35 F 1/13/2017 7:57 0:04:53 04:53.6 1081 20 71 02:15.8 780 140
36
37 F 1/10/2017 8:29 0:45:00 45:00.0 11260 21 170 01:59.8 998 203
38 F 1/10/2017 8:29 0:45:00 09:00.0 2320 22 168 01:56.3 1064 222
39 F 1/10/2017 8:29 0:45:00 18:00.0 2275 22 168 01:58.6 1020 209
40 F 1/10/2017 8:29 0:45:00 27:00.0 2142 21 167 02:06.0 901 175
41 F 1/10/2017 8:29 0:45:00 36:00.0 2243 21 173 02:00.3 990 201
42 F 1/10/2017 8:29 0:45:00 45:00.0 2281 22 177 01:58.3 1026 211
43
44 F 1/10/2017 7:43 0:04:36 04:36.1 1048 21 113 02:11.7 826 153
45 F 1/10/2017 7:43 0:04:36 04:36.1 1049 21 113 02:11.6 828 154
46
47 F 1/8/2017 8:29 0:45:00 45:00.0 10960 21 167 02:03.1 944 187
48 F 1/8/2017 8:29 0:45:00 09:00.0 2033 20 160 02:12.8 814 149
49 F 1/8/2017 8:29 0:45:00 18:00.0 2182 21 168 02:03.7 935 185
50 F 1/8/2017 8:29 0:45:00 27:00.0 2251 22 167 01:59.9 998 203
51 F 1/8/2017 8:29 0:45:00 36:00.0 2221 21 168 02:01.5 970 195
52 F 1/8/2017 8:29 0:45:00 45:00.0 2273 22 176 01:58.7 1018 209
53
54 F 1/8/2017 7:43 0:05:01 05:01.3 1119 20 106 02:14.6 793 143
55 F 1/8/2017 7:43 0:05:01 05:00.0 1117 20 106 02:14.2 797 145
56 F 1/8/2017 7:43 0:05:01 05:01.3 3 0 106 03:36.6 418 34
57
58 F 1/4/2017 9:03 v5:00/2:00r...3 55:00.0 12799 18 157 02:08.9 862 163
59 F 1/4/2017 9:03 v5:00/2:00r...3 05:00.0 879 16 130 02:50.6 542 70
60 F 1/4/2017 9:03 v5:00/2:00r...3 02:00.0 128 124
61 F 1/4/2017 9:03 v5:00/2:00r...3 45:00.0 10953 22 176 02:03.2 943 187
62 F 1/4/2017 9:03 v5:00/2:00r...3 00:00.0 0 0
63 F 1/4/2017 9:03 v5:00/2:00r...3 05:00.0 971 18 165 02:34.4 626 95
64 F 1/4/2017 9:03 v5:00/2:00r...3 00:00.0 0 0
65
66 F 1/2/2017 7:56 v5:00/1:00r...2 22:14.2 5280 22 152 02:06.3 897 174
67 F 1/2/2017 7:56 v5:00/1:00r...2 05:00.0 1224 23 147 02:02.5 954 190
68 F 1/2/2017 7:56 v5:00/1:00r...2 01:00.0 23 132
69 F 1/2/2017 7:56 v5:00/1:00r...2 17:14.2 4060 21 158 02:07.3 883 169
70 F 1/2/2017 7:56 v5:00/1:00r...2 01:00.0 0 0
71
72 F 10/6/2016 8:53 10000m 40:30.8 10000 21 173 02:01.5 970 195
73 F 10/6/2016 8:53 10000m 07:52.1 2000 23 169 01:58.0 1032 213
74 F 10/6/2016 8:53 10000m 08:03.3 4000 22 164 02:00.8 982 198
75 F 10/6/2016 8:53 10000m 08:19.4 6000 21 175 02:04.8 918 180
76 F 10/6/2016 8:53 10000m 08:10.7 8000 21 176 02:02.6 952 190
77 F 10/6/2016 8:53 10000m 08:05.3 10000 21 181 02:01.3 974 196
78
79 F 10/6/2016 8:12 0:03:02 03:02.3 713 21 142 02:07.8 876 168
80 F 10/6/2016 8:12 0:03:02 03:02.3 714 21 142 02:07.6 878 168
81
82 F 9/27/2016 16:39 0:10:19 10:19.2 2110 22 0 02:26.7 681 111
83 F 9/27/2016 16:39 0:10:19 05:00.0 1029 24 0 02:25.7 688 113
84 F 9/27/2016 16:39 0:10:19 10:00.0 1020 25 0 02:27.0 678 110
85 F 9/27/2016 16:39 0:10:19 10:19.2 61 19 0 02:37.3 609 90
86
87 F 9/27/2016 16:26 0:01:20 01:20.8 259 22 0 02:35.9 617 92
88 F 9/27/2016 16:26 0:01:20 01:20.8 259 22 0 02:35.9 617 92
89
90 F 9/23/2016 7:59 0:30:00 30:00.0 7142 21 163 02:06.0 901 175
91 F 9/23/2016 7:59 0:30:00 06:00.0 1473 22 160 02:02.1 960 192
92 F 9/23/2016 7:59 0:30:00 12:00.0 1409 20 162 02:07.7 877 168
93 F 9/23/2016 7:59 0:30:00 18:00.0 1393 21 163 02:09.2 858 162
94 F 9/23/2016 7:59 0:30:00 24:00.0 1429 22 158 02:05.9 902 175
95 F 9/23/2016 7:59 0:30:00 30:00.0 1439 21 173 02:05.0 915 179
96
97 F 9/23/2016 7:29 0:03:08 03:08.0 744 21 147 02:06.3 897 174
98 F 9/23/2016 7:29 0:03:08 03:08.0 744 21 147 02:06.3 897 174
99
100 F 9/21/2016 16:09 0:34:16 34:16.9 6512 20 0 02:37.9 605 89
101 F 9/21/2016 16:09 0:34:16 05:00.0 980 22 0 02:33.0 635 98
102 F 9/21/2016 16:09 0:34:16 10:00.0 989 20 0 02:31.6 645 100
103 F 9/21/2016 16:09 0:34:16 15:00.0 966 21 0 02:35.2 621 93
104 F 9/21/2016 16:09 0:34:16 20:00.0 938 20 0 02:39.9 594 86
105 F 9/21/2016 16:09 0:34:16 25:00.0 946 21 0 02:38.5 602 88
106 F 9/21/2016 16:09 0:34:16 30:00.0 909 21 0 02:45.0 568 78
107 F 9/21/2016 16:09 0:34:16 34:16.9 785 20 0 02:43.6 574 80
108
109 F 9/20/2016 17:11 0:33:19 33:19.3 6519 21 0 02:33.3 634 97
110 F 9/20/2016 17:11 0:33:19 05:00.0 991 22 0 02:31.3 647 101
111 F 9/20/2016 17:11 0:33:19 10:00.0 994 21 0 02:30.9 650 102
112 F 9/20/2016 17:11 0:33:19 15:00.0 977 21 0 02:33.5 632 97
113 F 9/20/2016 17:11 0:33:19 20:00.0 968 21 0 02:34.9 623 94
114 F 9/20/2016 17:11 0:33:19 25:00.0 982 21 0 02:32.7 637 98
115 F 9/20/2016 17:11 0:33:19 30:00.0 971 21 0 02:34.4 626 95
116 F 9/20/2016 17:11 0:33:19 33:19.3 637 20 0 02:36.4 614 91
117
118 F 9/20/2016 8:12 0:22:32 22:32.5 4731 18 158 02:22.9 712 120
119 F 9/20/2016 8:12 0:22:32 09:00.0 2197 22 171 02:02.8 948 189
120 F 9/20/2016 8:12 0:22:32 18:00.0 1781 18 136 02:31.6 645 100
121 F 9/20/2016 8:12 0:22:32 22:32.5 753 15 168 03:00.9 503 59
122
123 F 9/20/2016 7:48 v5:00...1 05:00.0 758 14 115 03:17.8 455 45
124 F 9/20/2016 7:48 v5:00...1 05:00.0 759 14 115 03:17.6 456 45
125 F 9/20/2016 7:48 v5:00...1 00:00.0 0 0
126
127 F 9/19/2016 15:28 0:31:15 31:15.4 6511 22 0 02:24.0 703 117
128 F 9/19/2016 15:28 0:31:15 05:00.0 1040 24 0 02:24.2 701 117
129 F 9/19/2016 15:28 0:31:15 10:00.0 1037 23 0 02:24.6 698 116
130 F 9/19/2016 15:28 0:31:15 15:00.0 1067 23 0 02:20.5 733 126
131 F 9/19/2016 15:28 0:31:15 20:00.0 1046 23 0 02:23.4 708 119
132 F 9/19/2016 15:28 0:31:15 25:00.0 1025 21 0 02:26.3 684 112
133 F 9/19/2016 15:28 0:31:15 30:00.0 1045 22 0 02:23.5 707 118
134 F 9/19/2016 15:28 0:31:15 31:15.4 252 19 0 02:29.6 659 105
135
136 F 9/15/2016 18:01 0:32:53 32:53.1 6694 22 0 02:27.3 676 109
137 F 9/15/2016 18:01 0:32:53 05:00.0 1055 24 0 02:22.1 719 122
138 F 9/15/2016 18:01 0:32:53 10:00.0 1042 23 0 02:23.9 703 117
139 F 9/15/2016 18:01 0:32:53 15:00.0 1017 22 0 02:27.4 675 109
140 F 9/15/2016 18:01 0:32:53 20:00.0 1030 23 0 02:25.6 690 113
141 F 9/15/2016 18:01 0:32:53 25:00.0 996 23 0 02:30.6 652 102
142 F 9/15/2016 18:01 0:32:53 30:00.0 983 22 0 02:32.5 639 99
143 F 9/15/2016 18:01 0:32:53 32:53.1 572 22 0 02:31.3 647 101
144
145 F 9/13/2016 16:52 0:35:12 35:12.4 6740 23 0 02:36.7 613 91
146 F 9/13/2016 16:52 0:35:12 10:00.0 1928 28 0 02:35.6 619 93
147 F 9/13/2016 16:52 0:35:12 20:00.0 1955 26 0 02:33.4 633 97
148 F 9/13/2016 16:52 0:35:12 30:00.0 1958 23 0 02:33.2 634 97
149 F 9/13/2016 16:52 0:35:12 35:12.4 900 18 0 02:53.5 530 67
150
151 F 9/3/2016 11:33 0:01:26 01:26.7 113 28 0 06:23.6 321 6
152 F 9/3/2016 11:33 0:01:26 01:26.7 114 28 0 06:20.2 321 6
153
154 F 8/8/2016 7:45 0:24:18 24:18.4 4438 15 136 02:44.3 571 79
155 F 8/8/2016 7:45 0:24:18 11:00.0 2322 19 147 02:22.1 719 122
156 F 8/8/2016 7:45 0:24:18 22:00.0 1830 15 0 03:00.3 505 60
157 F 8/8/2016 7:45 0:24:18 24:18.4 287 11 126 04:01.1 385 25
158
159 F 7/6/2016 7:49 0:45:00 45:00.0 10872 21 164 02:04.1 929 183
160 F 7/6/2016 7:49 0:45:00 09:00.0 2186 22 151 02:03.5 939 186
161 F 7/6/2016 7:49 0:45:00 18:00.0 2222 22 163 02:01.5 971 195
162 F 7/6/2016 7:49 0:45:00 27:00.0 2048 20 158 02:11.8 825 153
163 F 7/6/2016 7:49 0:45:00 36:00.0 2146 21 169 02:05.8 904 176
164 F 7/6/2016 7:49 0:45:00 45:00.0 2271 22 179 01:58.8 1016 208
165
166 F 7/5/2016 8:18 0:45:00 45:00.0 10900 21 168 02:03.8 934 184
167 F 7/5/2016 8:18 0:45:00 09:00.0 2285 23 166 01:58.1 1030 212
168 F 7/5/2016 8:18 0:45:00 18:00.0 2256 22 168 01:59.6 1002 204
169 F 7/5/2016 8:18 0:45:00 27:00.0 2156 21 174 02:05.2 913 178
170 F 7/5/2016 8:18 0:45:00 36:00.0 2016 20 155 02:13.9 801 146
171 F 7/5/2016 8:18 0:45:00 45:00.0 2188 21 177 02:03.4 941 186
172
173 F 7/2/2016 7:14 0:35:17 35:17.2 8855 22 167 01:59.5 1005 205
174 F 7/2/2016 7:14 0:35:17 11:01.6 2800 24 161 01:58.1 1030 212
175 F 7/2/2016 7:14 0:35:17 10:57.7 5600 23 171 01:57.4 1043 216
176 F 7/2/2016 7:14 0:35:17 11:24.5 8400 22 169 02:02.2 959 192
177 F 7/2/2016 7:14 0:35:17 01:53.4 8855 21 167 02:04.6 922 181
178
179 F 7/1/2016 7:32 10000m 40:15.0 10000 23 176 02:00.7 984 199
180 F 7/1/2016 7:32 10000m 07:54.1 2000 24 168 01:58.5 1023 210
181 F 7/1/2016 7:32 10000m 07:50.3 4000 24 174 01:57.5 1041 215
182 F 7/1/2016 7:32 10000m 08:24.4 6000 22 177 02:06.1 900 175
183 F 7/1/2016 7:32 10000m 08:02.3 8000 23 180 02:00.5 987 200
184 F 7/1/2016 7:32 10000m 08:03.9 10000 22 182 02:00.9 980 198
185
186 F 7/1/2016 6:51 0:02:29 02:29.5 529 20 130 02:21.3 726 124
187 F 7/1/2016 6:51 0:02:29 02:29.5 529 20 130 02:21.3 726 124
188
189 F 6/28/2016 6:45 0:30:00 30:00.0 7434 22 173 02:01.0 978 197
190 F 6/28/2016 6:45 0:30:00 06:00.0 1534 23 0 01:57.3 1045 217
191 F 6/28/2016 6:45 0:30:00 12:00.0 1342 21 166 02:14.1 799 145
192 F 6/28/2016 6:45 0:30:00 18:00.0 1523 24 172 01:58.1 1029 212
193 F 6/28/2016 6:45 0:30:00 24:00.0 1484 22 173 02:01.2 975 196
194 F 6/28/2016 6:45 0:30:00 30:00.0 1552 23 184 01:55.9 1072 224
195
196 F 6/25/2016 7:10 0:30:00 30:00.0 7675 23 171 01:57.2 1047 217
197 F 6/25/2016 7:10 0:30:00 06:00.0 1542 24 159 01:56.7 1057 220
198 F 6/25/2016 7:10 0:30:00 12:00.0 1527 24 165 01:57.8 1035 214
199 F 6/25/2016 7:10 0:30:00 18:00.0 1550 24 174 01:56.1 1069 223
200 F 6/25/2016 7:10 0:30:00 24:00.0 1502 23 179 01:59.8 999 203
201 F 6/25/2016 7:10 0:30:00 30:00.0 1556 23 181 01:55.6 1078 226
202
203 F 6/9/2016 7:17 0:30:00 30:00.0 7299 22 166 02:03.3 942 187
204 F 6/9/2016 7:17 0:30:00 06:00.0 1563 24 168 01:55.1 1088 229
205 F 6/9/2016 7:17 0:30:00 12:00.0 1463 22 166 02:03.0 946 188
206 F 6/9/2016 7:17 0:30:00 18:00.0 1371 21 160 02:11.2 832 155
207 F 6/9/2016 7:17 0:30:00 24:00.0 1434 22 165 02:05.5 909 177
208 F 6/9/2016 7:17 0:30:00 30:00.0 1468 22 174 02:02.6 953 190
209
210 F 5/24/2016 7:42 0:30:00 30:00.0 7612 23 168 01:58.2 1028 212
211 F 5/24/2016 7:42 0:30:00 06:00.0 1534 24 158 01:57.3 1045 217
212 F 5/24/2016 7:42 0:30:00 12:00.0 1491 23 167 02:00.7 984 199
213 F 5/24/2016 7:42 0:30:00 18:00.0 1507 23 169 01:59.4 1006 205
214 F 5/24/2016 7:42 0:30:00 24:00.0 1532 23 171 01:57.4 1042 216
215 F 5/24/2016 7:42 0:30:00 30:00.0 1548 23 175 01:56.2 1066 223
216
217 F 5/7/2016 9:14 0:30:00 30:00.0 7540 23 164 01:59.3 1008 206
218 F 5/7/2016 9:14 0:30:00 06:00.0 1493 23 162 02:00.5 987 200
219 F 5/7/2016 9:14 0:30:00 12:00.0 1517 23 161 01:58.6 1021 210
220 F 5/7/2016 9:14 0:30:00 18:00.0 1498 23 165 02:00.1 994 202
221 F 5/7/2016 9:14 0:30:00 24:00.0 1506 23 164 01:59.5 1005 205
222 F 5/7/2016 9:14 0:30:00 30:00.0 1526 23 169 01:57.9 1033 213
223
224 F 4/23/2016 8:17 10000m 39:24.5 10000 23 166 01:58.2 1028 212
225 F 4/23/2016 8:17 10000m 07:47.0 2000 24 161 01:56.7 1056 220
226 F 4/23/2016 8:17 10000m 07:44.2 4000 24 171 01:56.0 1070 224
227 F 4/23/2016 8:17 10000m 07:50.7 6000 23 165 01:57.6 1039 215
228 F 4/23/2016 8:17 10000m 08:09.4 8000 22 165 02:02.3 957 191
229 F 4/23/2016 8:17 10000m 07:53.1 10000 23 170 01:58.2 1028 212
230
231 F 4/20/2016 8:12 0:30:00 30:00.0 7961 24 170 01:53.0 1133 242
232 F 4/20/2016 8:12 0:30:00 06:00.0 1583 24 160 01:53.7 1119 238
233 F 4/20/2016 8:12 0:30:00 12:00.0 1590 24 163 01:53.2 1130 241
234 F 4/20/2016 8:12 0:30:00 18:00.0 1594 24 173 01:52.9 1136 243
235 F 4/20/2016 8:12 0:30:00 24:00.0 1589 25 174 01:53.2 1128 241
236 F 4/20/2016 8:12 0:30:00 30:00.0 1607 24 182 01:52.0 1157 249
237
238 F 4/18/2016 8:24 0:30:00 30:00.0 7608 22 167 01:58.2 1027 211
239 F 4/18/2016 8:24 0:30:00 06:00.0 1508 22 154 01:59.3 1008 206
240 F 4/18/2016 8:24 0:30:00 12:00.0 1468 21 161 02:02.6 953 190
241 F 4/18/2016 8:24 0:30:00 18:00.0 1542 23 171 01:56.7 1057 220
242 F 4/18/2016 8:24 0:30:00 24:00.0 1544 23 172 01:56.5 1060 221
243 F 4/18/2016 8:24 0:30:00 30:00.0 1546 23 179 01:56.4 1063 222
244
245 F 4/17/2016 9:32 0:35:33 35:33.4 8406 21 152 02:06.8 889 171
246 F 4/17/2016 9:32 0:35:33 11:25.3 2800 22 162 02:02.3 957 191
247 F 4/17/2016 9:32 0:35:33 11:02.1 5600 22 171 01:58.2 1028 212
248 F 4/17/2016 9:32 0:35:33 13:03.3 8400 19 138 02:19.8 740 128
249 F 4/17/2016 9:32 0:35:33 00:02.7 8406 0 138 03:45.0 405 31
250
251 F 4/11/2016 8:29 0:37:24 37:24.9 9267 22 170 02:01.1 977 197
252 F 4/11/2016 8:29 0:37:24 11:03.9 2800 23 169 01:58.5 1022 210
253 F 4/11/2016 8:29 0:37:24 11:21.7 5600 22 171 02:01.7 967 194
254 F 4/11/2016 8:29 0:37:24 11:30.7 8400 22 170 02:03.3 941 187
255 F 4/11/2016 8:29 0:37:24 03:28.8 9267 22 170 02:00.4 989 200

1246
rowers/upload_tasks.py Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -2,7 +2,6 @@ from rowers.mytypes import workouttypes, boattypes, ergtypes, otwtypes, workouts
from rowers.rower_rules import is_promember
from rowers.integrations import *
from rowers.utils import (
geo_distance, serialize_list, deserialize_list, uniqify,
str2bool, range_to_color_hex, absolute, myqueue, NoTokenError
@@ -104,7 +103,7 @@ def make_plot(r, w, f1, f2, plottype, title, imagename='', plotnr=0):
otwrange = [r.fastpaceotw.total_seconds(), r.slowpaceotw.total_seconds()]
oterange = [r.fastpaceerg.total_seconds(), r.slowpaceerg.total_seconds()]
job = myqueue(queuehigh, handle_makeplot, f1, f2,
job = myqueue(queue, handle_makeplot, f1, f2,
title, hrpwrdata,
plotnr, imagename, gridtrue=gridtrue, axis=axis,
otwrange=otwrange, oterange=oterange)
@@ -130,6 +129,11 @@ def make_plot(r, w, f1, f2, plottype, title, imagename='', plotnr=0):
def do_sync(w, options, quick=False):
from rowers.integrations import (
C2Integration, IntervalsIntegration,
SportTracksIntegration, TPIntegration,
StravaIntegration,
)
if w.duplicate:
return 0

View File

@@ -15,6 +15,7 @@ from rest_framework.decorators import parser_classes
from rest_framework.parsers import BaseParser
from rowers.utils import geo_distance
from rowers.dataflow import upload_handler
from datetime import datetime as dt
@@ -467,7 +468,6 @@ def strokedata_rowingdata(request):
filename, completefilename = handle_uploaded_file(f)
uploadoptions = {
'secret': settings.UPLOAD_SERVICE_SECRET,
'user': r.user.id,
'file': completefilename,
'workouttype': form.cleaned_data['workouttype'],
@@ -477,14 +477,18 @@ def strokedata_rowingdata(request):
'notes': form.cleaned_data['notes']
}
url = settings.UPLOAD_SERVICE_URL
result = upload_handler(uploadoptions, completefilename, createworkout=True)
if result['status'] != 'processing':
dologging('apilog.log','Error in strokedata_rowingdata:')
dologging('apilog.log',result)
return JsonResponse(
result,
status=500
)
_ = myqueue(queuehigh,
handle_request_post,
url,
uploadoptions)
workoutid = result.get('job_id',0)
response = JsonResponse(
{
{"workout public id": workoutid,
"status": "success",
}
)
@@ -518,7 +522,6 @@ def strokedata_rowingdata_apikey(request):
filename, completefilename = handle_uploaded_file(f)
uploadoptions = {
'secret': settings.UPLOAD_SERVICE_SECRET,
'user': r.user.id,
'file': completefilename,
'workouttype': form.cleaned_data['workouttype'],
@@ -528,17 +531,22 @@ def strokedata_rowingdata_apikey(request):
'notes': form.cleaned_data['notes']
}
url = settings.UPLOAD_SERVICE_URL
result = upload_handler(uploadoptions, completefilename, createworkout=True)
_ = myqueue(queuehigh,
handle_request_post,
url,
uploadoptions)
if result['status'] != 'processing':
dologging('apilog.log','Error in strokedata_rowingdata_apikey:')
dologging('apilog.log',result)
return JsonResponse(
result,
status=500
)
workoutid = result.get('job_id',0)
response = JsonResponse(
{
{"workout public id": workoutid,
"status": "success",
}
)
response.status_code = 201
return response
@@ -614,7 +622,6 @@ def strokedata_fit(request):
)
uploadoptions = {
'secret': UPLOAD_SERVICE_SECRET,
'user': request.user.id,
'file': fit_filename,
'boattype': '1x',
@@ -626,20 +633,18 @@ def strokedata_fit(request):
'offline': False,
}
url = UPLOAD_SERVICE_URL
_ = myqueue(queuehigh,
handle_request_post,
url,
uploadoptions)
result = upload_handler(uploadoptions, fit_filename)
dologging('apilog.log','FIT file uploaded, returning response')
returndict = {
if result.get('status','') != 'processing':
return JsonResponse(result, status=500)
workoutid = result.get('job_id',0)
return JsonResponse(
{"workout public id": workoutid,
"status": "success",
"workout public id": encoder.encode_hex(w.id),
"workout id": w.id,
}
return JsonResponse(returndict, status=201)
})
except Exception as e:
dologging('apilog.log','FIT API endpoint')
dologging('apilog.log',e)
@@ -736,7 +741,6 @@ def strokedata_tcx(request):
# need workouttype, duration
uploadoptions = {
'secret': UPLOAD_SERVICE_SECRET,
'user': request.user.id,
'file': tcxfilename,
'id': w.id,
@@ -748,16 +752,14 @@ def strokedata_tcx(request):
'offline': False,
}
result = upload_handler(uploadoptions, tcxfilename)
if result.get('status','') != 'processing':
return JsonResponse(result, status=500)
_ = myqueue(queuehigh,
handle_post_workout_api,
uploadoptions)
workoutid = w.id
workoutid = result.get('job_id',0)
return JsonResponse(
{"workout public id": encoder.encode_hex(workoutid),
"workout id": workoutid,
{"workout public id": workoutid,
"status": "success",
})
except Exception as e: # pragma: no cover
@@ -777,7 +779,7 @@ def strokedatajson_v3(request):
"""
POST: Add Stroke data to workout
GET: Get stroke data of workout
This v2 API works on stroke based data dict:
This v3 API works on stroke based data dict:
{
"distance": 2100,
"elapsedTime": 592,
@@ -884,7 +886,6 @@ def strokedatajson_v3(request):
w.save()
uploadoptions = {
'secret': UPLOAD_SERVICE_SECRET,
'user': request.user.id,
'file': csvfilename,
'title': title,
@@ -898,10 +899,9 @@ def strokedatajson_v3(request):
'id': w.id,
}
_ = myqueue(queuehigh,
handle_post_workout_api,
uploadoptions)
result = upload_handler(uploadoptions, csvfilename)
if result.get('status','') != 'processing':
return JsonResponse(result, status=500)
workoutid = w.id

View File

@@ -5,7 +5,7 @@ from rowsandall_app.settings import (
from rowers.views.statements import *
from rowers.plannedsessions import get_dates_timeperiod
from rowers.tasks import fetch_strava_workout
from rowers.utils import NoTokenError
from rowers.models import PlannedSession

View File

@@ -255,14 +255,12 @@ from rowers.plannedsessions import *
from rowers.tasks import handle_makeplot, handle_otwsetpower, handle_sendemailtcx, handle_sendemailcsv
from rowers.tasks import (
handle_intervals_updateworkout,
handle_post_workout_api,
handle_sendemail_newftp,
instroke_static,
fetch_rojabo_session,
handle_sendemail_unrecognized, handle_sendemailnewcomment,
handle_request_post,
handle_sendemailsummary,
handle_rp3_async_workout,
handle_send_template_email,
handle_send_disqualification_email,
handle_send_withdraw_email,
@@ -278,11 +276,17 @@ from rowers.tasks import (
handle_sendemail_racesubmission,
handle_sendemail_optout,
handle_sendemail_ical,
handle_c2_async_workout,
handle_send_email_instantplan_notification,
handle_nk_async_workout,
check_tp_workout_id,
)
from rowers.upload_tasks import (
handle_assignworkouts,
handle_post_workout_api,
handle_c2_async_workout,
handle_rp3_async_workout,
handle_sporttracks_workout_from_data,
handle_split_workout_by_intervals,
)

View File

@@ -23,6 +23,7 @@ def default(o): # pragma: no cover
return int(o)
raise TypeError
from rowers.dataflow import upload_handler
def get_video_id(url):
"""Returns Video_ID extracting from the given url of Youtube
@@ -5263,16 +5264,10 @@ def workout_upload_view(request,
'upload_to_C2': False,
'plottype': 'timeplot',
'landingpage': 'workout_edit_view',
},
docformoptions={
'workouttype': 'rower',
},
raceid=0):
is_ajax = request.META.get('HTTP_X_REQUESTED_WITH') == 'XMLHttpRequest'
if settings.TESTING:
is_ajax = False
r = getrower(request.user)
if r.imports_are_private:
uploadoptions['makeprivate'] = True
@@ -5290,418 +5285,53 @@ def workout_upload_view(request,
if 'uploadoptions' in request.session:
uploadoptions = request.session['uploadoptions']
try:
_ = uploadoptions['landingpage']
except KeyError: # pragma: no cover
uploadoptions['landingpage'] = r.defaultlandingpage
else:
request.session['uploadoptions'] = uploadoptions
if 'docformoptions' in request.session:
docformoptions = request.session['docformoptions']
else:
request.session['docformoptions'] = docformoptions
form = DocumentsForm(initial=uploadoptions)
optionsform = UploadOptionsForm(initial=uploadoptions,
request=request, raceid=raceid)
makeprivate = uploadoptions.get('makeprivate', False)
make_plot = uploadoptions.get('make_plot', False)
workouttype = uploadoptions.get('WorkoutType', 'rower')
boattype = docformoptions.get('boattype', '1x')
try:
rpe = docformoptions['rpe']
try: # pragma: no cover
rpe = int(rpe)
except ValueError: # pragma: no cover
rpe = 0
if not rpe: # pragma: no cover
rpe = -1
except KeyError:
rpe = -1
notes = docformoptions.get('notes', '')
workoutsource = uploadoptions.get('workoutsource', None)
plottype = uploadoptions.get('plottype', 'timeplot')
landingpage = uploadoptions.get('landingpage', r.defaultlandingpage)
upload_to_c2 = uploadoptions.get('upload_to_C2', False)
upload_to_strava = uploadoptions.get('upload_to_Strava', False)
upload_to_st = uploadoptions.get('upload_to_SportTracks', False)
upload_to_tp = uploadoptions.get('upload_to_TrainingPeaks', False)
upload_to_intervals = uploadoptions.get('upload_to_Intervals', False)
response = {}
if request.method == 'POST':
form = DocumentsForm(request.POST, request.FILES)
optionsform = UploadOptionsForm(request.POST, request=request)
if form.is_valid():
# f = request.FILES['file']
f = form.cleaned_data['file']
if f is not None:
res = handle_uploaded_file(f)
else: # pragma: no cover
messages.error(request,
"Something went wrong - no file attached")
url = reverse('workout_upload_view')
if is_ajax:
return JSONResponse({'result': 0, 'url': 0})
else:
return HttpResponseRedirect(url)
t = form.cleaned_data['title']
workouttype = form.cleaned_data['workouttype']
boattype = form.cleaned_data['boattype']
try:
rpe = form.cleaned_data['rpe']
try:
rpe = int(rpe)
except ValueError:
rpe = 0
except KeyError: # pragma: no cover
rpe = -1
request.session['docformoptions'] = {
'workouttype': workouttype,
'boattype': boattype,
}
notes = form.cleaned_data['notes']
offline = form.cleaned_data['offline']
registrationid = 0
if optionsform.is_valid():
make_plot = optionsform.cleaned_data['make_plot']
plottype = optionsform.cleaned_data['plottype']
upload_to_c2 = optionsform.cleaned_data['upload_to_C2']
upload_to_strava = optionsform.cleaned_data['upload_to_Strava']
upload_to_st = optionsform.cleaned_data['upload_to_SportTracks']
upload_to_tp = optionsform.cleaned_data['upload_to_TrainingPeaks']
upload_to_intervals = optionsform.cleaned_data['upload_to_Intervals']
makeprivate = optionsform.cleaned_data['makeprivate']
landingpage = optionsform.cleaned_data['landingpage']
raceid = optionsform.cleaned_data['raceid']
try:
registrationid = optionsform.cleaned_data['submitrace']
except KeyError:
registrationid = 0
uploadoptions = {
'makeprivate': makeprivate,
'make_plot': make_plot,
'plottype': plottype,
'upload_to_C2': upload_to_c2,
'upload_to_Strava': upload_to_strava,
'upload_to_SportTracks': upload_to_st,
'upload_to_TrainingPeaks': upload_to_tp,
'upload_to_Intervals': upload_to_intervals,
'landingpage': landingpage,
'boattype': boattype,
'rpe': rpe,
'workouttype': workouttype,
}
if form.is_valid() and optionsform.is_valid():
uploadoptions = form.cleaned_data.copy()
uploadoptions.update(optionsform.cleaned_data)
request.session['uploadoptions'] = uploadoptions
f1 = res[0] # file name
f2 = res[1] # file name incl media directory
if not offline:
id, message, f2 = dataprep.new_workout_from_file(
r, f2,
workouttype=workouttype,
workoutsource=workoutsource,
boattype=boattype,
rpe=rpe,
makeprivate=makeprivate,
title=t,
notes=notes,
)
else:
uploadoptions['secret'] = settings.UPLOAD_SERVICE_SECRET
uploadoptions['user'] = r.user.id
uploadoptions['title'] = t
uploadoptions['file'] = f2
if 'file' in request.FILES and request.FILES['file'] is not None:
filename, file_path = handle_uploaded_file(request.FILES['file'])
else:
messages.error(request,"No file attached")
return HttpResponseRedirect(reverse("workout_upload_view"))
url = settings.UPLOAD_SERVICE_URL
uploadoptions['file'] = file_path
_ = myqueue(queuehigh,
handle_request_post,
url,
uploadoptions
)
messages.info(
request,
"The file was too large to process in real time."
" It will be processed in a background process."
" You will receive an email when it is ready")
response = upload_handler(uploadoptions, file_path)
if response["status"] not in ["processing"]:
messages.error(request, response["message"])
url = reverse('workout_upload_view')
if is_ajax: # pragma: no cover
return JSONResponse({'result': 1, 'url': url})
return HttpResponseRedirect(url)
else:
response = HttpResponseRedirect(url)
return response
messages.info(request, response["message"])
if not id: # pragma: no cover
messages.error(request, message)
url = reverse('workout_upload_view')
if is_ajax: # pragma: no cover
return JSONResponse({'result': 0, 'url': url})
# redirect to workouts_view
url = reverse('workouts_view')
return HttpResponseRedirect(url)
else:
response = HttpResponseRedirect(url)
return response
elif id == -1: # pragma: no cover
message = 'The zip archive will be processed in the background." \
" The files in the archive will only be uploaded without the extra actions." \
" You will receive email when the workouts are ready.'
messages.info(request, message)
url = reverse('workout_upload_view')
if is_ajax:
return JSONResponse({'result': 1, 'url': url})
else:
response = HttpResponseRedirect(url)
return response
else:
if message: # pragma: no cover
messages.error(request, message)
messages.error(request, "error")
w = Workout.objects.get(id=id)
url = reverse('workout_edit_view',
kwargs={
'id': encoder.encode_hex(w.id),
})
if is_ajax: # pragma: no cover
response = {'result': 1, 'url': url}
else:
response = HttpResponseRedirect(url)
r = getrower(request.user)
if (make_plot): # pragma: no cover
res, jobid = uploads.make_plot(r, w, f1, f2, plottype, t)
if res == 0:
messages.error(request, jobid)
else:
try:
request.session['async_tasks'] += [
(jobid, 'make_plot')]
except KeyError:
request.session['async_tasks'] = [(jobid, 'make_plot')]
elif r.staticchartonupload is not None:
plottype = r.staticchartonupload
res, jobid = uploads.make_plot(r, w, f1, f2, plottype, t)
# upload to C2
if (upload_to_c2): # pragma: no cover
try:
c2integration = C2Integration(request.user)
id = c2integration.workout_export(w)
except NoTokenError:
id = 0
message = "Something went wrong with the Concept2 sync"
messages.error(request, message)
if (upload_to_strava): # pragma: no cover
strava_integration = StravaIntegration(request.user)
try:
id = strava_integration.workout_export(w)
except NoTokenError:
id = 0
message = "Please connect to Strava first"
messages.error(request, message)
if (upload_to_st): # pragma: no cover
st_integration = SportTracksIntegration(request.user)
try:
id = st_integration.workout_export(w)
except NoTokenError:
message = "Please connect to SportTracks first"
id = 0
messages.error(request, message)
if (upload_to_tp): # pragma: no cover
tp_integration = TPIntegration(request.user)
try:
id = tp_integration.workout_export(w)
except NoTokenError:
message = "Please connect to TrainingPeaks first"
messages.error(request, message)
if (upload_to_intervals):
intervals_integration = IntervalsIntegration(request.user)
try:
id = intervals_integration.workout_export(w)
except NoTokenError:
message = "Please connect to Intervals.icu first"
messages.error(request, message)
if int(registrationid) < 0: # pragma: no cover
race = VirtualRace.objects.get(id=-int(registrationid))
if race.sessiontype == 'race':
result, comments, errors, jobid = add_workout_race(
[w], race, r, doregister=True,
)
if result:
messages.info(
request,
"We have submitted your workout to the race")
for c in comments:
messages.info(request, c)
for er in errors:
messages.error(request, er)
elif race.sessiontype == 'indoorrace':
result, comments, errors, jobid = add_workout_indoorrace(
[w], race, r, doregister=True,
)
if result:
messages.info(
request,
"We have submitted your workout to the race")
for c in comments:
messages.info(request, c)
for er in errors:
messages.error(request, er)
elif race.sessiontype in ['fastest_time', 'fastest_distance']:
result, comments, errors, jobid = add_workout_fastestrace(
[w], race, r, doregister=True,
)
if result:
messages.info(
request, "We have submitted your workout to the race")
for c in comments:
messages.info(request, c)
for er in errors:
messages.error(request, er)
if int(registrationid) > 0: # pragma: no cover
races = VirtualRace.objects.filter(
registration_closure__gt=timezone.now()
)
if raceid != 0:
races = VirtualRace.objects.filter(
registration_closure__gt=timezone.now(),
id=raceid,
)
registrations = IndoorVirtualRaceResult.objects.filter(
race__in=races,
id=registrationid,
userid=r.id,
)
registrations2 = VirtualRaceResult.objects.filter(
race__in=races,
id=registrationid,
userid=r.id,
)
if int(registrationid) in [r.id for r in registrations]: # pragma: no cover
# indoor race
registrations = registrations.filter(id=registrationid)
if registrations:
race = registrations[0].race
if race.sessiontype == 'indoorrace':
result, comments, errors, jobid = add_workout_indoorrace(
[w], race, r, recordid=registrations[0].id
)
elif race.sessiontype in ['fastest_time', 'fastest_distance']:
result, comments, errors, jobid = add_workout_fastestrace(
[w], race, r, recordid=registrations[0].id
)
if result:
messages.info(
request,
"We have submitted your workout to the race")
for c in comments:
messages.info(request, c)
for er in errors:
messages.error(request, er)
if int(registrationid) in [r.id for r in registrations2]: # pragma: no cover
# race
registrations = registrations2.filter(id=registrationid)
if registrations:
race = registrations[0].race
if race.sessiontype == 'race':
result, comments, errors, jobid = add_workout_race(
[w], race, r, recordid=registrations[0].id
)
elif race.sessiontype in ['fastest_time', 'fastest_distance']:
result, comments, errors, jobid = add_workout_fastestrace(
[w], race, r, recordid=registrations[0].id
)
if result:
messages.info(
request,
"We have submitted your workout to the race")
for c in comments:
messages.info(request, c)
for er in errors:
messages.error(request, er)
if registrationid != 0: # pragma: no cover
try:
url = reverse('virtualevent_view',
kwargs={
'id': race.id,
})
except UnboundLocalError:
if landingpage != 'workout_upload_view':
url = reverse(landingpage,
kwargs={
'id': encoder.encode_hex(w.id),
})
else: # pragma: no cover
url = reverse(landingpage)
elif landingpage != 'workout_upload_view': # pragma: no cover
url = reverse(landingpage,
kwargs={
'id': encoder.encode_hex(w.id),
})
else: # pragma: no cover
url = reverse(landingpage)
if is_ajax: # pragma: no cover
response = {'result': 1, 'url': url}
else:
response = HttpResponseRedirect(url)
else:
if not is_ajax: # pragma: no cover
response = render(request,
'document_form.html',
{'form': form,
'teams': get_my_teams(request.user),
'optionsform': optionsform,
})
if is_ajax: # pragma: no cover
return JSONResponse(response)
else:
return response
else:
if not is_ajax:
form = DocumentsForm(initial=docformoptions)
optionsform = UploadOptionsForm(initial=uploadoptions,
request=request, raceid=raceid)
return render(request, 'document_form.html',
return render(request, 'file_upload.html',
{'form': form,
'active': 'nav-workouts',
'breadcrumbs': breadcrumbs,
'teams': get_my_teams(request.user),
'optionsform': optionsform,
})
else: # pragma: no cover
return {'result': 0}
# This is the main view for processing uploaded files
@user_passes_test(ispromember, login_url="/rowers/paidplans", redirect_field_name=None,
@@ -5713,6 +5343,8 @@ def team_workout_upload_view(request, userid=0, message="",
'plottype': 'timeplot',
}):
r = getrower(request.user)
if 'uploadoptions' in request.session:
uploadoptions = request.session['uploadoptions']
else:
@@ -5732,148 +5364,7 @@ def team_workout_upload_view(request, userid=0, message="",
make_plot = uploadoptions['make_plot']
plottype = uploadoptions['plottype']
r = getrower(request.user)
if request.method == 'POST':
form = DocumentsForm(request.POST, request.FILES)
optionsform = TeamUploadOptionsForm(request.POST)
rowerform = TeamInviteForm(request.POST)
rowerform.fields.pop('email')
rowers = Rower.objects.filter(
coachinggroups__in=[r.mycoachgroup]
).distinct()
rowerform.fields['user'].queryset = User.objects.filter(
rower__in=rowers).distinct()
rowerform.fields['user'].required = True
if form.is_valid() and rowerform.is_valid():
f = request.FILES.get('file', False)
if f:
res = handle_uploaded_file(f)
else: # pragma: no cover
messages.error(request, 'No file attached')
response = render(request,
'team_document_form.html',
{'form': form,
'teams': get_my_teams(request.user),
'optionsform': optionsform,
'rowerform': rowerform,
})
return response
t = form.cleaned_data['title']
offline = form.cleaned_data['offline']
boattype = form.cleaned_data['boattype']
workouttype = form.cleaned_data['workouttype']
if rowerform.is_valid():
u = rowerform.cleaned_data['user']
r = getrower(u)
if not can_add_workout_member(request.user, r): # pragma: no cover
message = 'Please select a rower'
messages.error(request, message)
messages.info(request, successmessage)
response = render(request,
'team_document_form.html',
{'form': form,
'teams': get_my_teams(request.user),
'optionsform': optionsform,
'rowerform': rowerform,
})
return response
workouttype = form.cleaned_data['workouttype']
if optionsform.is_valid():
make_plot = optionsform.cleaned_data['make_plot']
plottype = optionsform.cleaned_data['plottype']
uploadoptions = {
'makeprivate': False,
'make_plot': make_plot,
'plottype': plottype,
'upload_to_C2': False,
}
request.session['uploadoptions'] = uploadoptions
f1 = res[0] # file name
f2 = res[1] # file name incl media directory
if not offline:
id, message, f2 = dataprep.new_workout_from_file(
r, f2,
workouttype=workouttype,
boattype=boattype,
makeprivate=False,
title=t,
notes=''
)
else: # pragma: no cover
_ = myqueue(
queuehigh,
handle_zip_file,
r.user.email,
t,
f2,
emailbounced=r.emailbounced
)
messages.info(
request,
"The file was too large to process in real time."
" It will be processed in a background process."
" The user will receive an email when it is ready"
)
url = reverse('team_workout_upload_view')
response = HttpResponseRedirect(url)
return response
if not id: # pragma: no cover
messages.error(request, message)
url = reverse('team_workout_upload_view')
response = HttpResponseRedirect(url)
return response
elif id == -1: # pragma: no cover
message = 'The zip archive will be processed in the background." \
" The files in the archive will only be uploaded without the extra actions." \
" You will receive email when the workouts are ready.'
messages.info(request, message)
url = reverse('team_workout_upload_view')
response = HttpResponseRedirect(url)
return response
else:
successmessage = "The workout was added to the user's account"
messages.info(request, successmessage)
url = reverse('team_workout_upload_view')
response = HttpResponseRedirect(url)
w = Workout.objects.get(id=id)
r = getrower(request.user)
if (make_plot): # pragma: no cover
id, jobid = uploads.make_plot(r, w, f1, f2, plottype, t)
elif r.staticchartonupload:
plottype = r.staticchartonupload
id, jobid = uploads.make_plot(r, w, f1, f2, plottype, t)
else:
response = render(request,
'team_document_form.html',
{'form': form,
'teams': get_my_teams(request.user),
'active': 'nav-workouts',
'breadcrumbs': breadcrumbs,
'optionsform': optionsform,
'rowerform': rowerform,
})
return response
else:
form = DocumentsForm()
form = DocumentsForm(initial=uploadoptions)
optionsform = TeamUploadOptionsForm(initial=uploadoptions)
rowerform = TeamInviteForm(userid=userid)
rowerform.fields.pop('email')
@@ -5885,16 +5376,66 @@ def team_workout_upload_view(request, userid=0, message="",
rowerform.fields['user'].queryset = User.objects.filter(
rower__in=rowers).distinct()
return render(request, 'team_document_form.html',
r = getrower(request.user)
if request.method == 'POST':
form = DocumentsForm(request.POST, request.FILES)
optionsform = TeamUploadOptionsForm(request.POST)
rowerform = TeamInviteForm(request.POST)
rowerform.fields.pop('email')
rowers = Rower.objects.filter(
coachinggroups__in=[r.mycoachgroup]
).distinct()
rowerform.fields['user'].queryset = User.objects.filter(
rower__in=rowers).distinct()
rowerform.fields['user'].required = True
if form.is_valid() and rowerform.is_valid() and optionsform.is_valid():
uploadoptions = form.cleaned_data.copy()
uploadoptions.update(optionsform.cleaned_data)
uploadoptions.update(rowerform.cleaned_data)
request.session['uploadoptions'] = uploadoptions
if 'file' in request.FILES and request.FILES['file'] is not None:
filename, file_path = handle_uploaded_file(request.FILES['file'])
else:
messages.error(request,"No file attached")
return HttpResponseRedirect(reverse("team_workout_upload_view"))
uploadoptions['file'] = file_path
u = rowerform.cleaned_data['user']
r = getrower(u)
if not can_add_workout_member(request.user, r): # pragma: no cover
message = 'Please select a rower'
messages.error(request, message)
uploadoptions['user'] = u.id
response = upload_handler(uploadoptions, file_path)
if response["status"] not in ["processing"]:
messages.error(request, response["message"])
url = reverse('team_workout_upload_view')
return HttpResponseRedirect(url)
else:
messages.info(request, response["message"])
# redirect to workouts_view
url = reverse('team_workout_upload_view')
return HttpResponseRedirect(url)
else:
messages.error(request, "error")
response = render(request,
'team_document_form.html',
{'form': form,
# 'teams':get_my_teams(request.user),
'optionsform': optionsform,
'teams': get_my_teams(request.user),
'active': 'nav-workouts',
'breadcrumbs': breadcrumbs,
# 'rower':r,
'optionsform': optionsform,
'rowerform': rowerform,
})
return response
# A page with all the recent graphs (searchable on workout name)
@login_required()