# intervals.icu sync integration (page-scrape artifacts "880 lines / 28 KiB / Python" removed)
from .integrations import SyncIntegration, NoTokenError, create_or_update_syncrecord, get_known_ids
|
|
from rowers.models import Rower, User, Workout, TombStone, PlannedSession
|
|
from rowingdata import rowingdata
|
|
from rowingdata import FITParser as FP
|
|
from rowingdata.otherparsers import FitSummaryData
|
|
#from rowers.rower_rules import user_is_not_basic, user_is_coachee
|
|
from rowers.dataroutines import totaltime_sec_to_string
|
|
|
|
from rowers import mytypes
|
|
import shutil
|
|
from rowers.rower_rules import is_workout_user, ispromember
|
|
from rowers.utils import myqueue, dologging, custom_exception_handler
|
|
from rowers.tasks import handle_intervals_getworkout, handle_request_post
|
|
|
|
import urllib
|
|
import gzip
|
|
import requests
|
|
import arrow
|
|
import datetime
|
|
import os
|
|
from uuid import uuid4
|
|
from django.utils import timezone
|
|
from datetime import timedelta
|
|
import rowers.dataprep as dataprep
|
|
from rowers.opaque import encoder
|
|
|
|
from rowsandall_app.settings import (
|
|
INTERVALS_CLIENT_ID, INTERVALS_REDIRECT_URI, INTERVALS_CLIENT_SECRET, SITE_URL,
|
|
UPLOAD_SERVICE_SECRET, UPLOAD_SERVICE_URL
|
|
)
|
|
|
|
import django_rq
|
|
# RQ task queues by priority; every job gets a one-hour timeout.
queue = django_rq.get_queue('default', default_timeout=3600)
queuelow = django_rq.get_queue('low', default_timeout=3600)
queuehigh = django_rq.get_queue('high', default_timeout=3600)
|
|
|
|
|
|
def seconds_to_duration(seconds):
    """Format a duration in seconds as "H:MM:SS", or "M:SS" when under an hour."""
    total = int(seconds)
    minutes, secs = divmod(total, 60)
    hours, minutes = divmod(minutes, 60)
    if hours > 0:
        return f"{hours}:{minutes:02}:{secs:02}"
    return f"{minutes}:{secs:02}"
|
|
|
|
# Default JSON request headers for the intervals.icu API.
headers = {
    'Content-Type': 'application/json',
    'Accept': 'application/json'
}

# OAuth endpoints for intervals.icu.
intervals_authorize_url = 'https://intervals.icu/oauth/authorize?'
intervals_token_url = 'https://intervals.icu/api/oauth/token'

# Shared secret used to verify incoming intervals.icu webhook calls.
webhookverification = 'JA9Vt6RNH10'
|
|
|
|
class IntervalsIntegration(SyncIntegration):
    """SyncIntegration backend for intervals.icu.

    Handles OAuth token exchange, activity import/export, and planned
    session (calendar) synchronisation against the intervals.icu v1 API.
    """

    def __init__(self, *args, **kwargs):
        super(IntervalsIntegration, self).__init__(*args, **kwargs)
        # OAuth/endpoint configuration consumed by the SyncIntegration base
        # class.  tokenname/expirydatename/refreshtokenname presumably name
        # attributes on the Rower model -- confirm against SyncIntegration.
        self.oauth_data = {
            'client_id': INTERVALS_CLIENT_ID,
            'client_secret': INTERVALS_CLIENT_SECRET,
            'redirect_uri': INTERVALS_REDIRECT_URI,
            'authorization_uri': intervals_authorize_url,
            'content_type': 'application/json',
            'tokenname': 'intervals_token',
            'expirydatename': 'intervals_exp',
            'refreshtokenname': 'intervals_r',
            'bearer_auth': True,
            'base_url': 'https://intervals.icu/api/v1/',
            'grant_type': 'refresh_token',
            'headers': headers,
            'scope': 'ACTIVITY:WRITE, LIBRARY:READ, CALENDAR:WRITE',
        }
|
|
|
|
def get_token(self, code, *args, **kwargs):
    """Exchange an OAuth authorization code for an access token.

    Returns [access_token, athlete_dict, ''] on success, or
    [0, error_message, 0] on failure (the shape callers expect).
    """
    post_data = {
        'client_id': str(self.oauth_data['client_id']),
        'client_secret': self.oauth_data['client_secret'],
        'code': code,
    }

    response = requests.post(
        intervals_token_url,
        data=post_data,
    )

    if response.status_code not in [200, 201]:
        dologging('intervals.icu.log', response.text)
        return [0, "Failed to get token. ", 0]

    # A 2xx response can still carry malformed JSON or miss the expected
    # keys; report that as a failure instead of raising.
    try:
        token_json = response.json()
        access_token = token_json['access_token']
        athlete = token_json['athlete']
    except (ValueError, KeyError):
        dologging('intervals.icu.log', response.text)
        return [0, "Failed to get token. ", 0]

    return [access_token, athlete, '']
|
|
|
|
def get_name(self):
    """Return the display name of this integration."""
    display_name = 'Intervals'
    return display_name
|
|
|
|
def get_shortname(self):
    """Return the machine-friendly short name of this integration."""
    short_name = 'intervals'
    return short_name
|
|
|
|
def open(self, *args, **kwargs):
    """Obtain a valid access token via the base-class open() and return it."""
    return super(IntervalsIntegration, self).open(*args, **kwargs)
|
|
|
|
def createworkoutdata(self, w, *args, **kwargs) -> str:
    """Build a TCX (gzipped by default) export file for workout *w*.

    If the user opted into 1s resampling, a temporary resampled Workout
    is created, used as the data source, and deleted again afterwards.

    Returns the path of the generated file, or '' when no usable data
    could be found.  With dozip=False the plain .tcx path is returned.
    """
    dozip = kwargs.get('dozip', True)
    # resample if wanted by user, not tested
    if w.user.intervals_resample_to_1s:
        datadf, id, msgs = dataprep.resample(
            w.id, w.user, w, overwrite=False
        )
        w_resampled = Workout.objects.get(id=id)
        filename = w_resampled.csvfilename
    else:
        w_resampled = None
        filename = w.csvfilename
    try:
        row = rowingdata(csvfile=filename)
    except IOError: # pragma: no cover
        # CSV missing on disk -- try to rebuild it from the database.
        data = dataprep.read_df_sql(w.id)
        try:
            datalength = len(data)
        except AttributeError:
            datalength = 0

        # NOTE(review): rebuilding the CSV when datalength == 0 (and
        # giving up when data IS present) looks inverted -- confirm the
        # intended condition.
        if datalength == 0:
            # NOTE(review): `columndict` is not defined anywhere in this
            # module's visible scope -- this line would raise NameError
            # if reached; verify the missing definition/import.
            data.rename(columns=columndict, inplace=True)
            _ = data.to_csv(w.csvfilename+'.gz', index_label='index', compression='gzip')

            try:
                row = rowingdata(csvfile=filename)
            except IOError: # pragma: no cover
                return '' # pragma: no cover
        else:
            return ''

    tcxfilename = w.csvfilename[:-4] + '.tcx'
    try:
        newnotes = w.notes + '\n from'+w.workoutsource+' via rowsandall.com'
    except TypeError:
        # w.notes is None -- start the notes from scratch.
        newnotes = 'from'+w.workoutsource+' via rowsandall.com'

    # The temporary resampled workout is no longer needed once `row` holds
    # the data.
    if w.user.intervals_resample_to_1s and w_resampled:
        w_resampled.delete()
    row.exporttotcx(tcxfilename, notes=newnotes, sport=mytypes.intervalsmapping[w.workouttype])
    if dozip:
        gzfilename = tcxfilename + '.gz'
        try:
            with open(tcxfilename, 'rb') as inF:
                s = inF.read()
            with gzip.GzipFile(gzfilename, 'wb') as outF:
                outF.write(s)
            try:
                os.remove(tcxfilename)
            # NOTE(review): WindowsError only exists on Windows; on other
            # platforms a failing os.remove would surface as NameError
            # here.  Consider OSError instead.
            except WindowsError: # pragma: no cover
                pass
        except FileNotFoundError:
            return ''

        return gzfilename

    return tcxfilename
|
|
|
|
|
|
def workout_export(self, workout, *args, **kwargs) -> str:
    """Export *workout* to intervals.icu as a new activity.

    Uploads the gzipped TCX export, then sets the activity type with a
    follow-up PUT, and records the new activity id on the workout.

    Returns the intervals.icu activity id on success, 0 on any failure
    (callers treat a falsy return as failure).
    """
    try:
        token = self.open()
    except NoTokenError:
        return 0
    dologging('intervals.icu.log', "Exporting workout {id}".format(id=workout.id))

    # Build the (gzipped) TCX export file for this workout.
    filename = self.createworkoutdata(workout)
    if not filename:
        return 0

    params = {
        'name': workout.name,
        'description': workout.notes,
        'external_id': encoder.encode_hex(workout.id),
    }

    authorizationstring = str('Bearer ' + token)
    headers = {
        'Authorization': authorizationstring,
    }

    url = "https://intervals.icu/api/v1/athlete/{athleteid}/activities".format(athleteid=0)

    # FIX: the temporary export file used to be removed only on the full
    # success path, leaking it on every failure return below.
    try:
        with open(filename, 'rb') as f:
            files = {'file': f}
            response = requests.post(url, params=params, headers=headers, files=files)
    finally:
        try:
            os.remove(filename)
        except OSError:
            pass

    if response.status_code not in [200, 201]:
        dologging('intervals.icu.log', response.reason)
        return 0

    id = response.json()['id']
    # Set the activity type on the freshly created activity.
    url = "https://intervals.icu/api/v1/activity/{activityid}".format(activityid=id)

    thetype = mytypes.intervalsmapping[workout.workouttype]
    response = requests.put(url, headers=headers, json={'type': thetype})

    if response.status_code not in [200, 201]:
        # FIX: this failure path previously returned silently without a
        # log entry.
        dologging('intervals.icu.log', response.reason)
        return 0

    workout.uploadedtointervals = id
    workout.save()

    dologging('intervals.icu.log', "Exported workout {id}".format(id=workout.id))

    return id
|
|
|
|
def get_workout_list(self, *args, **kwargs) -> list:
    """List intervals.icu activities in a date window.

    Keyword args ``startdate``/``enddate`` (anything ``arrow.get``
    accepts, including datetimes) override the default window of the
    last 30 days through tomorrow.

    Returns a list of summary dicts with keys id, distance, duration,
    starttime, rowtype, source, name and new ('NEW' when the activity
    is not yet imported); [] on any failure.
    """
    # FIX: annotation corrected from -> int (this method returns lists).
    try:
        token = self.open()
    except NoTokenError:
        return []

    url = self.oauth_data['base_url'] + 'athlete/0/activities?'
    startdate = timezone.now() - timedelta(days=30)
    enddate = timezone.now() + timedelta(days=1)
    startdatestring = kwargs.get("startdate", "")
    enddatestring = kwargs.get("enddate", "")

    # FIX: bare except clauses narrowed -- they also swallowed
    # SystemExit/KeyboardInterrupt.  Parse failures keep the defaults.
    try:
        startdate = arrow.get(startdatestring).datetime
    except Exception:
        pass
    try:
        enddate = arrow.get(enddatestring).datetime
    except Exception:
        pass

    url += 'oldest=' + startdate.strftime('%Y-%m-%d') + '&newest=' + enddate.strftime('%Y-%m-%d')
    headers = {
        'accept': '*/*',
        'authorization': 'Bearer ' + token
    }

    response = requests.get(url, headers=headers)
    if response.status_code != 200:
        dologging('intervals.icu.log', response.text)
        return []

    data = response.json()
    known_interval_ids = get_known_ids(self.rower, 'intervalsid')
    workouts = []

    for item in data:
        try:
            i = item['id']
            r = item['type']
            d = item['distance']
            ttot = seconds_to_duration(item['moving_time'])
            s = item['start_date']
            s2 = ''
            c = item['name']
            if i in known_interval_ids:
                nnn = ''
            else:
                nnn = 'NEW'

            keys = ['id', 'distance', 'duration', 'starttime',
                    'rowtype', 'source', 'name', 'new']

            values = [i, d, ttot, s, r, s2, c, nnn]

            ress = dict(zip(keys, values))
            workouts.append(ress)
        except KeyError:
            # Skip malformed items but keep a trace in the log.
            dologging('intervals.icu.log', item)

    return workouts
|
|
|
|
def update_workout(self, id, *args, **kwargs) -> int:
    """Refresh local Workout metadata from an intervals.icu activity.

    Fetches the activity *id* and copies name, description, type, RPE
    and commute flag onto every local Workout linked to it; also pairs
    the workout with a PlannedSession when the activity carries a
    paired_event_id.

    Returns 1 on success, 0 on any failure.

    NOTE(review): everything after the first ``return 1`` is unreachable
    dead code (re-downloading the FIT file), deliberately kept per the
    "we stop here now" comment -- consider deleting it.
    """
    try:
        _ = self.open()
    except NoTokenError:
        return 0
    r = self.rower

    headers = {
        'Authorization': 'Bearer ' + r.intervals_token,
    }
    url = self.oauth_data['base_url'] + 'activity/' + str(id)
    response = requests.get(url, headers=headers)
    if response.status_code != 200:
        dologging('intervals.icu.log', response.text)
        return 0

    data = response.json()
    ws = Workout.objects.filter(uploadedtointervals=id)

    for w in ws:
        # Copy each field defensively -- any key may be absent.
        try:
            w.name = data['name']
        except KeyError:
            pass
        try:
            w.notes = data['description']
        except KeyError:
            pass
        try:
            w.workouttype = mytypes.intervalsmappinginv[data['type']]
        except KeyError:
            pass
        try:
            w.rpe = data['icu_rpe']
            if w.rpe is None:
                w.rpe = 0
        except KeyError:
            w.rpe = 0
        try:
            w.is_commute = data['commute']
            if w.is_commute is None:
                w.is_commute = False
        except KeyError:
            w.is_commute = False

        w.save()

        # Link the workout to its paired planned session, if any.
        try:
            paired_session_icu_id = data['paired_event_id']
            pss = PlannedSession.objects.filter(intervals_icu_id=paired_session_icu_id, rower=self.rower)
            if pss.count() > 0:
                for ps in pss:
                    w.plannedsession = ps
                    w.save()
        except KeyError:
            pass

    # we stop here now
    return 1

    # get fit file (not used)
    url = self.oauth_data['base_url'] + 'activity/' + str(id) + '/fit-file'
    response = requests.get(url, headers=headers)
    if response.status_code != 200:
        dologging('intervals.icu.log', response.text)
        return 0

    try:
        fit_data = response.content
        fit_filename = 'media/intervals_' + str(id) + '.fit'
        with open(fit_filename, 'wb') as f:
            f.write(fit_data)
    except:
        return 0

    try:
        row = FP(fit_filename)
        rowdata = rowingdata(df=row.df)
        rowsummary = FitSummaryData(fit_filename)
    except Exception as e:
        dologging('intervals.icu.log', e)
        return 0

    for w in ws:
        # copy fit_file to random file name using shutil
        temp_filename = 'media/' + str(uuid4()) + '.fit'
        try:
            shutil.copy(fit_filename, temp_filename)

            uploadoptions = {
                'secret': UPLOAD_SERVICE_SECRET,
                'user': self.rower.user.id,
                'boattype': '1x',
                'workouttype': w.workouttype,
                'file': temp_filename,
                'intervalsid': id,
                'id': w.id,
            }
            url = UPLOAD_SERVICE_URL
            response = requests.post(url, data=uploadoptions)
        except FileNotFoundError:
            return 0
        except Exception as e:
            dologging('intervals.icu.log', e)

    # remove fit_file
    try:
        os.remove(fit_filename)
    except:
        pass

    return 1
|
|
|
|
def get_workout(self, id, *args, **kwargs) -> int:
    """Import activity *id* from intervals.icu as a local workout.

    Already-known ids are delegated to update_workout() unless
    force_download=True.  With do_async=True (default) the actual
    download is queued on the high-priority RQ queue; otherwise the FIT
    file is downloaded and posted to the upload service inline.

    Returns 1 on success/queued, 0 on any failure.
    """
    try:
        _ = self.open()
    except NoTokenError:
        return 0

    r = self.rower

    do_async = kwargs.get('do_async', True)
    force_download = kwargs.get('force_download', False)

    # check if workout with this id already exists
    known_interval_ids = get_known_ids(r, 'intervalsid')
    if not force_download and id in known_interval_ids:
        return self.update_workout(id)

    # Record the id up front -- presumably so re-entrant webhook calls see
    # it as known while the import is in flight; confirm.
    record = create_or_update_syncrecord(r, None, intervalsid=id)

    if do_async:
        _ = myqueue(queuehigh,
                    handle_intervals_getworkout,
                    self.rower,
                    self.rower.intervals_token,
                    id)

        return 1

    # ---- synchronous path (do_async=False) ----
    authorizationstring = str('Bearer ' + r.intervals_token)
    headers = {
        'Authorization': authorizationstring,
    }

    url = self.oauth_data['base_url'] + 'activity/' + str(id)
    response = requests.get(url, headers=headers)
    if response.status_code != 200:
        dologging('intervals.icu.log', response.text)
        return 0

    data = response.json()
    # Pull metadata defensively; every key may be absent.
    try:
        title = data['name']
    except KeyError:
        title = 'Intervals workout'

    try:
        rpe = data['icu_rpe']
    except KeyError:
        rpe = 0

    try:
        is_commute = data['commute']
        if is_commute is None:
            is_commute = False
    except KeyError:
        is_commute = False

    try:
        workouttype = mytypes.intervalsmappinginv[data['type']]
    except KeyError:
        workouttype = 'water'

    # Download the raw FIT file for the activity.
    url = self.oauth_data['base_url'] + 'activity/' + str(id) + '/fit-file'
    response = requests.get(url, headers=headers)

    if response.status_code != 200:
        dologging('intervals.icu.log', response.text)
        return 0

    try:
        fit_data = response.content
        fit_filename = 'media/intervals_' + str(id) + '.fit'
        with open(fit_filename, 'wb') as f:
            f.write(fit_data)
    except:
        return 0

    # Sanity-parse the FIT file before handing it to the upload service.
    try:
        row = FP(fit_filename)
        rowdata = rowingdata(df=row.df)
        rowsummary = FitSummaryData(fit_filename)
        duration = totaltime_sec_to_string(rowdata.duration)
        distance = rowdata.df[' Horizontal (meters)'].iloc[-1]
    except:
        return 0

    uploadoptions = {
        'secret': UPLOAD_SERVICE_SECRET,
        'user': r.user.id,
        'boattype': '1x',
        'workouttype': workouttype,
        'file': fit_filename,
        'intervalsid': id,
        'title': title,
        'rpe': rpe,
        'notes': '',
        'offline': False,
    }

    url = UPLOAD_SERVICE_URL
    handle_request_post(url, uploadoptions)

    # Propagate the commute flag and pair with a planned session, if any.
    try:
        pair_id = data['paired_event_id']
        pss = PlannedSession.objects.filter(intervals_icu_id=pair_id, rower=r)
        ws = Workout.objects.filter(uploadedtointervals=id)
        if is_commute:
            for w in ws:
                w.is_commute = True
                w.save()
        if pss.count() > 0:
            for ps in pss:
                for w in ws:
                    w.plannedsession = ps
                    w.save()
    except KeyError:
        pass
    except PlannedSession.DoesNotExist:
        pass
    except Workout.DoesNotExist:
        pass

    return 1
|
|
|
|
def pair_workout_and_session(self, w, id):
    """Placeholder -- pairing is currently handled inline in
    update_workout()/get_workout(); this hook does nothing."""
    pass
|
|
|
|
|
|
def get_workouts(self, *args, **kwargs):
    """Import all new intervals.icu workouts in a date window.

    Keyword args ``startdate``/``enddate`` (anything ``arrow.get``
    accepts) override the default window of the last 7 days through
    tomorrow.  Returns the number of workouts queued for import.
    """
    startdate = timezone.now() - timedelta(days=7)
    enddate = timezone.now() + timedelta(days=1)
    # BUG FIX: these lookups previously used the datetime objects
    # themselves as keys -- kwargs.get(startdate, "") -- so the string
    # keyword arguments "startdate"/"enddate" were silently ignored
    # (compare get_workout_list, which uses the string keys).
    startdatestring = kwargs.get("startdate", "")
    enddatestring = kwargs.get("enddate", "")

    try:
        startdate = arrow.get(startdatestring).datetime
    except Exception:
        pass
    try:
        enddate = arrow.get(enddatestring).datetime
    except Exception:
        pass

    count = 0
    workouts = self.get_workout_list(startdate=startdate, enddate=enddate)
    for workout in workouts:
        if workout['new'] == 'NEW':
            self.get_workout(workout['id'])
            count += 1

    return count
|
|
|
|
def make_authorization_url(self, *args, **kwargs):
    """Delegate to the shared SyncIntegration implementation."""
    auth_url = super(IntervalsIntegration, self).make_authorization_url(*args, **kwargs)
    return auth_url
|
|
|
|
def token_refresh(self, *args, **kwargs):
    """Delegate to the shared SyncIntegration refresh flow."""
    refreshed = super(IntervalsIntegration, self).token_refresh(*args, **kwargs)
    return refreshed
|
|
|
|
def get_plannedsessions_list(self, *args, **kwargs):
    """Fetch the rower's intervals.icu calendar events (+/- 30 days).

    Returns the decoded JSON list of events, or [] when no token is
    available or the request fails.
    """
    try:
        _ = self.open()
    except NoTokenError:
        return []

    auth_headers = {
        'Authorization': 'Bearer ' + self.rower.intervals_token,
    }

    now = timezone.now()
    window_start = (now - timedelta(days=30)).strftime('%Y-%m-%d')
    window_end = (now + timedelta(days=30)).strftime('%Y-%m-%d')
    events_url = (
        self.oauth_data['base_url']
        + 'athlete/0/events'
        + '?oldest=' + window_start
        + '&newest=' + window_end
    )

    response = requests.get(events_url, headers=auth_headers)
    if response.status_code != 200:
        return []

    return response.json()
|
|
|
|
def update_plannedsession(self, ps, data, *args, **kwargs):
    """Refresh the downloaded FIT file for an existing planned session.

    For WORKOUT events, downloads the structured-workout FIT file to
    media/ and records the bare filename under data['fitfile'].

    Returns the (possibly augmented) event dict, or 0 when no token is
    available.  NOTE(review): the mixed 0/dict return type is awkward --
    callers must treat 0 as failure.
    """
    try:
        _ = self.open()
    except NoTokenError:
        return 0

    r = self.rower

    if data['category'] == 'WORKOUT':
        url = self.oauth_data['base_url'] + 'athlete/0/events/' + str(ps.intervals_icu_id) + '/downloadfit'
        headers = {
            'Authorization': 'Bearer ' + r.intervals_token,
        }
        response = requests.get(url, headers=headers)
        if response.status_code != 200:
            # Download failed: log it but still return the event data.
            dologging('intervals.icu.log', response.text)
        else:
            # Write under media/ but expose only the bare filename --
            # presumably downstream code prepends the media root; confirm.
            filename = 'planned_' + str(ps.intervals_icu_id) + '.fit'
            filename2 = 'media/planned_' + str(ps.intervals_icu_id) + '.fit'
            with open(filename2, 'wb') as f:
                f.write(response.content)

            data['fitfile'] = filename

    return data
|
|
|
|
def get_plannedsession(self, id, *args, **kwargs):
    """Fetch a single calendar event *id* from intervals.icu.

    For WORKOUT events, also downloads the structured-workout FIT file
    into media/ and records the bare filename under data['fitfile'].

    Returns the event dict, or 0 on any failure.
    """
    try:
        _ = self.open()
    except NoTokenError:
        return 0

    r = self.rower

    url = self.oauth_data['base_url'] + 'athlete/0/events/' + str(id)
    headers = {
        'Authorization': 'Bearer ' + r.intervals_token,
    }
    response = requests.get(url, headers=headers)

    if response.status_code != 200:
        dologging('intervals.icu.log', response.text)
        return 0

    data = response.json()

    # get file from athlete/0/events/{id}/downloadfit
    if data['category'] == 'WORKOUT':
        url = self.oauth_data['base_url'] + 'athlete/0/events/' + str(id) + '/downloadfit'
        response = requests.get(url, headers=headers)
        if response.status_code != 200:
            dologging('intervals.icu.log', response.text)
            return 0

        # Write under media/ but expose only the bare filename --
        # presumably downstream code prepends the media root; confirm.
        filename = 'planned_' + str(id) + '.fit'
        filename2 = 'media/planned_' + str(id) + '.fit'
        with open(filename2, 'wb') as f:
            f.write(response.content)

        data['fitfile'] = filename

    return data
|
|
|
|
def plannedsession_create(self, ps, *args, **kwargs):
    """Create a calendar event on intervals.icu for PlannedSession *ps*.

    Regular sessions become WORKOUT events on their preferred date;
    'cycletarget' sessions become TARGET events spanning start/end date
    with a time/distance/load target.  The created event id is stored on
    ps.intervals_icu_id.

    Returns the new intervals.icu event id, or 0 on failure.
    """
    try:
        _ = self.open()
    except NoTokenError:
        return 0

    r = self.rower

    headers = {
        'Authorization': 'Bearer ' + r.intervals_token,
    }

    # Structured-workout description text in intervals.icu format.
    stepstext = ps.steps_intervals()

    category = 'WORKOUT'
    startdate = ps.preferreddate.strftime('%Y-%m-%dT%H:%M:%S')
    enddate = ps.preferreddate.strftime('%Y-%m-%d') + 'T23:59:59'
    if ps.sessiontype == 'cycletarget':
        category = 'TARGET'
        startdate = ps.startdate.strftime('%Y-%m-%dT%H:%M:%S')
        enddate = ps.enddate.strftime('%Y-%m-%d') + 'T23:59:59'

    data = {
        "start_date_local": startdate,
        "type": mytypes.intervalsmapping[ps.sessionsport],
        "category": category,
        "end_date_local": enddate,
        "name": ps.name,
        "description": stepstext,
        "indoor": ps.sessionsport in mytypes.ergtypes,
        'external_id': ps.id,
    }

    # Map the session's target onto the matching intervals.icu field.
    if ps.sessiontype == 'cycletarget':
        if ps.sessionmode == 'time':
            # sessionvalue is minutes; intervals.icu wants seconds.
            data['time_target'] = ps.sessionvalue*60
        elif ps.sessionmode == 'distance':
            data['distance_target'] = ps.sessionvalue
        elif ps.sessionmode == 'rScore':
            data['load_target'] = ps.sessionvalue
        elif ps.sessionmode == 'Trimp':
            # Halved -- presumably to convert TRIMP to intervals.icu
            # load units; confirm the conversion factor.
            data['load_target'] = ps.sessionvalue/2.

    url = self.oauth_data['base_url'] + 'athlete/0/events'
    response = requests.post(url, headers=headers, json=data)

    # NOTE(review): only 200 is accepted here; confirm the API never
    # answers 201 for event creation.
    if response.status_code != 200:
        dologging('intervals.icu.log', response.text)
        return 0

    data = response.json()
    id = data['id']
    ps.intervals_icu_id = id
    ps.save()

    return id
|
|
|
|
def plannedsession_delete(self, ps, *args, **kwargs):
    """Delete the intervals.icu calendar event linked to *ps*.

    On success the link (ps.intervals_icu_id) is cleared.  Returns 1 on
    success, 0 when no token is available or the delete fails.
    """
    try:
        _ = self.open()
    except NoTokenError:
        return 0

    auth_headers = {
        'Authorization': 'Bearer ' + self.rower.intervals_token,
    }

    event_url = self.oauth_data['base_url'] + 'athlete/0/events/' + str(ps.intervals_icu_id)
    response = requests.delete(event_url, headers=auth_headers)

    if response.status_code != 200:
        dologging('intervals.icu.log', response.text)
        return 0

    # Unlink the deleted event from the planned session.
    ps.intervals_icu_id = None
    ps.save()

    return 1
|
|
|
|
def update_calendar(self, r, event, *args, **kwargs):
    """Process an intervals.icu calendar webhook payload for rower *r*.

    For each entry in event["events"], creates or updates the matching
    PlannedSession (name, comment, dates, sport, duration target, FIT
    file / cycle target).  Entries in event["deleted_events"] delete the
    matching PlannedSessions when the rower opted in.  Returns 1.
    """
    try:
        records = event["events"]
    except KeyError:
        records = []

    for record in records:
        id = record['id']
        data = {}
        # NOTE(review): QuerySet.filter() never raises DoesNotExist, so
        # this except clause (and the one in the delete loop below)
        # likely never fires.
        try:
            pss = PlannedSession.objects.filter(intervals_icu_id=id)
            if pss.count() > 0:
                # Known event: refresh its FIT file / payload.
                ps = pss[0]
                data = self.update_plannedsession(ps, record)
            else:
                # New event: fetch full details and create a session.
                data = self.get_plannedsession(id)
                ps = PlannedSession(
                    manager=r.user,
                    intervals_icu_id=id,
                )
                ps.save()
                ps.rower.add(r)
        except PlannedSession.DoesNotExist:
            continue

        # got data
        if data:
            # Only workout and target events are mirrored locally.
            if data['category'].lower() not in ['workout', 'target']:
                continue
            ps.name = data['name']
            try:
                ps.comment = data['description']
            except KeyError:
                ps.comment = ''
            ps.startdate = arrow.get(data['start_date_local']).datetime
            ps.enddate = arrow.get(data['end_date_local']).datetime
            ps.preferreddate = arrow.get(data['start_date_local']).datetime
            try:
                ps.sessionsport = mytypes.intervalsmappinginv[data['type']]
            except KeyError:
                ps.sessionsport = 'water'

            ps.sessiontype = 'session'
            ps.save()
            # Duration target: time_target, else moving_time, else 1h;
            # stored in minutes.
            try:
                timetarget = data['time_target']
            except KeyError:
                timetarget = None
            if timetarget is None:
                try:
                    timetarget = data['moving_time']
                except KeyError:
                    timetarget = None
            if timetarget is None:
                timetarget = 3600
            timetarget = int(timetarget)/60.
            ps.sessionvalue = timetarget
            ps.save()
            if data['category'].lower() == 'workout':
                try:
                    ps.fitfile = data['fitfile']
                    ps.save()
                    ps.update_steps()
                except KeyError:
                    pass
            if data['category'].lower() == 'target':
                ps.sessiontype = 'cycletarget'
                # NOTE(review): unlike the branch above, a missing
                # 'time_target' key here raises KeyError uncaught.
                ps.sessionvalue = int(data['time_target'])/60.
                ps.enddate = ps.startdate + datetime.timedelta(days=6)
                ps.save()

    try:
        deleted_records = event["deleted_events"]
    except KeyError:
        deleted_records = []

    for record in deleted_records:
        id = record['id']
        try:
            pss = PlannedSession.objects.filter(intervals_icu_id=id)
            # Only delete locally when the rower opted in.
            if r.intervals_delete_plannedsession and pss.count() > 0:
                for ps in pss:
                    ps.delete()
        except PlannedSession.DoesNotExist:
            continue

    return 1
|
|
|
|
def import_activities(self, event, *args, **kwargs):
    """Webhook handler: import an activity pushed by intervals.icu.

    Activities carrying our encoded workout id as external_id are
    refreshed via update_workout(); anything else is imported inline via
    get_workout().  Returns 0 when the rower disabled auto-import,
    otherwise 1.
    """
    if not self.rower.intervals_auto_import:
        return 0

    # BUG FIX: a missing "activity" key previously assigned `records = []`
    # but left `record` undefined, so the lookups below raised NameError
    # instead of being skipped gracefully.
    try:
        record = event["activity"]
    except KeyError:
        record = {}

    try:
        id = record['id']
        external_id = record['external_id']
        try:
            # An activity we uploaded ourselves -- just refresh metadata.
            Workout.objects.get(id=encoder.decode_hex(external_id))
            result = self.update_workout(id)
        except Workout.DoesNotExist:
            result = self.get_workout(id, do_async=False)
        except ValueError:
            # external_id is not one of our encoded ids -- fresh import.
            result = self.get_workout(id, do_async=False)
    except KeyError:
        # Payload without id/external_id: nothing to do.
        pass

    return 1
|
|
|
|
|
|
def delete_activities(self, event, *args, **kwargs):
    """Webhook handler: mirror an activity deletion from intervals.icu.

    Deletes the rower's matching workouts when auto-delete is enabled;
    otherwise just clears their intervals.icu link.  Always returns 1.
    """
    # BUG FIX: a missing "activity" key previously assigned `records = []`
    # but left `record` undefined, raising NameError below.
    try:
        record = event["activity"]
    except KeyError:
        record = {}

    try:
        id = record['id']
        try:
            ws = Workout.objects.filter(uploadedtointervals=id)
            for w in ws:
                if w.user == self.rower and self.rower.intervals_auto_delete:
                    w.delete()
                elif w.user == self.rower:
                    # Keep the workout but drop the stale link.
                    w.uploadedtointervals = None
                    w.save()
        except Workout.DoesNotExist:
            pass
    except KeyError:
        pass

    return 1
|
|
|
|
def update_activities(self, event, *args, **kwargs):
    """Webhook handler: refresh local workouts after an activity edit.

    Delegates to update_workout() for the pushed activity id.  Always
    returns 1.
    """
    # BUG FIX: a missing "activity" key previously assigned `records = []`
    # but left `record` undefined, raising NameError below.
    try:
        record = event["activity"]
    except KeyError:
        record = {}

    try:
        id = record['id']
        result = self.update_workout(id)
    except KeyError:
        pass

    return 1
|