Private
Public Access
1
0

getting intervals_import working

This commit is contained in:
2025-10-22 16:50:37 +02:00
parent c76334c50a
commit 0796f34904
9 changed files with 265 additions and 205 deletions

View File

@@ -42,7 +42,12 @@ from rowers import uploads
from rowingdata import rower as rrower
from rowers.dataroutines import rdata, get_startdate_time_zone, df_resample, checkduplicates, dataplep
from rowers.dataroutines import (
rdata, get_startdate_time_zone, df_resample, checkduplicates, dataplep,
get_workouttype_from_fit,
get_title_from_fit,
get_notes_from_fit,
)
from rowers.mytypes import otetypes, otwtypes
from rowers.utils import totaltime_sec_to_string
from rowers.dataprep import check_marker, checkbreakthrough, update_wps, handle_nonpainsled
@@ -299,7 +304,7 @@ def update_workout_attributes(w, row, file_path, uploadoptions,
)
boattype = uploadoptions.get('boattype', '1x')
workoutsource = 'unknown'
workoutsource = uploadoptions.get('workoutsource', 'unknown')
stravaid = uploadoptions.get('stravaid', 0)
rpe = uploadoptions.get('rpe', 0)
notes = uploadoptions.get('notes', '')
@@ -349,6 +354,8 @@ def update_workout_attributes(w, row, file_path, uploadoptions,
if uploadoptions.get('summary', '') == '':
summary = row.allstats()
else:
summary = uploadoptions.get('summary', '')
if uploadoptions.get('makeprivate', False):
privacy = 'hidden'
@@ -567,6 +574,12 @@ def process_single_file(file_path, uploadoptions, job_id, debug=False, **kwargs)
f2,
fileformat,
)
uploadoptions['summary'] = summary
uploadoptions['oarlength'] = oarlength
uploadoptions['inboard'] = inboard
uploadoptions['useImpeller'] = impeller
if uploadoptions['workouttype'] != 'strave':
uploadoptions['workoutsource'] = fileformat
if not f2:
return {
"status": "error",
@@ -624,7 +637,7 @@ def process_single_file(file_path, uploadoptions, job_id, debug=False, **kwargs)
except Exception as e:
pass
workoutid = uploadoptions.get('workoutid', None)
workoutid = uploadoptions.get('id', None)
if workoutid is not None:
try:
w = Workout.objects.get(id=workoutid)
@@ -679,7 +692,7 @@ def process_single_file(file_path, uploadoptions, job_id, debug=False, **kwargs)
r.ftp, r.sex, r.hrftp, r.max, r.rest, wps_avg)
# make plots
if uploadoptions['make_plot']:
if uploadoptions.get('makeplot', False):
plottype = uploadoptions.get('plottype', 'timeplot')
res, jobid = uploads.make_plot(r, w, f1, f2, plottype, w.name)
elif r.staticchartonupload != 'None': # pragma: no cover

View File

@@ -10,7 +10,7 @@ from rowers import mytypes
import shutil
from rowers.rower_rules import is_workout_user, ispromember
from rowers.utils import myqueue, dologging, custom_exception_handler
from rowers.tasks import handle_intervals_getworkout, handle_request_post
from rowers.upload_tasks import handle_intervals_getworkout
import urllib
import gzip

View File

@@ -106,7 +106,6 @@ def add_workout_from_data(userid, nkid, data, strokedata, source='nk', splitdata
boattype = "1x"
uploadoptions = {
'secret': UPLOAD_SERVICE_SECRET,
'user': userid,
'file': csvfilename,
'title': title,
@@ -128,26 +127,14 @@ def add_workout_from_data(userid, nkid, data, strokedata, source='nk', splitdata
dologging('nklog.log',json.dumps(uploadoptions))
dologging('metrics.log','NK ID {nkid}'.format(nkid=nkid))
session = requests.session()
newHeaders = {'Content-type': 'application/json', 'Accept': 'text/plain'}
session.headers.update(newHeaders)
response = session.post(UPLOAD_SERVICE_URL, json=uploadoptions)
response = upload_handler(uploadoptions, csvfilename)
if response.status_code != 200: # pragma: no cover
if response["status"] == "progressing": # pragma: no cover
return 0, response.text
else:
dologging('nklog.log','Upload response: {resp}'.format(resp=json.dumps(response)))
try:
workoutid = response.json()['id']
except KeyError: # pragma: no cover
workoutid = 0
# dologging('nklog.log','Workout ID {id}'.format(id=workoutid))
# evt update workout summary
# return
return workoutid, ""
return 0, response
def get_nk_intervalstats(workoutdata, strokedata):

View File

@@ -434,49 +434,6 @@ def handle_loadnextweek(rower, debug=False, **kwargs):
return 0
@app.task
def handle_assignworkouts(workouts, rowers, remove_workout, debug=False, **kwargs):
for workout in workouts:
uploadoptions = {
'secret': UPLOAD_SERVICE_SECRET,
'title': workout.name,
'boattype': workout.boattype,
'workouttype': workout.workouttype,
'inboard': workout.inboard,
'oarlength': workout.oarlength,
'summary': workout.summary,
'elapsedTime': 3600.*workout.duration.hour+60*workout.duration.minute+workout.duration.second,
'totalDistance': workout.distance,
'useImpeller': workout.impeller,
'seatNumber': workout.seatnumber,
'boatName': workout.boatname,
'portStarboard': workout.empowerside,
}
for rower in rowers:
failed = False
csvfilename = 'media/{code}.csv'.format(code=uuid4().hex[:16])
try:
with open(csvfilename,'wb') as f:
shutil.copy(workout.csvfilename,csvfilename)
except FileNotFoundError:
try:
with open(csvfilename,'wb') as f:
csvfilename = csvfilename+'.gz'
shutil.copy(workout.csvfilename+'.gz', csvfilename)
except:
failed = True
if not failed:
uploadoptions['user'] = rower.user.id
uploadoptions['file'] = csvfilename
session = requests.session()
newHeaders = {'Content-type': 'application/json', 'Accept': 'text/plain'}
session.headers.update(newHeaders)
response = session.post(UPLOAD_SERVICE_URL, json=uploadoptions)
print(response.text)
if remove_workout:
workout.delete()
return 1
@app.task
def create_sessions_from_json_async(plansteps, rower, startdate, manager, planbyrscore, plan, plan_past_days, debug=False, **kwargs):
@@ -532,19 +489,6 @@ def create_sessions_from_json_async(plansteps, rower, startdate, manager, planby
return 1
@app.task
def handle_post_workout_api(uploadoptions, debug=False, **kwargs): # pragma: no cover
session = requests.session()
newHeaders = {'Content-type': 'application/json', 'Accept': 'text/plain'}
session.headers.update(newHeaders)
response = session.post(UPLOAD_SERVICE_URL, json=uploadoptions)
if response.status_code != 200:
return 0
return 1
@app.task
def handle_remove_workouts_team(ws, t, debug=False, **kwargs): # pragma: no cover
@@ -3744,128 +3688,6 @@ def handle_intervals_updateworkout(workout, debug=False, **kwargs):
return 0
@app.task
def handle_intervals_getworkout(rower, intervalstoken, workoutid, debug=False, **kwargs):
authorizationstring = str('Bearer '+intervalstoken)
headers = {
'authorization': authorizationstring,
}
url = "https://intervals.icu/api/v1/activity/{}".format(workoutid)
response = requests.get(url, headers=headers)
if response.status_code != 200:
return 0
data = response.json()
try:
title = data['name']
except KeyError:
title = 'Intervals workout'
try:
workouttype = intervalsmappinginv[data['type']]
except KeyError:
workouttype = 'water'
try:
rpe = data['icu_rpe']
except KeyError:
rpe = 0
try:
is_commute = data['commute']
if is_commute is None:
is_commute = False
except KeyError:
is_commute = False
try:
subtype = data['sub_type']
if subtype is not None:
subtype = subtype.capitalize()
except KeyError:
subtype = None
try:
is_race = data['race']
if is_race is None:
is_race = False
except KeyError:
is_race = False
url = "https://intervals.icu/api/v1/activity/{workoutid}/fit-file".format(workoutid=workoutid)
response = requests.get(url, headers=headers)
if response.status_code != 200:
return 0
try:
fit_data = response.content
fit_filename = 'media/'+f'{uuid4().hex[:16]}.fit'
with open(fit_filename, 'wb') as fit_file:
fit_file.write(fit_data)
except Exception as e:
return 0
try:
row = FP(fit_filename)
rowdata = rowingdata.rowingdata(df=row.df)
rowsummary = FitSummaryData(fit_filename)
duration = totaltime_sec_to_string(rowdata.duration)
distance = rowdata.df[" Horizontal (meters)"].iloc[-1]
except Exception as e:
return 0
uploadoptions = {
'secret': UPLOAD_SERVICE_SECRET,
'user': rower.user.id,
'boattype': '1x',
'workouttype': workouttype,
'file': fit_filename,
'intervalsid': workoutid,
'title': title,
'rpe': rpe,
'notes': '',
'offline': False,
}
url = UPLOAD_SERVICE_URL
handle_request_post(url, uploadoptions)
try:
paired_event_id = data['paired_event_id']
ws = Workout.objects.filter(uploadedtointervals=workoutid)
for w in ws:
w.sub_type = subtype
w.save()
if is_commute:
for w in ws:
w.is_commute = True
w.sub_type = "Commute"
w.save()
if is_race:
for w in ws:
w.is_race = True
w.save()
if ws.count() > 0:
pss = PlannedSession.objects.filter(rower=rower,intervals_icu_id=paired_event_id)
if pss.count() > 0:
for ps in pss:
for w in ws:
w.plannedsession = ps
w.save()
except KeyError:
pass
except Workout.DoesNotExist:
pass
except PlannedSession.DoesNotExist:
pass
return 1
@app.task
def handle_c2_getworkout(userid, c2token, c2id, defaulttimezone, debug=False, **kwargs):

Binary file not shown.

231
rowers/upload_tasks.py Normal file
View File

@@ -0,0 +1,231 @@
"""Module setup for rowers.upload_tasks.

Bootstraps Django (settings module, WSGI application) before importing the
ORM models, then pulls in the helpers these Celery upload tasks need.
Order matters here: the environment variables and ``get_wsgi_application()``
call must run before any ``rowers.models`` import.
"""
import os
from uuid import uuid4
import shutil
import requests
from rowingdata import FITParser as FP
from rowingdata.otherparsers import FitSummaryData
import rowingdata
# Allow ORM access from Celery worker threads/async contexts.
os.environ["DJANGO_ALLOW_ASYNC_UNSAFE"] = "true"
from YamJam import yamjam
CFG = yamjam()['rowsandallapp']
try:
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", CFG['settings_name'])
except KeyError:  # pragma: no cover
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "rowsandall_app.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
from rowers.models import (
    Workout, GeoPolygon, GeoPoint, GeoCourse,
    VirtualRaceResult, CourseTestResult, Rower,
    GraphImage, Team, PlannedSession
)
from rowers.session_utils import is_session_complete
from rowers.nkimportutils import (
    get_nk_summary, get_nk_allstats, get_nk_intervalstats, getdict, strokeDataToDf,
    add_workout_from_data
)
from rowers.mytypes import intervalsmappinginv
# NOTE(review): tasks.py imported totaltime_sec_to_string from rowers.utils;
# confirm rowers.dataroutines re-exports it, otherwise this import fails.
from rowers.dataroutines import (
    totaltime_sec_to_string,
)
from rowers.celery import app
from celery import shared_task
SITE_URL = CFG['site_url']
SITE_URL_DEV = CFG['site_url']
PROGRESS_CACHE_SECRET = CFG['progress_cache_secret']
try:
    SETTINGS_NAME = CFG['settings_name']
except KeyError:  # pragma: no cover
    # Fixed typo: was 'rowsandall_ap.settings', inconsistent with the
    # 'rowsandall_app.settings' fallback used above.
    SETTINGS_NAME = 'rowsandall_app.settings'
NK_API_LOCATION = CFG["nk_api_location"]
TP_CLIENT_ID = CFG["tp_client_id"]
TP_CLIENT_SECRET = CFG["tp_client_secret"]
TP_API_LOCATION = CFG["tp_api_location"]
tpapilocation = TP_API_LOCATION
# NOTE(review): UPLOAD_SERVICE_SECRET is used by handle_assignworkouts below
# but is not defined or imported in this module — add its definition (it
# existed in rowers.tasks) or the task will raise NameError at runtime.
from rowers.dataflow import upload_handler
@app.task
def handle_assignworkouts(workouts, rowers, remove_workout, debug=False, **kwargs):
    """Assign each workout to every rower by copying its file and re-uploading.

    For every (workout, rower) pair, the workout's CSV (or, as a fallback,
    its gzipped variant) is copied to a fresh random media filename and
    pushed through ``upload_handler`` under that rower's user id.  Rowers
    whose source file cannot be copied are skipped.  When ``remove_workout``
    is true the source workout is deleted after its assignments are done.

    Returns 1 (Celery task result).
    """
    for workout in workouts:
        # Template shared by all rowers; per-rower fields ('user', 'file')
        # are added on a copy so iterations don't leak state into each other.
        # NOTE(review): UPLOAD_SERVICE_SECRET is not defined in this module —
        # confirm it is configured before this task runs.
        base_options = {
            'secret': UPLOAD_SERVICE_SECRET,
            'title': workout.name,
            'boattype': workout.boattype,
            'workouttype': workout.workouttype,
            'inboard': workout.inboard,
            'oarlength': workout.oarlength,
            'summary': workout.summary,
            'elapsedTime': 3600.*workout.duration.hour+60*workout.duration.minute+workout.duration.second,
            'totalDistance': workout.distance,
            'useImpeller': workout.impeller,
            'seatNumber': workout.seatnumber,
            'boatName': workout.boatname,
            'portStarboard': workout.empowerside,
        }
        for rower in rowers:
            # Fresh random name so concurrent assignments never collide.
            csvfilename = 'media/{code}.csv'.format(code=uuid4().hex[:16])
            # Fixed: the original wrapped shutil.copy in a pointless
            # open(csvfilename, 'wb'), which left a stray empty .csv behind
            # whenever the gz fallback was taken, and used a bare except.
            try:
                shutil.copy(workout.csvfilename, csvfilename)
            except FileNotFoundError:
                try:
                    csvfilename = csvfilename + '.gz'
                    shutil.copy(workout.csvfilename + '.gz', csvfilename)
                except OSError:
                    continue  # neither plain nor gzipped source exists
            options = dict(base_options, user=rower.user.id, file=csvfilename)
            upload_handler(options, csvfilename)
        if remove_workout:
            workout.delete()
    return 1
@app.task
def handle_post_workout_api(uploadoptions, debug=False, **kwargs):  # pragma: no cover
    """Celery task: hand a workout-upload options dict to the upload handler.

    The file path is taken from ``uploadoptions['file']``; the handler's
    result is returned unchanged.
    """
    return upload_handler(uploadoptions, uploadoptions['file'])
@app.task
def handle_intervals_getworkout(rower, intervalstoken, workoutid, debug=False, **kwargs):
    """Import one activity from intervals.icu into a local Workout.

    Fetches the activity metadata and its FIT file from the intervals.icu
    API, writes the FIT file to ``media/``, creates a placeholder ``Workout``
    (so the upload can carry its id), runs ``upload_handler``, and finally
    links/annotates the uploaded workout(s) (sub-type, commute/race flags,
    paired planned session).

    Returns the new workout id on success, 0 on any failure along the way.
    """
    # Bearer-token auth header for the intervals.icu REST API.
    authorizationstring = str('Bearer '+intervalstoken)
    headers = {
        'authorization': authorizationstring,
    }
    url = "https://intervals.icu/api/v1/activity/{}".format(workoutid)
    response = requests.get(url, headers=headers)
    if response.status_code != 200:
        return 0
    data = response.json()
    # Extract optional metadata fields, each with a safe fallback value.
    try:
        title = data['name']
    except KeyError:
        title = 'Intervals workout'
    try:
        workouttype = intervalsmappinginv[data['type']]
    except KeyError:
        workouttype = 'water'
    try:
        rpe = data['icu_rpe']
    except KeyError:
        rpe = 0
    try:
        is_commute = data['commute']
        if is_commute is None:
            is_commute = False
    except KeyError:
        is_commute = False
    try:
        subtype = data['sub_type']
        if subtype is not None:
            subtype = subtype.capitalize()
    except KeyError:
        subtype = None
    try:
        is_race = data['race']
        if is_race is None:
            is_race = False
    except KeyError:
        is_race = False
    # Download the raw FIT file for this activity.
    url = "https://intervals.icu/api/v1/activity/{workoutid}/fit-file".format(workoutid=workoutid)
    response = requests.get(url, headers=headers)
    if response.status_code != 200:
        return 0
    try:
        # Persist the FIT bytes under a fresh random media filename.
        fit_data = response.content
        fit_filename = 'media/'+f'{uuid4().hex[:16]}.fit'
        with open(fit_filename, 'wb') as fit_file:
            fit_file.write(fit_data)
    except Exception as e:
        return 0
    try:
        # Parse the FIT file to derive the duration for the placeholder.
        row = FP(fit_filename)
        rowdata = rowingdata.rowingdata(df=row.df)
        rowsummary = FitSummaryData(fit_filename)  # NOTE(review): unused below — confirm it is needed
        duration = totaltime_sec_to_string(rowdata.duration)
        distance = rowdata.df[" Horizontal (meters)"].iloc[-1]  # NOTE(review): unused below
    except Exception as e:
        return 0
    # Placeholder Workout saved first so the upload can reference its id
    # and later queries can find it via uploadedtointervals.
    w = Workout(
        user=rower,
        duration=duration,
        uploadedtointervals=workoutid,
    )
    w.save()
    uploadoptions = {
        'user': rower.user.id,
        'boattype': '1x',
        'workouttype': workouttype,
        'file': fit_filename,
        'intervalsid': workoutid,
        'title': title,
        'rpe': rpe,
        'notes': '',
        'offline': False,
        'id': w.id,  # ties the upload back to the placeholder Workout
    }
    response = upload_handler(uploadoptions, fit_filename)
    if response['status'] != 'processing':
        # NOTE(review): the placeholder Workout saved above is left behind
        # on this failure path — confirm that is intended.
        return 0
    try:
        # Annotate all workouts linked to this intervals.icu id and attach
        # them to the paired planned session, when one exists.
        paired_event_id = data['paired_event_id']
        ws = Workout.objects.filter(uploadedtointervals=workoutid)
        for w in ws:
            w.sub_type = subtype
            w.save()
        if is_commute:
            for w in ws:
                w.is_commute = True
                w.sub_type = "Commute"
                w.save()
        if is_race:
            for w in ws:
                w.is_race = True
                w.save()
        if ws.count() > 0:
            pss = PlannedSession.objects.filter(rower=rower,intervals_icu_id=paired_event_id)
            if pss.count() > 0:
                for ps in pss:
                    for w in ws:
                        w.plannedsession = ps
                        w.save()
    except KeyError:
        # No paired_event_id in the API response — nothing to link.
        pass
    # NOTE(review): .filter() never raises DoesNotExist, so these two
    # handlers look unreachable — confirm before removing.
    except Workout.DoesNotExist:
        pass
    except PlannedSession.DoesNotExist:
        pass
    # w is rebound by the loops above when ws is non-empty.
    return w.id

View File

@@ -2,7 +2,6 @@ from rowers.mytypes import workouttypes, boattypes, ergtypes, otwtypes, workouts
from rowers.rower_rules import is_promember
from rowers.integrations import *
from rowers.utils import (
geo_distance, serialize_list, deserialize_list, uniqify,
str2bool, range_to_color_hex, absolute, myqueue, NoTokenError
@@ -130,6 +129,11 @@ def make_plot(r, w, f1, f2, plottype, title, imagename='', plotnr=0):
def do_sync(w, options, quick=False):
from rowers.integrations import (
C2Integration, IntervalsIntegration,
SportTracksIntegration, TPIntegration,
StravaIntegration,
)
if w.duplicate:
return 0

View File

@@ -777,7 +777,7 @@ def strokedatajson_v3(request):
"""
POST: Add Stroke data to workout
GET: Get stroke data of workout
This v2 API works on stroke based data dict:
This v3 API works on stroke based data dict:
{
"distance": 2100,
"elapsedTime": 592,

View File

@@ -255,7 +255,6 @@ from rowers.plannedsessions import *
from rowers.tasks import handle_makeplot, handle_otwsetpower, handle_sendemailtcx, handle_sendemailcsv
from rowers.tasks import (
handle_intervals_updateworkout,
handle_post_workout_api,
handle_sendemail_newftp,
instroke_static,
fetch_rojabo_session,
@@ -282,10 +281,14 @@ from rowers.tasks import (
handle_send_email_instantplan_notification,
handle_nk_async_workout,
check_tp_workout_id,
handle_assignworkouts,
handle_split_workout_by_intervals,
)
from rowers.upload_tasks import (
handle_assignworkouts,
handle_post_workout_api,
)
from scipy.signal import savgol_filter
try: