Private
Public Access
1
0

Merge branch 'develop' into feature/icu_sessions

This commit is contained in:
2024-12-16 19:34:05 +01:00
5 changed files with 27 additions and 11 deletions

View File

@@ -1648,10 +1648,14 @@ def read_data(columns, ids=[], doclean=True, workstrokesonly=True, debug=False,
         datadf = pl.concat(data)
         existing_columns = [col for col in columns if col in datadf.columns]
         datadf = datadf.select(existing_columns)
-    except (ShapeError, SchemaError, ColumnNotFoundError):
-        data = [
-            df.select(columns)
-            for df in data]
+    except (ShapeError, SchemaError):
+        try:
+            data = [
+                df.select(columns)
+                for df in data]
+        except ColumnNotFoundError:
+            existing_columns = [col for col in columns if col in df.columns]
+            df = df.select(existing_columns)
     # float columns
     floatcolumns = []
@@ -1686,14 +1690,19 @@ def read_data(columns, ids=[], doclean=True, workstrokesonly=True, debug=False,
         ]
     except ComputeError:
         pass
+    except ColumnNotFoundError:
+        pass
     try:
         datadf = pl.concat(data)
     except SchemaError:
-        data = [
-            df.with_columns(cs.integer().cast(pl.Float64)) for df in data
-        ]
-        datadf = pl.concat(data)
+        try:
+            data = [
+                df.with_columns(cs.integer().cast(pl.Float64)) for df in data
+            ]
+            datadf = pl.concat(data)
+        except ShapeError:
+            return pl.DataFrame()

View File

@@ -18,6 +18,7 @@ from uuid import uuid4
 from django.utils import timezone
 from datetime import timedelta
 import rowers.dataprep as dataprep
+from rowers.opaque import encoder
 from rowsandall_app.settings import (
     INTERVALS_CLIENT_ID, INTERVALS_REDIRECT_URI, INTERVALS_CLIENT_SECRET, SITE_URL
@@ -171,6 +172,7 @@ class IntervalsIntegration(SyncIntegration):
         params = {
             'name': workout.name,
             'description': workout.notes,
+            'external_id': encoder.encode_hex(workout.id),
         }

View File

@@ -67,7 +67,10 @@ class TPIntegration(SyncIntegration):
         except TypeError:
             newnotes = 'from '+w.workoutsource+' via rowsandall.com'
-        row.exporttotcx(tcxfilename, notes=newnotes, sport=tpmapping[w.workouttype])
+        try:
+            row.exporttotcx(tcxfilename, notes=newnotes, sport=tpmapping[w.workouttype])
+        except KeyError:
+            row.exporttotcx(tcxfilename, notes=newnotes, sport='other')
         return tcxfilename

View File

@@ -3741,7 +3741,8 @@ def handle_c2_async_workout(alldata, userid, c2token, c2id, delaysec,
         code=uuid4().hex[:16], c2id=c2id)
     startdatetime, starttime, workoutdate, duration, starttimeunix, timezone = utils.get_startdatetime_from_c2data(
-        data)
+        data
+    )
     s = 'Time zone {timezone}, startdatetime {startdatetime}, duration {duration}'.format(
         timezone=timezone, startdatetime=startdatetime,
@@ -3801,6 +3802,7 @@ def handle_c2_async_workout(alldata, userid, c2token, c2id, delaysec,
     strokelength = np.zeros(nr_rows)
     dist2 = 0.1*strokedata.loc[:, 'd']
+    cumdist, intervals = make_cumvalues(dist2)
     try:
         spm = strokedata.loc[:, 'spm']
@@ -3842,7 +3844,7 @@ def handle_c2_async_workout(alldata, userid, c2token, c2id, delaysec,
         ' lapIdx': lapidx,
         ' WorkoutState': 4,
         ' ElapsedTime (sec)': seconds,
-        'cum_dist': dist2
+        'cum_dist': cumdist
     })
     df.sort_values(by='TimeStamp (sec)', ascending=True)

Binary file not shown.