Private
Public Access
1
0

Adding InvalidOperationError to handled polars exceptions

This commit is contained in:
2024-04-13 11:33:24 +02:00
parent d6ee7ca2e5
commit 4b7ab5f923

View File

@@ -32,7 +32,10 @@ import zipfile
import os
from rowers.models import strokedatafields
import polars as pl
from polars.exceptions import ColumnNotFoundError, SchemaError
from polars.exceptions import (
ColumnNotFoundError, SchemaError, ComputeError,
InvalidOperationError
)
from rowingdata import (
KinoMapParser,
@@ -689,29 +692,29 @@ def clean_df_stats_pl(datadf, workstrokesonly=True, ignorehr=True,
return datadf
try:
datadf = datadf.with_columns((-pl.col('catch')).alias('catch'))
except (KeyError, TypeError):
except (KeyError, TypeError, InvalidOperationError):
pass
try:
datadf = datadf.with_columns((pl.col('peakforceangle')+1000).alias('peakforceangle'))
except (KeyError, TypeError):
except (KeyError, TypeError, InvalidOperationError):
pass
try:
datadf = datadf.with_columns((pl.col('hr')+10).alias('hr'))
except (KeyError, TypeError):
except (KeyError, TypeError, InvalidOperationError):
pass
# protect 0 spm values from being nulled
try:
datadf = datadf.with_columns((pl.col('spm')+1.0).alias('spm'))
except (KeyError, TypeError):
except (KeyError, TypeError, InvalidOperationError):
pass
# protect 0 workoutstate values from being nulled
try:
datadf = datadf.with_columns((pl.col('workoutstate')+1).alias('workoutstate'))
except (KeyError, TypeError):
except (KeyError, TypeError, InvalidOperationError):
pass
try:
@@ -2028,16 +2031,28 @@ def dataplep(rowdatadf, id=0, inboard=0.88, forceunit='lbs', bands=True, barchar
if windowsize <= 3:
windowsize = 5
df.with_columns(
(pl.col(" Cadence (stokes/min)").map_batches(lambda x: savgol_filter(x.to_numpy(), windowsize, 3)).explode()
).alias(" Cadence (stokes/min)"))
df.with_columns(
(pl.col(" DriveLength (meters)").map_batches(lambda x: savgol_filter(x.to_numpy(), windowsize, 3)).explode()
).alias(" DriveLength (meters)"))
df.with_columns(
(pl.col(" HRCur (bpm)").map_batches(lambda x: savgol_filter(x.to_numpy(), windowsize, 3)).explode()
).alias(" HRCur (bpm)"))
df.with_columns((pl.col("forceratio").map_batches(lambda x: savgol_filter(x.to_numpy(), windowsize, 3)).explode()).alias("forceratio"))
try:
df.with_columns(
(pl.col(" Cadence (stokes/min)").map_batches(lambda x: savgol_filter(x.to_numpy(), windowsize, 3)).explode()
).alias(" Cadence (stokes/min)"))
except ComputeError:
pass
try:
df.with_columns(
(pl.col(" DriveLength (meters)").map_batches(lambda x: savgol_filter(x.to_numpy(), windowsize, 3)).explode()
).alias(" DriveLength (meters)"))
except ComputeError:
pass
try:
df.with_columns(
(pl.col(" HRCur (bpm)").map_batches(lambda x: savgol_filter(x.to_numpy(), windowsize, 3)).explode()
).alias(" HRCur (bpm)"))
except ComputeError:
pass
try:
df.with_columns((pl.col("forceratio").map_batches(lambda x: savgol_filter(x.to_numpy(), windowsize, 3)).explode()).alias("forceratio"))
except ComputeError:
pass
df = df.with_columns((pl.col(" DriveLength (meters)") / pl.col(" DriveTime (ms)") * 1.0e3).alias("drivespeed"))
@@ -2621,7 +2636,6 @@ def add_c2_stroke_data_db(strokedata, workoutid, starttimeunix, csvfilename,
' WorkoutState': 4,
' ElapsedTime (sec)': seconds,
'cum_dist': dist2,
' WorkoutState': 0,
})
df.sort_values(by='TimeStamp (sec)', ascending=True)