Merge branch 'release/v14.92'
.gitignore (vendored)
@@ -2,6 +2,7 @@
 /django_cache/
 *.crt
 *.key
+*.log
 garminlog.log
 strava_webhooks.log
 
@@ -60,6 +61,7 @@ config.yaml
 # virtualenv
 /venv/
 /venv38/
+/venv39/
 /py27/
 /py2/
 /django2/
@@ -3,7 +3,6 @@ apipkg==1.5
 appdirs==1.4.3
 arcgis==1.6.0
 arrow==0.13.1
-asgiref==3.2.10
 asn1crypto==0.24.0
 atomicwrites==1.3.0
 attrs==19.1.0
@@ -13,8 +12,7 @@ billiard==3.6.0.0
 bleach==3.1.0
 bokeh==2.2.3
 boto==2.49.0
-boto3==1.14.7
-botocore==1.17.7
+bottle==0.12.18
 braintree==3.55.0
 cairocffi==1.0.2
 celery==4.3.0
@@ -23,13 +21,13 @@ cffi==1.14.0
 chardet==3.0.4
 Click==7.0
 cloudpickle==1.2.2
-colorama==0.4.4
+colorama==0.4.1
 colorclass==2.2.0
 cookies==2.2.1
 coreapi==2.3.3
 coreschema==0.0.4
 coverage==4.5.3
-cryptography==2.9.2
+cryptography==2.6.1
 cycler==0.10.0
 Cython==0.29.21
 dask==2.20.0
@@ -57,11 +55,10 @@ django-rest-framework==0.1.0
 django-rest-swagger==2.2.0
 django-rq==1.3.1
 django-rq-dashboard==0.3.3
-django-ses==1.0.0
+django-ses==0.8.10
 django-shell-plus==1.1.7
 django-social-share==1.3.2
-django-sslserver==0.22
-django-suit==0.2.26
+django-suit==0.2.28
 django-suit-rq==1.0.1
 django-tz-detect==0.2.9
 djangorestframework==3.9.2
@@ -71,17 +68,17 @@ entrypoints==0.3
 execnet==1.5.0
 factory-boy==2.11.1
 Faker==1.0.4
-fastparquet==0.4.1
+fastparquet==0.3.2
 fitparse==1.1.0
 Flask==1.0.2
 fsspec==0.5.2
 future==0.17.1
-garminconnect==0.1.14
 geocoder==1.38.1
 geoip2==3.0.0
 geos==0.2.1
 grpcio==1.26.0
 grpcio-tools==1.26.0
+gunicorn==20.0.4
 holoviews==1.13.5
 html5lib==1.0.1
 htmlmin==0.1.12
@@ -103,15 +100,14 @@ itypes==1.1.0
 jedi==0.13.3
 jeepney==0.4
 Jinja2==2.10
-jmespath==0.10.0
 json5==0.8.5
 jsonschema==3.0.1
 jupyter==1.0.0
-jupyter-client==5.2.4
-jupyter-console==6.0.0
-jupyter-core==4.4.0
-jupyterlab==0.35.4
-jupyterlab-server==0.3.0
+jupyter-client==6.1.7
+jupyter-console==6.2.0
+jupyter-core==4.7.0
+jupyterlab==0.35.6
+jupyterlab-server==0.2.0
 keyring==18.0.0
 kiwisolver==1.0.1
 kombu==4.5.0
@@ -121,7 +117,6 @@ Markdown==3.0.1
 MarkupSafe==1.1.1
 matplotlib==3.0.3
 maxminddb==1.5.4
-minify==0.1.4
 MiniMockTest==0.5
 mistune==0.8.4
 mock==2.0.0
@@ -151,7 +146,6 @@ pexpect==4.6.0
 pickleshare==0.7.5
 Pillow==8.0.1
 pip-upgrader==1.4.6
-pkginfo==1.6.0
 pluggy==0.9.0
 prometheus-client==0.6.0
 prompt-toolkit==2.0.9
@@ -159,13 +153,11 @@ protobuf==3.11.1
 psycopg2==2.8.1
 ptyprocess==0.6.0
 py==1.8.0
-pyarrow==0.17.1
-pycairo==1.20.0
+pyarrow==2.0.0
+pycairo==1.19.0
 pycparser==2.19
 pyct==0.4.8
-pygeoip==0.3.2
-Pygments==2.7.1
-pyOpenSSL==19.1.0
+Pygments==2.3.1
 pyparsing==2.3.1
 pyrsistent==0.14.11
 pyshp==2.1.0
@@ -174,7 +166,6 @@ pytest-django==3.4.8
 pytest-forked==1.0.2
 pytest-runner==4.4
 pytest-sugar==0.9.2
-pytest-timeout==1.4.2
 pytest-xdist==1.27.0
 python-dateutil==2.8.0
 python-memcached==1.59
@@ -182,23 +173,18 @@ python-twitter==3.5
 pytz==2020.1
 pyviz-comms==0.7.6
 pywin32-ctypes==0.2.0
-pywinpty==0.5.5
 PyYAML==5.1
 pyzmq==18.0.1
 qtconsole==4.4.3
 ratelim==0.1.6
-readme-renderer==28.0
-redis==3.2.1
+redis==3.5.3
 requests==2.23.0
 requests-oauthlib==1.2.0
-requests-toolbelt==0.9.1
-rfc3986==1.4.0
 rowingdata==3.0.6
 rowingphysics==0.5.0
 rq==0.13.0
 rules==2.1
-s3transfer==0.3.3
-scipy==1.5.0
+scipy==1.5.4
 SecretStorage==3.1.1
 Send2Trash==1.5.0
 shell==1.0.1
@@ -221,7 +207,6 @@ toolz==0.10.0
 tornado==6.0.1
 tqdm==4.31.1
 traitlets==4.3.2
-twine==3.2.0
 typing-extensions==3.7.4.3
 units==0.7
 uritemplate==3.0.0
@@ -236,4 +221,3 @@ xlrd==1.2.0
 xmltodict==0.12.0
 yamjam==0.1.7
 yamllint==1.15.0
-yuicompressor==2.4.8
@@ -1430,6 +1430,78 @@ def create_row_df(r,distance,duration,startdatetime,workouttype='rower',
 
 from rowers.utils import totaltime_sec_to_string
 
+def checkbreakthrough(w, r):
+    isbreakthrough = False
+    ishard = False
+    workouttype = w.workouttype
+    if workouttype in rowtypes:
+        cpdf,delta,cpvalues = setcp(w)
+        if not cpdf.empty:
+            if workouttype in otwtypes:
+                res, btvalues, res2 = utils.isbreakthrough(
+                    delta, cpvalues, r.p0, r.p1, r.p2, r.p3, r.cpratio)
+                success = update_rolling_cp(r,otwtypes,'water')
+
+            elif workouttype in otetypes:
+                res, btvalues, res2 = utils.isbreakthrough(
+                    delta, cpvalues, r.ep0, r.ep1, r.ep2, r.ep3, r.ecpratio)
+                success = update_rolling_cp(r,otetypes,'erg')
+            else:
+                res = 0
+                res2 = 0
+            if res:
+                isbreakthrough = True
+            if res2 and not isbreakthrough:
+                ishard = True
+
+    # submit email task to send email about breakthrough workout
+    if isbreakthrough:
+        if r.getemailnotifications and not r.emailbounced:
+            job = myqueue(queuehigh,handle_sendemail_breakthrough,
+                          w.id,
+                          r.user.email,
+                          r.user.first_name,
+                          r.user.last_name,
+                          btvalues=btvalues.to_json())
+
+    # submit email task to send email about hard workout
+    if ishard:
+        if r.getemailnotifications and not r.emailbounced:
+            job = myqueue(queuehigh,handle_sendemail_hard,
+                          w.id,
+                          r.user.email,
+                          r.user.first_name,
+                          r.user.last_name,
+                          btvalues=btvalues.to_json())
+
+    return isbreakthrough, ishard
+
+
+def checkduplicates(r,workoutdate,workoutstartdatetime,workoutenddatetime):
+    duplicate = False
+    ws = Workout.objects.filter(user=r,date=workoutdate,duplicate=False).exclude(
+        startdatetime__gt=workoutenddatetime
+    )
+
+    ws2 = []
+
+    for ww in ws:
+        t = ww.duration
+        delta = datetime.timedelta(hours=t.hour, minutes=t.minute, seconds=t.second)
+        enddatetime = ww.startdatetime+delta
+        print(enddatetime,workoutstartdatetime)
+        if enddatetime > workoutstartdatetime:
+            ws2.append(ww)
+
+    if (len(ws2) != 0):
+        message = "Warning: This workout overlaps with an existing one and was marked as a duplicate"
+        duplicate = True
+        return duplicate
+
+    return duplicate
+
+
 # Processes painsled CSV file to database
 def save_workout_database(f2, r, dosmooth=True, workouttype='rower',
                           boattype='1x',
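The duplicate test in checkduplicates comes down to a standard interval-overlap check: a new workout is a duplicate when some existing workout on the same date starts before the new one ends (the .exclude(startdatetime__gt=workoutenddatetime)) and ends after the new one starts (the loop). A minimal standalone sketch of that predicate with invented times; the overlaps helper is illustrative, not part of the codebase:

    import datetime

    def overlaps(start_a, end_a, start_b, end_b):
        # Two intervals overlap iff each one starts before the other ends.
        return start_a < end_b and start_b < end_a

    new_start = datetime.datetime(2020, 12, 1, 8, 0)
    new_end = new_start + datetime.timedelta(minutes=60)
    existing_start = datetime.datetime(2020, 12, 1, 8, 30)
    existing_end = existing_start + datetime.timedelta(minutes=45)

    print(overlaps(new_start, new_end, existing_start, existing_end))  # True -> duplicate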
@@ -1638,23 +1710,7 @@ def save_workout_database(f2, r, dosmooth=True, workouttype='rower',
     workoutenddatetime = workoutstartdatetime+delta
 
     # check for duplicate start times and duration
-    ws = Workout.objects.filter(user=r,date=workoutdate,duplicate=False).exclude(
-        startdatetime__gt=workoutenddatetime
-    )
-
-    ws2 = []
-
-    for ww in ws:
-        t = ww.duration
-        delta = datetime.timedelta(hours=t.hour, minutes=t.minute, seconds=t.second)
-        enddatetime = ww.startdatetime+delta
-        if enddatetime > workoutstartdatetime:
-            ws2.append(ww)
-
-
-    if (len(ws2) != 0):
-        message = "Warning: This workout overlaps with an existing one and was marked as a duplicate"
-        duplicate = True
+    duplicate = checkduplicates(r,workoutdate,workoutstartdatetime,workoutenddatetime)
 
     # test title length
     if title is not None and len(title)>140:
@@ -1701,48 +1757,7 @@ def save_workout_database(f2, r, dosmooth=True, workouttype='rower',
 
     job = myqueue(queuehigh,handle_calctrimp,w.id,f2,r.ftp,r.sex,r.hrftp,r.max,r.rest)
 
-    isbreakthrough = False
-    ishard = False
-    if workouttype in rowtypes:
-        cpdf,delta,cpvalues = setcp(w)
-        if not cpdf.empty:
-            if workouttype in otwtypes:
-                res, btvalues, res2 = utils.isbreakthrough(
-                    delta, cpvalues, r.p0, r.p1, r.p2, r.p3, r.cpratio)
-                success = update_rolling_cp(r,otwtypes,'water')
-
-            elif workouttype in otetypes:
-                res, btvalues, res2 = utils.isbreakthrough(
-                    delta, cpvalues, r.ep0, r.ep1, r.ep2, r.ep3, r.ecpratio)
-                success = update_rolling_cp(r,otetypes,'erg')
-            else:
-                res = 0
-                res2 = 0
-            if res:
-                isbreakthrough = True
-            if res2 and not isbreakthrough:
-                ishard = True
-
-    # submit email task to send email about breakthrough workout
-    if isbreakthrough:
-        if r.getemailnotifications and not r.emailbounced:
-            job = myqueue(queuehigh,handle_sendemail_breakthrough,
-                          w.id,
-                          r.user.email,
-                          r.user.first_name,
-                          r.user.last_name,
-                          btvalues=btvalues.to_json())
-
-    # submit email task to send email about breakthrough workout
-    if ishard:
-        if r.getemailnotifications and not r.emailbounced:
-            job = myqueue(queuehigh,handle_sendemail_hard,
-                          w.id,
-                          r.user.email,
-                          r.user.first_name,
-                          r.user.last_name,
-                          btvalues=btvalues.to_json())
+    isbreakthrough, ishard = checkbreakthrough(w, r)
 
     return (w.id, message)
 
@@ -542,7 +542,6 @@ def do_sync(w,options, quick=False):
         w.uploadedtogarmin = options['garminid']
         w.save()
     except KeyError:
-        print('keyerror')
         pass
 
 
@@ -209,13 +209,16 @@ def analysis_new(request,userid=0,function='boxplot',teamid=0,id=''):
     query = request.POST.get('q')
     if query:
         query_list = query.split()
-        workouts = workouts.filter(
-            reduce(operator.and_,
-                   (Q(name__icontains=q) for q in query_list)) |
-            reduce(operator.and_,
-                   (Q(notes__icontains=q) for q in query_list))
-        )
-        searchform = SearchForm(initial={'q':query})
+        try:
+            workouts = workouts.filter(
+                reduce(operator.and_,
+                       (Q(name__icontains=q) for q in query_list)) |
+                reduce(operator.and_,
+                       (Q(notes__icontains=q) for q in query_list))
+            )
+            searchform = SearchForm(initial={'q':query})
+        except TypeError:
+            searchform = SearchForm()
     else:
         searchform = SearchForm()
 
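The new except TypeError is what guards the empty-search case: functools.reduce with no initial value raises TypeError on an empty iterable, and a query of only whitespace passes the if query: test yet splits to an empty list. A standalone sketch of that failure mode, using sets in place of Django Q objects (both combine with &):

    import operator
    from functools import reduce

    def combine(query_list):
        try:
            # Mirrors the view: AND together one filter per search term.
            return reduce(operator.and_, ({q} for q in query_list))
        except TypeError:
            # reduce() of an empty iterable with no initial value raises
            # TypeError, so an all-whitespace query falls back cleanly.
            return None

    print(combine(["erg"]))        # {'erg'}
    print(combine("   ".split()))  # None: "   " is truthy but splits to []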
@@ -4,7 +4,11 @@ from __future__ import print_function
 from __future__ import unicode_literals
 
 from rowers.views.statements import *
+from rowers.tasks import handle_calctrimp
+from rowers.mailprocessing import send_confirm
 
+import sys
+import arrow
 
 # Stroke data form to test API upload
 @login_required()
@@ -117,10 +121,14 @@ def strokedatajson_v2(request,id):
             logfile.write(str(timezone.now())+": ")
             logfile.write(request.user.username+" (strokedatajson_v2 POST) \n")
             try:
-                logfile.write(request.data['data']+"\n")
+                for d in request.data['data']:
+                    logfile.write(json.dumps(d))
+                    logfile.write("\n")
             except KeyError:
                 try:
-                    logfile.write(request.data['strokedata']+"\n")
+                    for d in request.data['strokedata']:
+                        logfile.write(json.dumps(d))
+                        logfile.write("\n")
                 except KeyError:
                     logfile.write("No data in request.data\n")
         except (AttributeError,TypeError):
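The old logging line concatenated request.data['data'] onto a string, which raises TypeError once the payload arrives as a parsed JSON list rather than raw text; the replacement serializes each element explicitly. A sketch under that assumption — the stroke field names below are invented:

    import json

    # Hypothetical parsed payload: a list of per-stroke dicts.
    strokes = [{"elapsed": 0.0, "spm": 22}, {"elapsed": 2.7, "spm": 23}]

    with open('apilog.log', 'a') as logfile:
        for d in strokes:
            logfile.write(json.dumps(d))  # one JSON object per stroke
            logfile.write("\n")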
@@ -255,7 +263,7 @@ def strokedatajson_v2(request,id):
     unixtime = starttime+time
 
     with open('apilog.log','a') as logfile:
-        logfile.write(str(starttime)+": ")
+        logfile.write(str(arrow.get(starttime).datetime)+": ")
         logfile.write(request.user.username+"(strokedatajson_v2 POST - data parsed) \r\n")
 
 
@@ -288,6 +296,15 @@ def strokedatajson_v2(request,id):
         timestr = row.startdatetime.strftime("%Y%m%d-%H%M%S")
         csvfilename ='media/Import_'+timestr+'.csv'
 
+        workoutdate = row.date
+        workoutstartdatetime = row.startdatetime
+        workoutenddatetime = workoutstartdatetime+datetime.timedelta(seconds=data[' ElapsedTime (sec)'].max())
+
+        duplicate = dataprep.checkduplicates(r,workoutdate,workoutstartdatetime,workoutenddatetime)
+        if duplicate:
+            row.duplicate = True
+            row.save()
+
         res = data.to_csv(csvfilename+'.gz',index_label='index',
                           compression='gzip')
         row.csvfilename = csvfilename
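Here the end time handed to dataprep.checkduplicates is derived from the stroke data itself: the start time plus the largest elapsed-time sample. A small sketch of that arithmetic with pandas; the column name, including its leading space, is taken from the diff, while the sample values are invented:

    import datetime
    import pandas as pd

    data = pd.DataFrame({' ElapsedTime (sec)': [0.0, 2.7, 5.4, 1800.0]})  # invented samples

    workoutstartdatetime = datetime.datetime(2020, 12, 1, 8, 0)
    workoutenddatetime = workoutstartdatetime + datetime.timedelta(
        seconds=data[' ElapsedTime (sec)'].max())  # start + last elapsed sample

    print(workoutenddatetime)  # 2020-12-01 08:30:00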
@@ -310,6 +327,22 @@ def strokedatajson_v2(request,id):
 
         datadf = dataprep.dataprep(rowdata,id=row.id,bands=True,barchart=True,otwpower=True,empower=True)
 
+        job = myqueue(queuehigh, handle_calctrimp, row.id, row.csvfilename, r.ftp,r.sex,r.hrftp, r.max, r.rest)
+
+        isbreakthrough, ishard = dataprep.checkbreakthrough(row, r)
+
+        if r.getemailnotifications and not r.emailbounced:
+            link = settings.SITE_URL+reverse(
+                r.defaultlandingpage,
+                kwargs = {
+                    'id':encoder.encode_hex(row.id),
+                }
+            )
+            email_sent = send_confirm(r.user, row.name, link, '')
+
+        result = uploads.do_sync(row,{},quick=True)
+
+
         with open('apilog.log','a') as logfile:
             logfile.write(str(timezone.now())+": ")
             logfile.write(request.user.username+" (strokedatajson_v2 POST completed successfully) \n")
@@ -1717,7 +1717,7 @@ def workout_getimportview(request,externalid,source = 'c2'):
     try:
         notes = data['comments']
         name = notes[:40]
-    except KeyError:
+    except (KeyError,TypeError):
         comments = 'C2 Import Workout from {startdatetime}'.format(startdatetime=startdatetime)
         name = notes
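Broadening the handler to (KeyError,TypeError) covers the case where data is None or otherwise not subscriptable, not just a missing key: subscripting None raises TypeError, while a dict without 'comments' raises KeyError. A self-contained illustration of the two failure modes, with a simplified fallback:

    def get_comments(data):
        # Subscripting None raises TypeError, a missing key raises KeyError;
        # catching both lets one fallback handle either failure mode.
        try:
            return data['comments']
        except (KeyError, TypeError):
            return 'C2 Import Workout'  # fallback, simplified from the view

    print(get_comments({'comments': 'steady state'}))  # steady state
    print(get_comments({}))                            # fallback (KeyError)
    print(get_comments(None))                          # fallback (TypeError)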