Private
Public Access
1
0

Review duplicate flags when a workout is deleted

This commit is contained in:
Sander Roosendaal
2018-11-14 10:26:40 +01:00
parent 779a07c96b
commit 840e0b55f3
4 changed files with 46 additions and 5 deletions

View File

@@ -2182,7 +2182,42 @@ def auto_delete_file_on_delete(sender, instance, **kwargs):
if instance.csvfilename+'.gz':
if os.path.isfile(instance.csvfilename+'.gz'):
os.remove(instance.csvfilename+'.gz')
# Signal handler: after a Workout row is deleted, re-examine the remaining
# workouts for the same user and date that are flagged as duplicates, and
# clear the flag on any that no longer overlap another workout in time.
@receiver(models.signals.post_delete, sender=Workout)
def update_duplicates_on_delete(sender, instance, **kwargs):
    if not instance.id:
        return

    def _end_of(workout):
        # End moment = start + duration. The duration field holds a
        # time-of-day value, so fold its components into a timedelta.
        dur = workout.duration
        return workout.startdatetime + datetime.timedelta(
            hours=dur.hour, minutes=dur.minute, seconds=dur.second)

    flagged = Workout.objects.filter(
        user=instance.user, date=instance.date,
        duplicate=True)
    for candidate in flagged:
        candidate_end = _end_of(candidate)
        # Same-day workouts other than the deleted one and the candidate
        # itself, keeping only those that do not start after the
        # candidate ends (i.e. potential time overlaps).
        others = Workout.objects.filter(
            user=candidate.user, date=candidate.date,
        ).exclude(
            pk__in=[instance.pk, candidate.pk]
        ).exclude(
            startdatetime__gt=candidate_end
        )
        still_overlapping = any(
            _end_of(other) > candidate.startdatetime for other in others)
        if not still_overlapping:
            # No remaining overlap: this workout is no longer a duplicate.
            candidate.duplicate = False
            candidate.save()
# Delete stroke data from the database when a workout is deleted
@receiver(models.signals.post_delete,sender=Workout)
def auto_delete_strokedata_on_delete(sender, instance, **kwargs):