Private
Public Access
1
0

status progress bar works on develop

This commit is contained in:
Sander Roosendaal
2017-11-01 17:16:08 +01:00
parent fb85e5f53b
commit ac5500a1d8
7 changed files with 212 additions and 41 deletions

View File

@@ -1,3 +1,4 @@
from __future__ import absolute_import
import numpy as np import numpy as np
import time import time
@@ -10,6 +11,24 @@ redis_connection = StrictRedis()
import redis import redis
import threading import threading
def getvalue(data):
    """Extract progress-reporting fields from a pubsub message payload.

    Looks up the four well-known keys in *data* (a dict decoded from the
    JSON published by the long-running task) and falls back to the same
    defaults the caller relied on before: total=1 (avoids division by
    zero when computing done/total), done=0, task id 0, and the 'noot'
    placeholder session key.

    Returns a (total, done, task_id, session_key) tuple; total and done
    are floats when present in the payload.
    """
    total = 1
    done = 0
    task_id = 0          # renamed local: don't shadow the builtin `id`
    session_key = 'noot'
    # Direct membership tests instead of scanning items(); also works on
    # both Python 2 and 3 (the original used py2-only iteritems()).
    if 'total' in data:
        total = float(data['total'])
    if 'done' in data:
        done = float(data['done'])
    if 'id' in data:
        task_id = data['id']
    if 'session_key' in data:
        session_key = data['session_key']
    return total, done, task_id, session_key
class Listener(threading.Thread): class Listener(threading.Thread):
def __init__(self, r, channels): def __init__(self, r, channels):
threading.Thread.__init__(self) threading.Thread.__init__(self)
@@ -18,7 +37,14 @@ class Listener(threading.Thread):
self.pubsub.subscribe(channels) self.pubsub.subscribe(channels)
def work(self, item): def work(self, item):
print item['channel'], ":", item['data'] try:
data = json.loads(item['data'])
total,done,id,session_key = getvalue(data)
perc = 100.*done/total
print perc, '%'
print session_key, id
except TypeError:
print "invalid data"
def run(self): def run(self):
for item in self.pubsub.listen(): for item in self.pubsub.listen():
@@ -30,13 +56,15 @@ class Listener(threading.Thread):
self.work(item) self.work(item)
def longtask(aantal,jobid=None,debug=False): def longtask(aantal,jobid=None,debug=False,
if jobid: session_key=None):
if debug: counter = 0
job = celery_result.AsyncResult(jobid) # if jobid:
else: # if debug:
job = Job.fetch(jobid,connection=redis_connection) # job = celery_result.AsyncResult(jobid)
counter = 0 # else:
# job = Job.fetch(jobid,connection=redis_connection)
channel = 'tasks' channel = 'tasks'
for i in range(aantal): for i in range(aantal):
time.sleep(1) time.sleep(1)
@@ -52,10 +80,10 @@ def longtask(aantal,jobid=None,debug=False):
'done':i, 'done':i,
'total':aantal, 'total':aantal,
'id':jobid, 'id':jobid,
'session_key':session_key,
} }
)) ))
redis_connection.publish(channel,'KILL')
return 1 return 1

View File

@@ -46,10 +46,11 @@ def add(x, y):
@app.task(bind=True) @app.task(bind=True)
def long_test_task(self,aantal,debug=False,job=None): def long_test_task(self,aantal,debug=False,job=None,session_key=None):
job = self.request job = self.request
print job.id
return longtask.longtask(aantal,jobid=job.id,debug=debug) return longtask.longtask(aantal,jobid=job.id,debug=debug,
session_key=session_key)
# create workout # create workout
@app.task @app.task

View File

@@ -4,6 +4,53 @@
{% block title %}Rowsandall - Tasks {% endblock %} {% block title %}Rowsandall - Tasks {% endblock %}
{% block meta %}
<link rel="stylesheet" href="//code.jquery.com/ui/1.12.1/themes/base/jquery-ui.css">
<style type="text/css">
.progressBar div {
border-radius:5px;
height: 100%;
/* padding: 4px 10px; */
font-size: 15px;
color: #fff;
text-align: right;
line-height: 22px;
width: 0;
vertical-align: middle;
background-color: #0099ff;
}
</style>
<script type='text/javascript'
src='https://ajax.googleapis.com/ajax/libs/jquery/2.1.4/jquery.min.js'>
</script>
<script src="https://code.jquery.com/ui/1.12.1/jquery-ui.js"></script>
<script>
$(document).ready(function(){
    // First pass: animate every bar to 0% almost instantly (1 ms) so each
    // bar visibly starts from empty before the real value is applied.
    // NOTE(review): `maxvalue` is read from the attr and then deliberately
    // overwritten with 0 -- the attr read here is dead code.
    $('.progressBar').each(function() {
        var bar = $(this);
        var maxvalue = $(this).attr('data');
        maxvalue = 0;
        var text = $(this).children('div').data('show');
        progress1(maxvalue, bar, text);
    });
    // Second pass: parse the real percentage out of the custom `data`
    // attribute, which is rendered as "max<percent>" by the template;
    // substring(3) strips the "max" prefix.
    $('.progressBar').each(function() {
        var bar = $(this);
        var maxvalue = $(this).attr('data');
        maxvalue = maxvalue.substring(3);
        var text = $(this).children('div').data('show');
        progress(maxvalue, bar, text);
    });
});
// Snap the inner bar to `percent` width (1 ms animation) and set its label.
function progress1(percent, element, text) {
    element.find('div').animate({ width: percent+'%' }, 1).html(text +"&nbsp;"+ percent + "%&nbsp;");
}
// Animate the inner bar to `percent` width ('slow' easing) and set its label.
function progress(percent, element, text) {
    element.find('div').animate({ width: percent+'%' }, 'slow').html(text +"&nbsp;"+ percent + "%&nbsp;");
}
</script>
{% endblock %}
{% block content %} {% block content %}
@@ -16,7 +63,8 @@
<thead> <thead>
<tr> <tr>
<th>ID</th> <th>ID</th>
<th style="width:180">Task</th> <th>Task</th>
<th>Progress</th>
<th>Status</th> <th>Status</th>
<th>Action</th> <th>Action</th>
</tr> </tr>
@@ -31,6 +79,12 @@
{{ task|lookup:'verbose' }} {{ task|lookup:'verbose' }}
</td> </td>
<td> <td>
<div class="progressBar" data="max{{ task|lookup:'progress' }}" style="width: 100%;">
<div data-show=""> {{ task|lookup:'progress' }}</div>
</div>
</td>
<td>
{{ task|lookup:'status' }} {{ task|lookup:'status' }}
</td> </td>
{% if task|lookup:'failed' %} {% if task|lookup:'failed' %}

View File

@@ -1,6 +1,8 @@
from django import template from django import template
from django.utils.safestring import mark_safe
from time import strftime from time import strftime
import dateutil.parser import dateutil.parser
import json
register = template.Library() register = template.Library()
@@ -65,6 +67,11 @@ def deltatimeprint(d):
return strfdeltah(d) return strfdeltah(d)
@register.filter(is_safe=True)
def jsdict(mapping, key):
    """Template filter: return mapping[key] serialized as JSON.

    The result is wrapped in mark_safe so it can be embedded directly in
    inline JavaScript without Django escaping the quotes. Returns the
    JSON literal ``null`` when *key* is absent (dict.get default).

    Renamed the first parameter from ``dict`` (shadowed the builtin);
    Django invokes filters positionally, so callers are unaffected.
    """
    value = mapping.get(key)
    return mark_safe(json.dumps(value))
@register.filter @register.filter
def lookup(dict, key): def lookup(dict, key):
s = dict.get(key) s = dict.get(key)

View File

@@ -5,6 +5,7 @@ import colorsys
from django.conf import settings from django.conf import settings
lbstoN = 4.44822 lbstoN = 4.44822
landingpages = ( landingpages = (
@@ -226,3 +227,5 @@ def myqueue(queue,function,*args,**kwargs):
job = queue.enqueue(function,*args,**kwargs) job = queue.enqueue(function,*args,**kwargs)
return job return job

View File

@@ -14,6 +14,9 @@ from django.views.generic.base import TemplateView
from django.db.models import Q from django.db.models import Q
from django import template from django import template
from django.db import IntegrityError, transaction from django.db import IntegrityError, transaction
#from django.contrib.sessions.backends.db import SessionStore
from importlib import import_module
from django.contrib.sessions.models import Session
from django.shortcuts import render from django.shortcuts import render
from django.http import ( from django.http import (
HttpResponse, HttpResponseRedirect, HttpResponse, HttpResponseRedirect,
@@ -131,16 +134,70 @@ queue = django_rq.get_queue('default')
queuelow = django_rq.get_queue('low') queuelow = django_rq.get_queue('low')
queuehigh = django_rq.get_queue('low') queuehigh = django_rq.get_queue('low')
import redis
import threading
from redis import StrictRedis,Redis from redis import StrictRedis,Redis
from rq.exceptions import NoSuchJobError from rq.exceptions import NoSuchJobError
from rq.registry import StartedJobRegistry from rq.registry import StartedJobRegistry
from rq import Queue,cancel_job from rq import Queue,cancel_job
from django.core.cache import cache
# Redis related
session_engine = import_module(settings.SESSION_ENGINE)
def getvalue(data):
    """Extract progress-reporting fields from a pubsub message payload.

    Looks up the four well-known keys in *data* (a dict decoded from the
    JSON published by the long-running task) and falls back to the same
    defaults the caller relied on before: total=1 (avoids division by
    zero when computing done/total), done=0, task id 0, and the 'noot'
    placeholder session key.

    Returns a (total, done, task_id, session_key) tuple; total and done
    are floats when present in the payload.
    """
    total = 1
    done = 0
    task_id = 0          # renamed local: don't shadow the builtin `id`
    session_key = 'noot'
    # Direct membership tests instead of scanning items(); also works on
    # both Python 2 and 3 (the original used py2-only iteritems()).
    if 'total' in data:
        total = float(data['total'])
    if 'done' in data:
        done = float(data['done'])
    if 'id' in data:
        task_id = data['id']
    if 'session_key' in data:
        session_key = data['session_key']
    return total, done, task_id, session_key
class SessionTaskListener(threading.Thread):
    """Background thread that subscribes to Redis pubsub channels and
    mirrors task progress into the Django cache, keyed by job id, so
    views can report a percentage to the user's session.
    """
    def __init__(self, r, channels):
        # r: a redis connection object; channels: channel name(s) to
        # subscribe to (e.g. ['tasks']).
        threading.Thread.__init__(self)
        self.redis = r
        self.pubsub = self.redis.pubsub()
        self.pubsub.subscribe(channels)
    def work(self, item):
        # Decode one pubsub message and store its completion percentage
        # under the job id.
        try:
            data = json.loads(item['data'])
            total,done,id,session_key = getvalue(data)
            perc = 100.*done/total
            cache.set(id,perc)
        except TypeError:
            # Non-string payloads (e.g. the integer subscribe
            # confirmation message) make json.loads raise TypeError;
            # such messages carry no progress data, so ignore them.
            pass
    def run(self):
        # Blocks on the pubsub generator forever; a literal "KILL"
        # payload unsubscribes and ends the thread.
        for item in self.pubsub.listen():
            if item['data'] == "KILL":
                self.pubsub.unsubscribe()
                print self, "unsubscribed and finished"
                break
            else:
                self.work(item)
queuefailed = Queue("failed",connection=Redis()) queuefailed = Queue("failed",connection=Redis())
redis_connection = StrictRedis() redis_connection = StrictRedis()
r = Redis() r = Redis()
from .longtask import Listener
client = Listener(r,['tasks']) client = SessionTaskListener(r,['tasks'])
client.start() client.start()
@@ -187,9 +244,8 @@ def remove_asynctask(request,id):
newtasks = [] newtasks = []
for task in oldtasks: for task in oldtasks:
print task[0]
if id not in task[0]: if id not in task[0]:
newtasks += [(task[0],task[1])] newtasks += [(task[0],task[1],task[2])]
request.session['async_tasks'] = newtasks request.session['async_tasks'] = newtasks
@@ -223,8 +279,6 @@ def get_job_status(jobid):
if settings.DEBUG: if settings.DEBUG:
job = celery_result.AsyncResult(jobid) job = celery_result.AsyncResult(jobid)
jobresult = job.result jobresult = job.result
channel = 'task:<'+job.id+'>:progress'
channel = 'noot'
if 'fail' in job.status.lower(): if 'fail' in job.status.lower():
jobresult = '0' jobresult = '0'
@@ -271,6 +325,7 @@ def kill_async_job(request,id='aap'):
pass pass
remove_asynctask(request,id) remove_asynctask(request,id)
cache.delete(id)
url = reverse(session_jobs_status) url = reverse(session_jobs_status)
return HttpResponseRedirect(url) return HttpResponseRedirect(url)
@@ -278,11 +333,16 @@ def kill_async_job(request,id='aap'):
@login_required() @login_required()
def test_job_view(request,aantal=100): def test_job_view(request,aantal=100):
job = myqueue(queuehigh,long_test_task,int(aantal)) session_key = request.session._session_key
job = myqueue(queuehigh,long_test_task,int(aantal),
session_key=session_key)
try: try:
request.session['async_tasks'] += [(job.id,'long_test_task')] request.session['async_tasks'] += [(job.id,'long_test_task',0)]
except KeyError: except KeyError:
request.session['async_tasks'] = [(job.id,'long_test_task')] request.session['async_tasks'] = [(job.id,'long_test_task',0)]
url = reverse(session_jobs_status) url = reverse(session_jobs_status)
@@ -333,14 +393,31 @@ def get_stored_tasks_status(request):
except KeyError: except KeyError:
taskids = [] taskids = []
taskstatus = [{ taskstatus = []
'id':id, for id,func_name,session_progress in taskids:
'status':get_job_status(id)['status'], progress = 0
'failed':get_job_status(id)['failed'], cached_progress = cache.get(id)
'finished':get_job_status(id)['finished'], finished = get_job_status(id)['finished']
'func_name':func_name, if finished:
'verbose': verbose_job_status[func_name] cache.set(id,100)
} for id,func_name in taskids] progress = 100
elif cached_progress>0:
progress = cached_progress
else:
progress = session_progress
this_task_status = {
'id':id,
'status':get_job_status(id)['status'],
'failed':get_job_status(id)['failed'],
'finished':get_job_status(id)['finished'],
'func_name':func_name,
'verbose': verbose_job_status[func_name],
'progress': progress,
}
taskstatus.append(this_task_status)
return taskstatus return taskstatus
@@ -3268,9 +3345,9 @@ def otwrankings_view(request,theuser=0,
) )
request.session['job_id'] = job.id request.session['job_id'] = job.id
try: try:
request.session['async_tasks'] += [(job.id,'updatecpwater')] request.session['async_tasks'] += [(job.id,'updatecpwater',0)]
except KeyError: except KeyError:
request.session['async_tasks'] = [(job.id,'updatecpwater')] request.session['async_tasks'] = [(job.id,'updatecpwater',0)]
messages.info(request,'New calculation queued. Refresh page or resubmit the date form to get the result') messages.info(request,'New calculation queued. Refresh page or resubmit the date form to get the result')
powerdf = pd.DataFrame({ powerdf = pd.DataFrame({
@@ -3525,9 +3602,9 @@ def oterankings_view(request,theuser=0,
) )
request.session['job_id'] = job.id request.session['job_id'] = job.id
try: try:
request.session['async_tasks'] += [(job.id,'updatecp')] request.session['async_tasks'] += [(job.id,'updatecp',0)]
except KeyError: except KeyError:
request.session['async_tasks'] = [(job.id,'updatecp')] request.session['async_tasks'] = [(job.id,'updatecp',0)]
messages.info(request,'New calculation queued.') messages.info(request,'New calculation queued.')
powerdf = pd.DataFrame({ powerdf = pd.DataFrame({
@@ -5659,9 +5736,9 @@ def workout_otwsetpower_view(request,id=0,message="",successmessage=""):
ratio=r.cpratio) ratio=r.cpratio)
try: try:
request.session['async_tasks'] += [(job.id,'otwsetpower')] request.session['async_tasks'] += [(job.id,'otwsetpower',0)]
except KeyError: except KeyError:
request.session['async_tasks'] = [(job.id,'otwsetpower')] request.session['async_tasks'] = [(job.id,'otwsetpower',0)]
successmessage = 'Your calculations have been submitted. You will receive an email when they are done. You can check the status of your calculations <a href="/rowers/jobs-status/">here</a>' successmessage = 'Your calculations have been submitted. You will receive an email when they are done. You can check the status of your calculations <a href="/rowers/jobs-status/">here</a>'
messages.info(request,successmessage) messages.info(request,successmessage)
@@ -7437,9 +7514,9 @@ def workout_add_chart_view(request,id,plotnr=1):
imagename=imagename imagename=imagename
) )
try: try:
request.session['async_tasks'] += [(jobid,'make_plot')] request.session['async_tasks'] += [(jobid,'make_plot',0)]
except KeyError: except KeyError:
request.session['async_tasks'] = [(jobid,'make_plot')] request.session['async_tasks'] = [(jobid,'make_plot',0)]
try: try:
url = request.session['referer'] url = request.session['referer']

View File

@@ -287,6 +287,7 @@ RQ_QUEUES = {
#SESSION_ENGINE = "django.contrib.sessions.backends.signed_cookies" #SESSION_ENGINE = "django.contrib.sessions.backends.signed_cookies"
#SESSION_ENGINE = "django.contrib.sessions.backends.cached_db" #SESSION_ENGINE = "django.contrib.sessions.backends.cached_db"
SESSION_ENGINE = "django.contrib.sessions.backends.cache" SESSION_ENGINE = "django.contrib.sessions.backends.cache"
SESSION_SAVE_EVERY_REQUEST = True
# admin stuff for error reporting # admin stuff for error reporting
SERVER_EMAIL='admin@rowsandall.com' SERVER_EMAIL='admin@rowsandall.com'