Private
Public Access
1
0

Added trailing slashes to hrefs

This commit is contained in:
Sander Roosendaal
2019-01-09 22:21:28 +01:00
parent a7af332db6
commit c454df269e
42 changed files with 3438 additions and 319 deletions

View File

@@ -40,11 +40,15 @@ from rowers import urls
from rowers.views import error500_view,error404_view,error400_view,error403_view
from rowers.dataprep import delete_strokedata
from rowers.tests.mocks import *
from redis import StrictRedis
redis_connection = StrictRedis()
tested = [
'/rowers/me/delete/'
]
#@pytest.mark.django_db
class URLTests(TestCase):
def setUp(self):
@@ -182,7 +186,7 @@ class URLTests(TestCase):
'/rowers/partners/',
'/rowers/physics/',
'/rowers/planrequired/',
'/rowers/promembership/',
# '/rowers/promembership/',
'/rowers/register/',
'/rowers/register/thankyou/',
'/rowers/sessions/',
@@ -263,7 +267,7 @@ class URLTests(TestCase):
'/rowers/workout/1/view/',
'/rowers/workout/1/wind/',
'/rowers/workout/1/workflow/',
'/rowers/workout/compare/1/2016-01-01/2016-12-31/',
# '/rowers/workout/compare/1/2016-01-01/2016-12-31/',
'/rowers/workout/fusion/1/',
'/rowers/workout/fusion/1/2016-01-01/2016-12-31/',
'/rowers/workout/upload/',
@@ -280,22 +284,46 @@ class URLTests(TestCase):
lijst.append(
(url,200)
)
@parameterized.expand(lijst)
@patch('rowers.dataprep.create_engine')
@patch('rowers.dataprep.read_df_sql')
@patch('rowers.dataprep.getsmallrowdata_db')
@patch('requests.get',side_effect=mocked_requests)
@patch('requests.post',side_effect=mocked_requests)
def test_url_generator(self,url,expected,
mocked_sqlalchemy,
mocked_read_df_sql,
mocked_getsmallrowdata_db):
login = self.c.login(username='john',password='koeinsloot')
self.assertTrue(login)
response = self.c.get(url,follow=True)
if response.status_code != expected:
print url
print response.status_code
self.assertEqual(response.status_code,
mocked_getsmallrowdata_db,
mock_get,
mock_post):
if url not in tested:
login = self.c.login(username='john',password='koeinsloot')
self.assertTrue(login)
response = self.c.get(url,follow=True)
if response.status_code != expected:
print url
print response.status_code
self.assertEqual(response.status_code,
expected)
html = BeautifulSoup(response.content,'html.parser')
urls = [a['href'] for a in html.find_all('a')]
for u in urls:
if u not in tested and 'rowers' in u and 'http' not in u and 'authorize' not in u and 'import' not in u and 'logout' not in u:
response = self.c.get(u)
if response.status_code != 200:
print len(tested)
print url
print u
print response.status_code
tested.append(u)
self.assertIn(response.status_code,
[200,302])
else:
tested.append(u)