From aaf59db570ab7688af72c08bb5bc1edc145e3e07 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 17 Apr 2018 18:08:51 -0700
Subject: [PATCH] Longer time limit for test_paginate_compound_keys

It was failing intermittently in Travis - see #209
---
 tests/fixtures.py | 8 ++++++--
 tests/test_api.py | 6 ++++--
 2 files changed, 10 insertions(+), 4 deletions(-)

diff --git a/tests/fixtures.py b/tests/fixtures.py
index 29075d7f..493e5272 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -9,7 +9,7 @@ import tempfile
 import time
 
 
-def app_client():
+def app_client(sql_time_limit_ms=None):
     with tempfile.TemporaryDirectory() as tmpdir:
         filepath = os.path.join(tmpdir, 'test_tables.db')
         conn = sqlite3.connect(filepath)
@@ -22,7 +22,7 @@ def app_client():
             [filepath],
             page_size=50,
             max_returned_rows=100,
-            sql_time_limit_ms=20,
+            sql_time_limit_ms=sql_time_limit_ms or 20,
             metadata=METADATA,
             plugins_dir=plugins_dir,
         )
@@ -32,6 +32,10 @@ def app_client():
     yield ds.app().test_client
 
 
+def app_client_longer_time_limit():
+    yield from app_client(200)
+
+
 def generate_compound_rows(num):
     for a, b, c in itertools.islice(
         itertools.product(string.ascii_lowercase, repeat=3), num
diff --git a/tests/test_api.py b/tests/test_api.py
index 7d9548b0..2b741590 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -1,11 +1,13 @@
 from .fixtures import (
     app_client,
+    app_client_longer_time_limit,
     generate_compound_rows,
     generate_sortable_rows,
 )
 import pytest
 
 pytest.fixture(scope='module')(app_client)
+pytest.fixture(scope='module')(app_client_longer_time_limit)
 
 
 def test_homepage(app_client):
@@ -387,13 +389,13 @@ def test_paginate_tables_and_views(app_client, path, expected_rows, expected_pag
     assert expected_pages == count
 
 
-def test_paginate_compound_keys(app_client):
+def test_paginate_compound_keys(app_client_longer_time_limit):
     fetched = []
     path = '/test_tables/compound_three_primary_keys.json?_shape=objects'
     page = 0
     while path:
         page += 1
-        response = app_client.get(path, gather_request=False)
+        response = app_client_longer_time_limit.get(path, gather_request=False)
         fetched.extend(response.json['rows'])
         path = response.json['next_url']
         assert page < 100
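
The patch works by turning app_client from a fixed setup into a parametrised generator: sql_time_limit_ms defaults to the old 20ms, and app_client_longer_time_limit delegates to it via `yield from` with a 200ms limit, so the slow compound-keys pagination test gets more headroom without duplicating setup and teardown. Below is a minimal, self-contained sketch of that delegation pattern, assuming only pytest; the names (_make_client, client_longer_time_limit) and the dict standing in for Datasette's test client are hypothetical, not from the patch:

import pytest


def _make_client(sql_time_limit_ms=None):
    # Hypothetical stand-in for the Datasette test-client setup:
    # build the "client" with a default 20ms SQL time limit...
    client = {'sql_time_limit_ms': sql_time_limit_ms or 20}
    yield client  # ...hand it to the test...
    # ...and any teardown (temp dirs, connections) runs here.


@pytest.fixture(scope='module')
def client():
    yield from _make_client()


@pytest.fixture(scope='module')
def client_longer_time_limit():
    # Delegate to the same generator with a longer limit, as the
    # patch does with app_client(200): one setup/teardown, two fixtures.
    yield from _make_client(200)


def test_long_limit(client_longer_time_limit):
    assert client_longer_time_limit['sql_time_limit_ms'] == 200

The patch itself calls the fixture-registered app_client generator directly, which is fine under the pytest 3.x of this era, where pytest.fixture(scope='module')(app_client) merely tags the function in place; later pytest versions reject direct calls to fixture functions, which is why the sketch keeps the shared generator unregistered. Note also that pytest only treats fixture parameters without default values as fixture requests, so the sql_time_limit_ms=None default does not become a dependency to resolve.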