from bs4 import BeautifulSoup as Soup
from .fixtures import (  # noqa
    app_client,
    app_client_base_url_prefix,
    app_client_shorter_time_limit,
    app_client_two_attached_databases,
    app_client_with_hash,
    make_app_client,
    METADATA,
)
import json
import pathlib
import pytest
import re
import textwrap
import urllib.parse


def test_homepage(app_client_two_attached_databases):
    response = app_client_two_attached_databases.get("/")
    assert response.status == 200
    assert "text/html; charset=utf-8" == response.headers["content-type"]
    soup = Soup(response.body, "html.parser")
    assert "Datasette Fixtures" == soup.find("h1").text
    assert (
        "An example SQLite database demonstrating Datasette. Sign in as root user"
        == soup.select(".metadata-description")[0].text.strip()
    )
    # Should be two attached databases
    assert [
        {"href": "/fixtures", "text": "fixtures"},
        {"href": "/extra database", "text": "extra database"},
    ] == [{"href": a["href"], "text": a.text.strip()} for a in soup.select("h2 a")]
    # The first attached database should show count text and attached tables
    h2 = soup.select("h2")[1]
    assert "extra database" == h2.text.strip()
    counts_p, links_p = h2.find_all_next("p")[:2]
    assert (
        "2 rows in 1 table, 5 rows in 4 hidden tables, 1 view" == counts_p.text.strip()
    )
    # We should only show visible, not hidden tables here:
    table_links = [
        {"href": a["href"], "text": a.text.strip()} for a in links_p.findAll("a")
    ]
    assert [
        {"href": "/extra database/searchable", "text": "searchable"},
        {"href": "/extra database/searchable_view", "text": "searchable_view"},
    ] == table_links


def test_http_head(app_client):
    response = app_client.get("/", method="HEAD")
    assert response.status == 200


def test_homepage_options(app_client):
    response = app_client.get("/", method="OPTIONS")
    assert response.status == 405
    assert response.text == "Method not allowed"


def test_favicon(app_client):
    response = app_client.get("/favicon.ico")
    assert response.status == 200
    assert "" == response.text


def test_static(app_client):
    response = app_client.get("/-/static/app2.css")
    assert response.status == 404
    response = app_client.get("/-/static/app.css")
    assert response.status == 200
    assert "text/css" == response.headers["content-type"]


def test_static_mounts():
    with make_app_client(
        static_mounts=[("custom-static", str(pathlib.Path(__file__).parent))]
    ) as client:
        response = client.get("/custom-static/test_html.py")
        assert response.status == 200
        response = client.get("/custom-static/not_exists.py")
        assert response.status == 404
        # Path traversal out of the mounted directory must 404, not leak files
        response = client.get("/custom-static/../LICENSE")
        assert response.status == 404


def test_memory_database_page():
    with make_app_client(memory=True) as client:
        response = client.get("/:memory:")
        assert response.status == 200


def test_database_page_redirects_with_url_hash(app_client_with_hash):
    response = app_client_with_hash.get("/fixtures", allow_redirects=False)
    assert response.status == 302
    response = app_client_with_hash.get("/fixtures")
    assert "fixtures" in response.text


def test_database_page(app_client):
    response = app_client.get("/fixtures")
    soup = Soup(response.body, "html.parser")
    queries_ul = soup.find("h2", text="Queries").find_next_sibling("ul")
    assert queries_ul is not None
    assert [
        (
            "/fixtures/%F0%9D%90%9C%F0%9D%90%A2%F0%9D%90%AD%F0%9D%90%A2%F0%9D%90%9E%F0%9D%90%AC",
            "𝐜𝐢𝐭𝐢𝐞𝐬",
        ),
        ("/fixtures/from_async_hook", "from_async_hook"),
        ("/fixtures/from_hook", "from_hook"),
        ("/fixtures/magic_parameters", "magic_parameters"),
        ("/fixtures/neighborhood_search#fragment-goes-here", "Search neighborhoods"),
        ("/fixtures/pragma_cache_size", "pragma_cache_size"),
    ] == sorted(
        [(a["href"], a.text) for a in queries_ul.find_all("a")], key=lambda p: p[0]
    )


def test_invalid_custom_sql(app_client):
    response = app_client.get("/fixtures?sql=.schema")
    assert response.status == 400
    assert "Statement must be a SELECT" in response.text


def test_sql_time_limit(app_client_shorter_time_limit):
    response = app_client_shorter_time_limit.get("/fixtures?sql=select+sleep(0.5)")
    assert 400 == response.status
    # NOTE(review): the <a> markup below was stripped from this chunk (only
    # "sql_time_limit_ms" survived); reconstructed — confirm the exact anchor
    # against the rendered error page.
    expected_html_fragment = """
    <a href="https://docs.datasette.io/en/stable/config.html#sql-time-limit-ms">sql_time_limit_ms</a>
""".strip()
    assert expected_html_fragment in response.text


def test_row_redirects_with_url_hash(app_client_with_hash):
    response = app_client_with_hash.get(
        "/fixtures/simple_primary_key/1", allow_redirects=False
    )
    assert response.status == 302
    assert response.headers["Location"].endswith("/1")
    response = app_client_with_hash.get("/fixtures/simple_primary_key/1")
    assert response.status == 200


@pytest.mark.xfail
def test_row_strange_table_name_with_url_hash(app_client_with_hash):
    response = app_client_with_hash.get(
        "/fixtures/table%2Fwith%2Fslashes.csv/3", allow_redirects=False
    )
    assert response.status == 302
    assert response.headers["Location"].endswith("/table%2Fwith%2Fslashes.csv/3")
    response = app_client_with_hash.get("/fixtures/table%2Fwith%2Fslashes.csv/3")
    assert response.status == 200


@pytest.mark.parametrize(
    "path,expected_definition_sql",
    [
        (
            "/fixtures/facet_cities",
            # NOTE(review): internal newlines of these SQL literals were lost
            # when the file was collapsed; reconstructed — confirm against the
            # fixtures database schema.
            """
CREATE TABLE facet_cities (
    id integer primary key,
    name text
);
        """.strip(),
        ),
        (
            "/fixtures/compound_three_primary_keys",
            """
CREATE TABLE compound_three_primary_keys (
  pk1 varchar(30),
  pk2 varchar(30),
  pk3 varchar(30),
  content text,
  PRIMARY KEY (pk1, pk2, pk3)
);
CREATE INDEX idx_compound_three_primary_keys_content ON compound_three_primary_keys(content);
        """.strip(),
        ),
    ],
)
def test_definition_sql(path, expected_definition_sql, app_client):
    response = app_client.get(path)
    pre = Soup(response.body, "html.parser").select_one("pre.wrapped-sql")
    assert expected_definition_sql == pre.string


def test_table_cell_truncation():
    with make_app_client(config={"truncate_cells_html": 5}) as client:
        response = client.get("/fixtures/facetable")
        assert response.status == 200
        table = Soup(response.body, "html.parser").find("table")
        assert table["class"] == ["rows-and-columns"]
        assert [
            "Missi…",
            "Dogpa…",
            "SOMA",
            "Tende…",
            "Berna…",
            "Hayes…",
            "Holly…",
            "Downt…",
            "Los F…",
            "Korea…",
            "Downt…",
            "Greek…",
            "Corkt…",
            "Mexic…",
            "Arcad…",
        ] == [td.string for td in table.findAll("td", {"class": "col-neighborhood"})]


def test_row_page_does_not_truncate():
    with make_app_client(config={"truncate_cells_html": 5}) as client:
        response = client.get("/fixtures/facetable/1")
        assert response.status == 200
        table = Soup(response.body, "html.parser").find("table")
        assert table["class"] == ["rows-and-columns"]
        assert ["Mission"] == [
            td.string for td in table.findAll("td", {"class": "col-neighborhood"})
        ]


def test_add_filter_redirects(app_client):
    filter_args = urllib.parse.urlencode(
        {"_filter_column": "content", "_filter_op": "startswith", "_filter_value": "x"}
    )
    path_base = "/fixtures/simple_primary_key"
    path = path_base + "?" + filter_args
    response = app_client.get(path, allow_redirects=False)
    assert response.status == 302
    assert response.headers["Location"].endswith("?content__startswith=x")
    # Adding a redirect to an existing querystring:
    path = path_base + "?foo=bar&" + filter_args
    response = app_client.get(path, allow_redirects=False)
    assert response.status == 302
    assert response.headers["Location"].endswith("?foo=bar&content__startswith=x")
    # Test that op with a __x suffix overrides the filter value
    path = (
        path_base
        + "?"
        + urllib.parse.urlencode(
            {
                "_filter_column": "content",
                "_filter_op": "isnull__5",
                "_filter_value": "x",
            }
        )
    )
    response = app_client.get(path, allow_redirects=False)
    assert response.status == 302
    assert response.headers["Location"].endswith("?content__isnull=5")


def test_existing_filter_redirects(app_client):
    filter_args = {
        "_filter_column_1": "name",
        "_filter_op_1": "contains",
        "_filter_value_1": "hello",
        "_filter_column_2": "age",
        "_filter_op_2": "gte",
        "_filter_value_2": "22",
        "_filter_column_3": "age",
        "_filter_op_3": "lt",
        "_filter_value_3": "30",
        "_filter_column_4": "name",
        "_filter_op_4": "contains",
        "_filter_value_4": "world",
    }
    path_base = "/fixtures/simple_primary_key"
    path = path_base + "?" + urllib.parse.urlencode(filter_args)
    response = app_client.get(path, allow_redirects=False)
    assert response.status == 302
    assert_querystring_equal(
        "name__contains=hello&age__gte=22&age__lt=30&name__contains=world",
        response.headers["Location"].split("?")[1],
    )
    # Setting _filter_column_3 to empty string should remove *_3 entirely
    filter_args["_filter_column_3"] = ""
    path = path_base + "?" + urllib.parse.urlencode(filter_args)
    response = app_client.get(path, allow_redirects=False)
    assert response.status == 302
    assert_querystring_equal(
        "name__contains=hello&age__gte=22&name__contains=world",
        response.headers["Location"].split("?")[1],
    )
    # ?_filter_op=exact should be removed if unaccompanied by _filter_column
    response = app_client.get(path_base + "?_filter_op=exact", allow_redirects=False)
    assert response.status == 302
    assert "?" not in response.headers["Location"]


def test_empty_search_parameter_gets_removed(app_client):
    path_base = "/fixtures/simple_primary_key"
    path = (
        path_base
        + "?"
        + urllib.parse.urlencode(
            {
                "_search": "",
                "_filter_column": "name",
                "_filter_op": "exact",
                "_filter_value": "chidi",
            }
        )
    )
    response = app_client.get(path, allow_redirects=False)
    assert response.status == 302
    assert response.headers["Location"].endswith("?name__exact=chidi")


def test_searchable_view_persists_fts_table(app_client):
    # The search form should persist ?_fts_table as a hidden field
    response = app_client.get(
        "/fixtures/searchable_view?_fts_table=searchable_fts&_fts_pk=pk"
    )
    inputs = Soup(response.body, "html.parser").find("form").findAll("input")
    hiddens = [i for i in inputs if i["type"] == "hidden"]
    assert [("_fts_table", "searchable_fts"), ("_fts_pk", "pk")] == [
        (hidden["name"], hidden["value"]) for hidden in hiddens
    ]


def test_sort_by_desc_redirects(app_client):
    path_base = "/fixtures/sortable"
    path = (
        path_base
        + "?"
        + urllib.parse.urlencode({"_sort": "sortable", "_sort_by_desc": "1"})
    )
    response = app_client.get(path, allow_redirects=False)
    assert response.status == 302
    assert response.headers["Location"].endswith("?_sort_desc=sortable")


def test_sort_links(app_client):
    response = app_client.get("/fixtures/sortable?_sort=sortable")
    assert response.status == 200
    ths = Soup(response.body, "html.parser").findAll("th")
    attrs_and_link_attrs = [
        {
            "attrs": th.attrs,
            "a_href": (th.find("a")["href"] if th.find("a") else None),
        }
        for th in ths
    ]
    assert attrs_and_link_attrs == [
        {
            "attrs": {
                "class": ["col-Link"],
                "scope": "col",
                "data-column": "Link",
                "data-column-type": "",
                "data-column-not-null": "0",
                "data-is-pk": "0",
            },
            "a_href": None,
        },
        {
            "attrs": {
                "class": ["col-pk1"],
                "scope": "col",
                "data-column": "pk1",
                "data-column-type": "varchar(30)",
                "data-column-not-null": "0",
                "data-is-pk": "1",
            },
            "a_href": None,
        },
        {
            "attrs": {
                "class": ["col-pk2"],
                "scope": "col",
                "data-column": "pk2",
                "data-column-type": "varchar(30)",
                "data-column-not-null": "0",
                "data-is-pk": "1",
            },
            "a_href": None,
        },
        {
            "attrs": {
                "class": ["col-content"],
                "scope": "col",
                "data-column": "content",
                "data-column-type": "text",
                "data-column-not-null": "0",
                "data-is-pk": "0",
            },
            "a_href": None,
        },
        {
            "attrs": {
                "class": ["col-sortable"],
                "scope": "col",
                "data-column": "sortable",
                "data-column-type": "integer",
                "data-column-not-null": "0",
                "data-is-pk": "0",
            },
            "a_href": "/fixtures/sortable?_sort_desc=sortable",
        },
        {
            "attrs": {
                "class": ["col-sortable_with_nulls"],
                "scope": "col",
                "data-column": "sortable_with_nulls",
                "data-column-type": "real",
                "data-column-not-null": "0",
                "data-is-pk": "0",
            },
            "a_href": "/fixtures/sortable?_sort=sortable_with_nulls",
        },
        {
            "attrs": {
                "class": ["col-sortable_with_nulls_2"],
                "scope": "col",
                "data-column": "sortable_with_nulls_2",
                "data-column-type": "real",
                "data-column-not-null": "0",
                "data-is-pk": "0",
            },
            "a_href": "/fixtures/sortable?_sort=sortable_with_nulls_2",
        },
        {
            "attrs": {
                "class": ["col-text"],
                "scope": "col",
                "data-column": "text",
                "data-column-type": "text",
                "data-column-not-null": "0",
                "data-is-pk": "0",
            },
            "a_href": "/fixtures/sortable?_sort=text",
        },
    ]


def test_facet_display(app_client):
    response = app_client.get(
        "/fixtures/facetable?_facet=planet_int&_facet=city_id&_facet=on_earth"
    )
    assert response.status == 200
    soup = Soup(response.body, "html.parser")
    divs = soup.find("div", {"class": "facet-results"}).findAll("div")
    actual = []
    for div in divs:
        actual.append(
            {
                "name": div.find("strong").text,
                "items": [
                    {
                        "name": a.text,
                        "qs": a["href"].split("?")[-1],
                        # The count is the text between the closing </a> and
                        # the next tag. NOTE(review): the "</a>" literal was
                        # stripped from this chunk (it read split(""), which
                        # raises ValueError); restored — confirm upstream.
                        "count": int(str(a.parent).split("</a>")[1].split("<")[0]),
                    }
                    for a in div.find("ul").findAll("a")
                ],
            }
        )
    assert [
        {
            "name": "city_id",
            "items": [
                {
                    "name": "San Francisco",
                    "qs": "_facet=planet_int&_facet=city_id&_facet=on_earth&city_id=1",
                    "count": 6,
                },
                {
                    "name": "Los Angeles",
                    "qs": "_facet=planet_int&_facet=city_id&_facet=on_earth&city_id=2",
                    "count": 4,
                },
                {
                    "name": "Detroit",
                    "qs": "_facet=planet_int&_facet=city_id&_facet=on_earth&city_id=3",
                    "count": 4,
                },
                {
                    "name": "Memnonia",
                    "qs": "_facet=planet_int&_facet=city_id&_facet=on_earth&city_id=4",
                    "count": 1,
                },
            ],
        },
        {
            "name": "planet_int",
            "items": [
                {
                    "name": "1",
                    "qs": "_facet=planet_int&_facet=city_id&_facet=on_earth&planet_int=1",
                    "count": 14,
                },
                {
                    "name": "2",
                    "qs": "_facet=planet_int&_facet=city_id&_facet=on_earth&planet_int=2",
                    "count": 1,
                },
            ],
        },
        {
            "name": "on_earth",
            "items": [
                {
                    "name": "1",
                    "qs": "_facet=planet_int&_facet=city_id&_facet=on_earth&on_earth=1",
                    "count": 14,
                },
                {
                    "name": "0",
                    "qs": "_facet=planet_int&_facet=city_id&_facet=on_earth&on_earth=0",
                    "count": 1,
                },
            ],
        },
    ] == actual


def test_facets_persist_through_filter_form(app_client):
    response = app_client.get(
        "/fixtures/facetable?_facet=planet_int&_facet=city_id&_facet_array=tags"
    )
    assert response.status == 200
    inputs = Soup(response.body, "html.parser").find("form").findAll("input")
    hiddens = [i for i in inputs if i["type"] == "hidden"]
    assert [(hidden["name"], hidden["value"]) for hidden in hiddens] == [
        ("_facet", "planet_int"),
        ("_facet", "city_id"),
        ("_facet_array", "tags"),
    ]


@pytest.mark.xfail
@pytest.mark.parametrize(
    "path,expected_classes",
    [
        ("/", ["index"]),
        ("/fixtures", ["db", "db-fixtures"]),
        ("/fixtures?sql=select+1", ["query", "db-fixtures"]),
        (
            "/fixtures/simple_primary_key",
            ["table", "db-fixtures", "table-simple_primary_key"],
        ),
        (
            "/fixtures/neighborhood_search",
            ["query", "db-fixtures", "query-neighborhood_search"],
        ),
        (
            "/fixtures/table%2Fwith%2Fslashes.csv",
            ["table", "db-fixtures", "table-tablewithslashescsv-fa7563"],
        ),
        (
            "/fixtures/simple_primary_key/1",
            ["row", "db-fixtures", "table-simple_primary_key"],
        ),
    ],
)
def test_css_classes_on_body(app_client, path, expected_classes):
    response = app_client.get(path)
    assert response.status == 200
    # NOTE(review): the regex literal was stripped from this chunk; restored to
    # capture the <body class="..."> attribute — confirm against the templates.
    classes = re.search(r'<body class="(.*?)">', response.text).group(1).split()
    assert classes == expected_classes


@pytest.mark.xfail
@pytest.mark.parametrize(
    "path,expected_considered",
    [
        ("/", "*index.html"),
        ("/fixtures", "database-fixtures.html, *database.html"),
        (
            "/fixtures/simple_primary_key",
            "table-fixtures-simple_primary_key.html, *table.html",
        ),
        (
            "/fixtures/table%2Fwith%2Fslashes.csv",
            "table-fixtures-tablewithslashescsv-fa7563.html, *table.html",
        ),
        (
            "/fixtures/simple_primary_key/1",
            "row-fixtures-simple_primary_key.html, *row.html",
        ),
    ],
)
def test_templates_considered(app_client, path, expected_considered):
    response = app_client.get(path)
    assert response.status == 200
    # NOTE(review): the HTML-comment literal was stripped from this chunk
    # (it read "".format(...)); restored — confirm against the template output.
    assert (
        "<!-- Templates considered: {} -->".format(expected_considered)
        in response.text
    )


def test_table_html_simple_primary_key(app_client):
    response = app_client.get("/fixtures/simple_primary_key?_size=3")
    assert response.status == 200
    table = Soup(response.body, "html.parser").find("table")
    assert table["class"] == ["rows-and-columns"]
    ths = table.findAll("th")
    assert "id\xa0▼" == ths[0].find("a").string.strip()
    for expected_col, th in zip(("content",), ths[1:]):
        a = th.find("a")
        assert expected_col == a.string
        assert a["href"].endswith(
            "/simple_primary_key?_size=3&_sort={}".format(expected_col)
        )
        assert ["nofollow"] == a["rel"]
    # NOTE(review): everything from here up to test_config_template_debug_off
    # was destroyed when markup was stripped from this chunk; the expected
    # rows below are reconstructed — confirm against the rendered table and
    # check upstream history for any additional tests lost from this region.
    assert [
        [
            '<td class="col-id type-pk"><a href="/fixtures/simple_primary_key/1">1</a></td>',
            '<td class="col-content type-str">hello</td>',
        ],
        [
            '<td class="col-id type-pk"><a href="/fixtures/simple_primary_key/2">2</a></td>',
            '<td class="col-content type-str">world</td>',
        ],
        [
            '<td class="col-id type-pk"><a href="/fixtures/simple_primary_key/3">3</a></td>',
            '<td class="col-content type-str">\xa0</td>',
        ],
    ] == [[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")]


def test_config_template_debug_on():
    # NOTE(review): reconstructed — only the tail of this test ('{")') survived
    # the markup stripping; confirm the name and body against upstream history.
    with make_app_client(config={"template_debug": True}) as client:
        response = client.get("/fixtures/facetable?_context=1")
        assert response.status == 200
        assert response.text.startswith("{")


def test_config_template_debug_off(app_client):
    response = app_client.get("/fixtures/facetable?_context=1")
    assert response.status == 200
    assert not response.text.startswith("{")


def test_debug_context_includes_extra_template_vars():
    # https://github.com/simonw/datasette/issues/693
    with make_app_client(config={"template_debug": True}) as client:
        response = client.get("/fixtures/facetable?_context=1")
        # scope_path is added by PLUGIN1
        assert "scope_path" in response.text


def test_metadata_sort(app_client):
    response = app_client.get("/fixtures/facet_cities")
    assert response.status == 200
    table = Soup(response.body, "html.parser").find("table")
    assert table["class"] == ["rows-and-columns"]
    ths = table.findAll("th")
    assert ["id", "name\xa0▼"] == [th.find("a").string.strip() for th in ths]
    rows = [[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")]
    # NOTE(review): the <td>/<a> markup in these expected rows was stripped
    # from this chunk (only "3 ", "Detroit ", ... survived); reconstructed —
    # confirm against the rendered table.
    expected = [
        [
            '<td class="col-id type-pk"><a href="/fixtures/facet_cities/3">3</a></td>',
            '<td class="col-name type-str">Detroit</td>',
        ],
        [
            '<td class="col-id type-pk"><a href="/fixtures/facet_cities/2">2</a></td>',
            '<td class="col-name type-str">Los Angeles</td>',
        ],
        [
            '<td class="col-id type-pk"><a href="/fixtures/facet_cities/4">4</a></td>',
            '<td class="col-name type-str">Memnonia</td>',
        ],
        [
            '<td class="col-id type-pk"><a href="/fixtures/facet_cities/1">1</a></td>',
            '<td class="col-name type-str">San Francisco</td>',
        ],
    ]
    assert expected == rows
    # Make sure you can reverse that sort order
    response = app_client.get("/fixtures/facet_cities?_sort_desc=name")
    assert response.status == 200
    table = Soup(response.body, "html.parser").find("table")
    rows = [[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")]
    assert list(reversed(expected)) == rows


def test_metadata_sort_desc(app_client):
    response = app_client.get("/fixtures/attraction_characteristic")
    assert response.status == 200
    table = Soup(response.body, "html.parser").find("table")
    assert table["class"] == ["rows-and-columns"]
    ths = table.findAll("th")
    assert ["pk\xa0▲", "name"] == [th.find("a").string.strip() for th in ths]
    rows = [[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")]
    # NOTE(review): markup reconstructed, as in test_metadata_sort — confirm
    # against the rendered table.
    expected = [
        [
            '<td class="col-pk type-pk"><a href="/fixtures/attraction_characteristic/2">2</a></td>',
            '<td class="col-name type-str">Paranormal</td>',
        ],
        [
            '<td class="col-pk type-pk"><a href="/fixtures/attraction_characteristic/1">1</a></td>',
            '<td class="col-name type-str">Museum</td>',
        ],
    ]
    assert expected == rows
    # Make sure you can reverse that sort order
    response = app_client.get("/fixtures/attraction_characteristic?_sort=pk")
    assert response.status == 200
    table = Soup(response.body, "html.parser").find("table")
    rows = [[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")]
    assert list(reversed(expected)) == rows


@pytest.mark.parametrize(
    "path",
    [
        "/",
        "/fixtures",
        "/fixtures/compound_three_primary_keys",
        "/fixtures/compound_three_primary_keys/a,a,a",
        "/fixtures/paginated_view",
        "/fixtures/facetable",
    ],
)
def test_base_url_config(app_client_base_url_prefix, path):
    client = app_client_base_url_prefix
    response = client.get("/prefix/" + path.lstrip("/"))
    soup = Soup(response.body, "html.parser")
    for el in soup.findAll(["a", "link", "script"]):
        if "href" in el.attrs:
            href = el["href"]
        elif "src" in el.attrs:
            href = el["src"]
        else:
            continue  # Could be an inline <script> with no src attribute
        if (
            not href.startswith("#")
            and href
            not in {
                "https://github.com/simonw/datasette",
                "https://github.com/simonw/datasette/blob/master/LICENSE",
                "https://github.com/simonw/datasette/blob/master/tests/fixtures.py",
                "/login-as-root",  # Only used for the latest.datasette.io demo
            }
            and not href.startswith("https://plugin-example.com/")
        ):
            # If this has been made absolute it may start http://localhost/
            if href.startswith("http://localhost/"):
                # Strip scheme+host but keep the path's leading "/" so the
                # startswith("/prefix/") assertion below still applies.
                # (Bug fix: this previously read len("http://localost/") — a
                # typo that only worked by accident of being one char short.)
                href = href[len("http://localhost") :]
            assert href.startswith("/prefix/"), {
                "path": path,
                "href_or_src": href,
                "element_parent": str(el.parent),
            }


@pytest.mark.parametrize(
    "path,expected",
    [
        (
            "/fixtures/neighborhood_search",
            "/fixtures?sql=%0Aselect+neighborhood%2C+facet_cities.name%2C+state%0Afrom+facetable%0A++++join+facet_cities%0A++++++++on+facetable.city_id+%3D+facet_cities.id%0Awhere+neighborhood+like+%27%25%27+%7C%7C+%3Atext+%7C%7C+%27%25%27%0Aorder+by+neighborhood%3B%0A&text=",
        ),
        (
            "/fixtures/neighborhood_search?text=ber",
            "/fixtures?sql=%0Aselect+neighborhood%2C+facet_cities.name%2C+state%0Afrom+facetable%0A++++join+facet_cities%0A++++++++on+facetable.city_id+%3D+facet_cities.id%0Awhere+neighborhood+like+%27%25%27+%7C%7C+%3Atext+%7C%7C+%27%25%27%0Aorder+by+neighborhood%3B%0A&text=ber",
        ),
        ("/fixtures/pragma_cache_size", None),
        (
            "/fixtures/𝐜𝐢𝐭𝐢𝐞𝐬",
            "/fixtures?sql=select+id%2C+name+from+facet_cities+order+by+id+limit+1%3B",
        ),
        ("/fixtures/magic_parameters", None),
    ],
)
def test_edit_sql_link_on_canned_queries(app_client, path, expected):
    response = app_client.get(path)
    # NOTE(review): the <a> markup was stripped from this chunk (only
    # "Edit SQL" survived); reconstructed — confirm the class name against the
    # canned-query template.
    expected_link = '<a href="{}" class="canned-query-edit-sql">Edit SQL</a>'.format(
        expected
    )
    if expected:
        assert expected_link in response.text
    else:
        assert "Edit SQL" not in response.text


@pytest.mark.parametrize("permission_allowed", [True, False])
def test_edit_sql_link_not_shown_if_user_lacks_permission(permission_allowed):
    with make_app_client(
        metadata={
            "allow_sql": None if permission_allowed else {"id": "not-you"},
            "databases": {"fixtures": {"queries": {"simple": "select 1 + 1"}}},
        }
    ) as client:
        response = client.get("/fixtures/simple")
        if permission_allowed:
            assert "Edit SQL" in response.text
        else:
            assert "Edit SQL" not in response.text


@pytest.mark.parametrize(
    "actor_id,should_have_links,should_not_have_links",
    [
        (None, None, None),
        ("test", None, ["/-/permissions"]),
        ("root", ["/-/permissions", "/-/allow-debug", "/-/metadata"], None),
    ],
)
def test_navigation_menu_links(
    app_client, actor_id, should_have_links, should_not_have_links
):
    cookies = {}
    if actor_id:
        cookies = {"ds_actor": app_client.actor_cookie({"id": actor_id})}
    html = app_client.get("/", cookies=cookies).text
    soup = Soup(html, "html.parser")
    details = soup.find("nav").find("details")
    if not actor_id:
        # Should not show a menu
        assert details is None
        return
    # They are logged in: should show a menu
    assert details is not None
    # And a logout form
    assert details.find("form") is not None
    if should_have_links:
        for link in should_have_links:
            assert (
                details.find("a", {"href": link}) is not None
            ), "{} expected but missing from nav menu".format(link)
    if should_not_have_links:
        for link in should_not_have_links:
            assert (
                details.find("a", {"href": link}) is None
            ), "{} found but should not have been in nav menu".format(link)