Mirror of https://github.com/simonw/datasette

Test improvements and fixed deprecation warnings (#2464)

* `asyncio_default_fixture_loop_scope = function`
* Fix a bunch of BeautifulSoup deprecation warnings
* Fix for PytestUnraisableExceptionWarning: Exception ignored in: <_io.FileIO [closed]>
* xfail for sql_time_limit tests (these can be flaky in CI)

Refs #2461

parent 962da77d61
commit 53a3b3c80e
@@ -31,7 +31,7 @@ jobs:
       run: |-
         ls -lah
         cat .coveragerc
-        pytest -m "not serial" --cov=datasette --cov-config=.coveragerc --cov-report xml:coverage.xml --cov-report term
+        pytest -m "not serial" --cov=datasette --cov-config=.coveragerc --cov-report xml:coverage.xml --cov-report term -x
         ls -lah
     - name: Upload coverage report
       uses: codecov/codecov-action@v1
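Note: the added `-x` makes pytest abort on the first failure, so a broken CI run fails fast instead of finishing the full coverage pass.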
@@ -1054,7 +1054,8 @@ def resolve_env_secrets(config, environ):
         if list(config.keys()) == ["$env"]:
             return environ.get(list(config.values())[0])
         elif list(config.keys()) == ["$file"]:
-            return open(list(config.values())[0]).read()
+            with open(list(config.values())[0]) as fp:
+                return fp.read()
         else:
             return {
                 key: resolve_env_secrets(value, environ)
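For context, why the `with` block fixes the warning: the old one-liner left the file object to be closed by the garbage collector, which pytest can surface as `PytestUnraisableExceptionWarning: Exception ignored in: <_io.FileIO [closed]>`. A minimal standalone sketch (hypothetical names, not Datasette code):

    def read_leaky(path):
        return open(path).read()  # file object is only closed whenever the GC runs

    def read_clean(path):
        with open(path) as fp:  # closed deterministically when the block exits
            return fp.read()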
@@ -7,3 +7,4 @@ filterwarnings=
 markers =
     serial: tests to avoid using with pytest-xdist
 asyncio_mode = strict
+asyncio_default_fixture_loop_scope = function
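Note: recent pytest-asyncio releases (0.24+, an assumption here) emit a PytestDeprecationWarning when `asyncio_default_fixture_loop_scope` is unset; setting it to `function` pins async fixtures to the per-test event loop. A minimal sketch of the kind of fixture this option governs (hypothetical, not from this diff):

    import pytest_asyncio

    @pytest_asyncio.fixture  # with the option above, runs on a function-scoped loop
    async def fake_client():
        yield object()  # stand-in for an async client bound to the test's loop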
@@ -659,6 +659,7 @@ async def test_custom_sql(ds_client):
     }
 
 
+@pytest.mark.xfail(reason="Sometimes flaky in CI due to timing issues")
 def test_sql_time_limit(app_client_shorter_time_limit):
     response = app_client_shorter_time_limit.get(
         "/fixtures/-/query.json?sql=select+sleep(0.5)",
@@ -5,7 +5,7 @@ from pathlib import Path
 code_root = Path(__file__).parent.parent
 
 
-def test_black():
+def test_black(event_loop):
     runner = CliRunner()
     result = runner.invoke(black.main, [str(code_root), "--check"])
     assert result.exit_code == 0, result.output
@@ -36,7 +36,7 @@ def test_inspect_cli(app_client):
         assert expected_count == database["tables"][table_name]["count"]
 
 
-def test_inspect_cli_writes_to_file(app_client):
+def test_inspect_cli_writes_to_file(event_loop, app_client):
     runner = CliRunner()
     result = runner.invoke(
         cli, ["inspect", "fixtures.db", "--inspect-file", "foo.json"]
@@ -45,7 +45,7 @@ def test_homepage(app_client_two_attached_databases):
     )
     # We should only show visible, not hidden tables here:
     table_links = [
-        {"href": a["href"], "text": a.text.strip()} for a in links_p.findAll("a")
+        {"href": a["href"], "text": a.text.strip()} for a in links_p.find_all("a")
     ]
     assert [
         {"href": r"/extra+database/searchable_fts", "text": "searchable_fts"},
@@ -203,6 +203,7 @@ async def test_disallowed_custom_sql_pragma(ds_client):
     )
 
 
+@pytest.mark.xfail(reason="Sometimes flaky in CI due to timing issues")
 def test_sql_time_limit(app_client_shorter_time_limit):
     response = app_client_shorter_time_limit.get(
         "/fixtures/-/query?sql=select+sleep(0.5)"
@@ -226,7 +227,7 @@ def test_row_page_does_not_truncate():
         assert table["class"] == ["rows-and-columns"]
         assert ["Mission"] == [
             td.string
-            for td in table.findAll("td", {"class": "col-neighborhood-b352a7"})
+            for td in table.find_all("td", {"class": "col-neighborhood-b352a7"})
         ]
 
 
@@ -242,7 +243,7 @@ def test_query_page_truncates():
         )
         assert response.status_code == 200
         table = Soup(response.content, "html.parser").find("table")
-        tds = table.findAll("td")
+        tds = table.find_all("td")
         assert [str(td) for td in tds] == [
             '<td class="col-a">this …</td>',
             '<td class="col-b"><a href="https://example.com/">http…</a></td>',
@@ -407,7 +408,7 @@ async def test_row_links_from_other_tables(
     soup = Soup(response.text, "html.parser")
     h2 = soup.find("h2")
     assert h2.text == "Links from other tables"
-    li = h2.findNext("ul").find("li")
+    li = h2.find_next("ul").find("li")
     text = re.sub(r"\s+", " ", li.text.strip())
     assert text == expected_text
     link = li.find("a")["href"]
@@ -501,7 +502,7 @@ def test_database_download_for_immutable():
         # Regular page should have a download link
         response = client.get("/fixtures")
         soup = Soup(response.content, "html.parser")
-        assert len(soup.findAll("a", {"href": re.compile(r"\.db$")}))
+        assert len(soup.find_all("a", {"href": re.compile(r"\.db$")}))
         # Check we can actually download it
         download_response = client.get("/fixtures.db")
         assert download_response.status_code == 200
@@ -530,7 +531,7 @@ def test_database_download_disallowed_for_mutable(app_client):
     # Use app_client because we need a file database, not in-memory
     response = app_client.get("/fixtures")
     soup = Soup(response.content, "html.parser")
-    assert len(soup.findAll("a", {"href": re.compile(r"\.db$")})) == 0
+    assert len(soup.find_all("a", {"href": re.compile(r"\.db$")})) == 0
     assert app_client.get("/fixtures.db").status_code == 403
 
 
@@ -539,7 +540,7 @@ def test_database_download_disallowed_for_memory():
         # Memory page should NOT have a download link
         response = client.get("/_memory")
         soup = Soup(response.content, "html.parser")
-        assert 0 == len(soup.findAll("a", {"href": re.compile(r"\.db$")}))
+        assert 0 == len(soup.find_all("a", {"href": re.compile(r"\.db$")}))
         assert 404 == client.get("/_memory.db").status
 
 
@@ -549,7 +550,7 @@ def test_allow_download_off():
     ) as client:
         response = client.get("/fixtures")
         soup = Soup(response.content, "html.parser")
-        assert not len(soup.findAll("a", {"href": re.compile(r"\.db$")}))
+        assert not len(soup.find_all("a", {"href": re.compile(r"\.db$")}))
         # Accessing URL directly should 403
         response = client.get("/fixtures.db")
         assert 403 == response.status
@@ -559,7 +560,7 @@ def test_allow_sql_off():
     with make_app_client(config={"allow_sql": {}}) as client:
         response = client.get("/fixtures")
         soup = Soup(response.content, "html.parser")
-        assert not len(soup.findAll("textarea", {"name": "sql"}))
+        assert not len(soup.find_all("textarea", {"name": "sql"}))
         # The table page should no longer show "View and edit SQL"
         response = client.get("/fixtures/sortable")
         assert b"View and edit SQL" not in response.content
@@ -855,7 +856,7 @@ def test_base_url_config(app_client_base_url_prefix, path, use_prefix):
     soup = Soup(response.content, "html.parser")
     for form in soup.select("form"):
         assert form["action"].startswith("/prefix")
-    for el in soup.findAll(["a", "link", "script"]):
+    for el in soup.find_all(["a", "link", "script"]):
         if "href" in el.attrs:
             href = el["href"]
         elif "src" in el.attrs:
@@ -390,7 +390,7 @@ async def test_permissions_debug(ds_client, filter_):
         assert fragment in response.text
     # Should show one failure and one success
     soup = Soup(response.text, "html.parser")
-    check_divs = soup.findAll("div", {"class": "check"})
+    check_divs = soup.find_all("div", {"class": "check"})
     checks = [
         {
             "action": div.select_one(".check-action").text,
@@ -929,6 +929,7 @@ async def test_actor_endpoint_allows_any_token():
     }
 
 
+@pytest.mark.serial
 @pytest.mark.parametrize(
     "options,expected",
     (
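Note: the `serial` marker is registered in the pytest configuration hunk above, and the workflow's `pytest -m "not serial"` deselects marked tests from the parallel pytest-xdist run so they can be executed in a separate, single-process pass. Illustrative usage (hypothetical test):

    import pytest

    @pytest.mark.serial  # deselected by `pytest -m "not serial"`
    def test_touches_shared_state():
        ...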
@@ -983,7 +984,7 @@ async def test_actor_endpoint_allows_any_token():
         ),
     ),
 )
-def test_cli_create_token(event_loop, options, expected):
+def test_cli_create_token(options, expected):
     runner = CliRunner()
     result1 = runner.invoke(
         cli,
@@ -113,7 +113,7 @@ async def test_hook_plugin_prepare_connection_arguments(ds_client):
 async def test_hook_extra_css_urls(ds_client, path, expected_decoded_object):
     response = await ds_client.get(path)
     assert response.status_code == 200
-    links = Soup(response.text, "html.parser").findAll("link")
+    links = Soup(response.text, "html.parser").find_all("link")
     special_href = [
         link
         for link in links
@@ -128,7 +128,7 @@ async def test_hook_extra_css_urls(ds_client, path, expected_decoded_object):
 @pytest.mark.asyncio
 async def test_hook_extra_js_urls(ds_client):
     response = await ds_client.get("/")
-    scripts = Soup(response.text, "html.parser").findAll("script")
+    scripts = Soup(response.text, "html.parser").find_all("script")
     script_attrs = [s.attrs for s in scripts]
     for attrs in [
         {
@@ -153,7 +153,7 @@ async def test_plugins_with_duplicate_js_urls(ds_client):
     # What matters is that https://plugin-example.datasette.io/jquery.js is only there once
     # and it comes before plugin1.js and plugin2.js which could be in either
     # order
-    scripts = Soup(response.text, "html.parser").findAll("script")
+    scripts = Soup(response.text, "html.parser").find_all("script")
     srcs = [s["src"] for s in scripts if s.get("src")]
     # No duplicates allowed:
     assert len(srcs) == len(set(srcs))
@@ -541,7 +541,7 @@ async def test_hook_register_output_renderer_can_render(ds_client):
     links = (
         Soup(response.text, "html.parser")
         .find("p", {"class": "export-links"})
-        .findAll("a")
+        .find_all("a")
     )
     actual = [link["href"] for link in links]
     # Should not be present because we sent ?_no_can_render=1
@@ -68,13 +68,13 @@ def test_table_cell_truncation():
             "Arcad…",
         ] == [
             td.string
-            for td in table.findAll("td", {"class": "col-neighborhood-b352a7"})
+            for td in table.find_all("td", {"class": "col-neighborhood-b352a7"})
         ]
         # URLs should be truncated too
         response2 = client.get("/fixtures/roadside_attractions")
         assert response2.status == 200
         table = Soup(response2.body, "html.parser").find("table")
-        tds = table.findAll("td", {"class": "col-url"})
+        tds = table.find_all("td", {"class": "col-url"})
         assert [str(td) for td in tds] == [
             '<td class="col-url type-str"><a href="https://www.mysteryspot.com/">http…</a></td>',
             '<td class="col-url type-str"><a href="https://winchestermysteryhouse.com/">http…</a></td>',
@@ -210,7 +210,7 @@ async def test_searchable_view_persists_fts_table(ds_client):
     response = await ds_client.get(
         "/fixtures/searchable_view?_fts_table=searchable_fts&_fts_pk=pk"
     )
-    inputs = Soup(response.text, "html.parser").find("form").findAll("input")
+    inputs = Soup(response.text, "html.parser").find("form").find_all("input")
     hiddens = [i for i in inputs if i["type"] == "hidden"]
     assert [("_fts_table", "searchable_fts"), ("_fts_pk", "pk")] == [
         (hidden["name"], hidden["value"]) for hidden in hiddens
@@ -234,7 +234,7 @@ async def test_sort_by_desc_redirects(ds_client):
 async def test_sort_links(ds_client):
     response = await ds_client.get("/fixtures/sortable?_sort=sortable")
     assert response.status_code == 200
-    ths = Soup(response.text, "html.parser").findAll("th")
+    ths = Soup(response.text, "html.parser").find_all("th")
     attrs_and_link_attrs = [
         {
             "attrs": th.attrs,
@@ -341,7 +341,7 @@ async def test_facet_display(ds_client):
     )
     assert response.status_code == 200
     soup = Soup(response.text, "html.parser")
-    divs = soup.find("div", {"class": "facet-results"}).findAll("div")
+    divs = soup.find("div", {"class": "facet-results"}).find_all("div")
     actual = []
     for div in divs:
         actual.append(
@@ -353,7 +353,7 @@ async def test_facet_display(ds_client):
                         "qs": a["href"].split("?")[-1],
                         "count": int(str(a.parent).split("</a>")[1].split("<")[0]),
                     }
-                    for a in div.find("ul").findAll("a")
+                    for a in div.find("ul").find_all("a")
                 ],
             }
         )
@@ -422,7 +422,7 @@ async def test_facets_persist_through_filter_form(ds_client):
         "/fixtures/facetable?_facet=planet_int&_facet=_city_id&_facet_array=tags"
     )
     assert response.status_code == 200
-    inputs = Soup(response.text, "html.parser").find("form").findAll("input")
+    inputs = Soup(response.text, "html.parser").find("form").find_all("input")
     hiddens = [i for i in inputs if i["type"] == "hidden"]
     assert [(hidden["name"], hidden["value"]) for hidden in hiddens] == [
         ("_facet", "planet_int"),
@@ -435,7 +435,7 @@ async def test_facets_persist_through_filter_form(ds_client):
 async def test_next_does_not_persist_in_hidden_field(ds_client):
     response = await ds_client.get("/fixtures/searchable?_size=1&_next=1")
     assert response.status_code == 200
-    inputs = Soup(response.text, "html.parser").find("form").findAll("input")
+    inputs = Soup(response.text, "html.parser").find("form").find_all("input")
     hiddens = [i for i in inputs if i["type"] == "hidden"]
     assert [(hidden["name"], hidden["value"]) for hidden in hiddens] == [
         ("_size", "1"),
@@ -448,7 +448,7 @@ async def test_table_html_simple_primary_key(ds_client):
     assert response.status_code == 200
     table = Soup(response.text, "html.parser").find("table")
     assert table["class"] == ["rows-and-columns"]
-    ths = table.findAll("th")
+    ths = table.find_all("th")
     assert "id\xa0▼" == ths[0].find("a").string.strip()
     for expected_col, th in zip(("content",), ths[1:]):
         a = th.find("a")
@@ -479,7 +479,7 @@ async def test_table_csv_json_export_interface(ds_client):
     links = (
         Soup(response.text, "html.parser")
         .find("p", {"class": "export-links"})
-        .findAll("a")
+        .find_all("a")
     )
     actual = [link["href"] for link in links]
     expected = [
@@ -493,7 +493,7 @@ async def test_table_csv_json_export_interface(ds_client):
     assert expected == actual
     # And the advanced export box at the bottom:
     div = Soup(response.text, "html.parser").find("div", {"class": "advanced-export"})
-    json_links = [a["href"] for a in div.find("p").findAll("a")]
+    json_links = [a["href"] for a in div.find("p").find_all("a")]
     assert [
         "/fixtures/simple_primary_key.json?id__gt=2",
         "/fixtures/simple_primary_key.json?id__gt=2&_shape=array",
@@ -503,7 +503,7 @@ async def test_table_csv_json_export_interface(ds_client):
     # And the CSV form
     form = div.find("form")
     assert form["action"].endswith("/simple_primary_key.csv")
-    inputs = [str(input) for input in form.findAll("input")]
+    inputs = [str(input) for input in form.find_all("input")]
     assert [
         '<input name="_dl" type="checkbox"/>',
         '<input type="submit" value="Export CSV"/>',
@@ -519,7 +519,7 @@ async def test_csv_json_export_links_include_labels_if_foreign_keys(ds_client):
     links = (
         Soup(response.text, "html.parser")
         .find("p", {"class": "export-links"})
-        .findAll("a")
+        .find_all("a")
     )
     actual = [link["href"] for link in links]
     expected = [
@@ -571,7 +571,7 @@ async def test_rowid_sortable_no_primary_key(ds_client):
     assert response.status_code == 200
     table = Soup(response.text, "html.parser").find("table")
     assert table["class"] == ["rows-and-columns"]
-    ths = table.findAll("th")
+    ths = table.find_all("th")
     assert "rowid\xa0▼" == ths[1].find("a").string.strip()
 
 
@@ -580,7 +580,7 @@ async def test_table_html_compound_primary_key(ds_client):
     response = await ds_client.get("/fixtures/compound_primary_key")
     assert response.status_code == 200
     table = Soup(response.text, "html.parser").find("table")
-    ths = table.findAll("th")
+    ths = table.find_all("th")
     assert "Link" == ths[0].string.strip()
     for expected_col, th in zip(("pk1", "pk2", "content"), ths[1:]):
         a = th.find("a")
@@ -811,7 +811,7 @@ async def test_advanced_export_box(ds_client, path, has_object, has_stream, has_
     if has_object:
         expected_json_shapes.append("object")
     div = soup.find("div", {"class": "advanced-export"})
-    assert expected_json_shapes == [a.text for a in div.find("p").findAll("a")]
+    assert expected_json_shapes == [a.text for a in div.find("p").find_all("a")]
     # "stream all rows" option
     if has_stream:
         assert "stream all rows" in str(div)
@@ -828,13 +828,13 @@ async def test_extra_where_clauses(ds_client):
     soup = Soup(response.text, "html.parser")
     div = soup.select(".extra-wheres")[0]
     assert "2 extra where clauses" == div.find("h3").text
-    hrefs = [a["href"] for a in div.findAll("a")]
+    hrefs = [a["href"] for a in div.find_all("a")]
     assert [
         "/fixtures/facetable?_where=_city_id%3D1",
         "/fixtures/facetable?_where=_neighborhood%3D%27Dogpatch%27",
     ] == hrefs
     # These should also be persisted as hidden fields
-    inputs = soup.find("form").findAll("input")
+    inputs = soup.find("form").find_all("input")
     hiddens = [i for i in inputs if i["type"] == "hidden"]
     assert [("_where", "_neighborhood='Dogpatch'"), ("_where", "_city_id=1")] == [
         (hidden["name"], hidden["value"]) for hidden in hiddens
@@ -859,7 +859,7 @@ async def test_extra_where_clauses(ds_client):
 async def test_other_hidden_form_fields(ds_client, path, expected_hidden):
     response = await ds_client.get(path)
     soup = Soup(response.text, "html.parser")
-    inputs = soup.find("form").findAll("input")
+    inputs = soup.find("form").find_all("input")
     hiddens = [i for i in inputs if i["type"] == "hidden"]
     assert [(hidden["name"], hidden["value"]) for hidden in hiddens] == expected_hidden
 
			
			@ -878,7 +878,7 @@ async def test_search_and_sort_fields_not_duplicated(ds_client, path, expected_h
 | 
			
		|||
    # https://github.com/simonw/datasette/issues/1214
 | 
			
		||||
    response = await ds_client.get(path)
 | 
			
		||||
    soup = Soup(response.text, "html.parser")
 | 
			
		||||
    inputs = soup.find("form").findAll("input")
 | 
			
		||||
    inputs = soup.find("form").find_all("input")
 | 
			
		||||
    hiddens = [i for i in inputs if i["type"] == "hidden"]
 | 
			
		||||
    assert [(hidden["name"], hidden["value"]) for hidden in hiddens] == expected_hidden
 | 
			
		||||
 | 
			
		||||
| 
						 | 
				
			
@@ -960,7 +960,7 @@ async def test_metadata_sort(ds_client):
     assert response.status_code == 200
     table = Soup(response.text, "html.parser").find("table")
     assert table["class"] == ["rows-and-columns"]
-    ths = table.findAll("th")
+    ths = table.find_all("th")
     assert ["id", "name\xa0▼"] == [th.find("a").string.strip() for th in ths]
     rows = [[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")]
     expected = [
@@ -996,7 +996,7 @@ async def test_metadata_sort_desc(ds_client):
     assert response.status_code == 200
     table = Soup(response.text, "html.parser").find("table")
     assert table["class"] == ["rows-and-columns"]
-    ths = table.findAll("th")
+    ths = table.find_all("th")
     assert ["pk\xa0▲", "name"] == [th.find("a").string.strip() for th in ths]
     rows = [[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")]
     expected = [
@@ -1098,7 +1098,7 @@ async def test_column_metadata(ds_client):
     response = await ds_client.get("/fixtures/roadside_attractions")
     soup = Soup(response.text, "html.parser")
     dl = soup.find("dl")
-    assert [(dt.text, dt.nextSibling.text) for dt in dl.findAll("dt")] == [
+    assert [(dt.text, dt.next_sibling.text) for dt in dl.find_all("dt")] == [
         ("address", "The street address for the attraction"),
         ("name", "The name of the attraction"),
     ]
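The same snake_case migration covers navigation properties: `nextSibling` above becomes `next_sibling`. A quick standalone check (not Datasette code):

    from bs4 import BeautifulSoup

    dl = BeautifulSoup("<dl><dt>name</dt><dd>value</dd></dl>", "html.parser")
    assert dl.find("dt").next_sibling.text == "value"  # preferred over .nextSibling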
@@ -7,7 +7,7 @@ def last_event(datasette):
 
 
 def assert_footer_links(soup):
-    footer_links = soup.find("footer").findAll("a")
+    footer_links = soup.find("footer").find_all("a")
     assert 4 == len(footer_links)
     datasette_link, license_link, source_link, about_link = footer_links
     assert "Datasette" == datasette_link.text.strip()