Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix asyncio tests #2464

Merged
merged 8 commits into from
Feb 4, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/test-coverage.yml
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ jobs:
run: |-
ls -lah
cat .coveragerc
pytest -m "not serial" --cov=datasette --cov-config=.coveragerc --cov-report xml:coverage.xml --cov-report term
pytest -m "not serial" --cov=datasette --cov-config=.coveragerc --cov-report xml:coverage.xml --cov-report term -x
ls -lah
- name: Upload coverage report
uses: codecov/codecov-action@v1
Expand Down
3 changes: 2 additions & 1 deletion datasette/utils/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -1054,7 +1054,8 @@ def resolve_env_secrets(config, environ):
if list(config.keys()) == ["$env"]:
return environ.get(list(config.values())[0])
elif list(config.keys()) == ["$file"]:
return open(list(config.values())[0]).read()
with open(list(config.values())[0]) as fp:
return fp.read()
else:
return {
key: resolve_env_secrets(value, environ)
Expand Down
1 change: 1 addition & 0 deletions pytest.ini
Original file line number Diff line number Diff line change
Expand Up @@ -7,3 +7,4 @@ filterwarnings=
markers =
serial: tests to avoid using with pytest-xdist
asyncio_mode = strict
asyncio_default_fixture_loop_scope = function
1 change: 1 addition & 0 deletions tests/test_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -659,6 +659,7 @@ async def test_custom_sql(ds_client):
}


@pytest.mark.xfail(reason="Sometimes flaky in CI due to timing issues")
def test_sql_time_limit(app_client_shorter_time_limit):
response = app_client_shorter_time_limit.get(
"/fixtures/-/query.json?sql=select+sleep(0.5)",
Expand Down
2 changes: 1 addition & 1 deletion tests/test_black.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
code_root = Path(__file__).parent.parent


def test_black():
def test_black(event_loop):
runner = CliRunner()
result = runner.invoke(black.main, [str(code_root), "--check"])
assert result.exit_code == 0, result.output
2 changes: 1 addition & 1 deletion tests/test_cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ def test_inspect_cli(app_client):
assert expected_count == database["tables"][table_name]["count"]


def test_inspect_cli_writes_to_file(app_client):
def test_inspect_cli_writes_to_file(event_loop, app_client):
runner = CliRunner()
result = runner.invoke(
cli, ["inspect", "fixtures.db", "--inspect-file", "foo.json"]
Expand Down
21 changes: 11 additions & 10 deletions tests/test_html.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ def test_homepage(app_client_two_attached_databases):
)
# We should only show visible, not hidden tables here:
table_links = [
{"href": a["href"], "text": a.text.strip()} for a in links_p.findAll("a")
{"href": a["href"], "text": a.text.strip()} for a in links_p.find_all("a")
]
assert [
{"href": r"/extra+database/searchable_fts", "text": "searchable_fts"},
Expand Down Expand Up @@ -203,6 +203,7 @@ async def test_disallowed_custom_sql_pragma(ds_client):
)


@pytest.mark.xfail(reason="Sometimes flaky in CI due to timing issues")
def test_sql_time_limit(app_client_shorter_time_limit):
response = app_client_shorter_time_limit.get(
"/fixtures/-/query?sql=select+sleep(0.5)"
Expand All @@ -226,7 +227,7 @@ def test_row_page_does_not_truncate():
assert table["class"] == ["rows-and-columns"]
assert ["Mission"] == [
td.string
for td in table.findAll("td", {"class": "col-neighborhood-b352a7"})
for td in table.find_all("td", {"class": "col-neighborhood-b352a7"})
]


Expand All @@ -242,7 +243,7 @@ def test_query_page_truncates():
)
assert response.status_code == 200
table = Soup(response.content, "html.parser").find("table")
tds = table.findAll("td")
tds = table.find_all("td")
assert [str(td) for td in tds] == [
'<td class="col-a">this …</td>',
'<td class="col-b"><a href="https://example.com/">http…</a></td>',
Expand Down Expand Up @@ -407,7 +408,7 @@ async def test_row_links_from_other_tables(
soup = Soup(response.text, "html.parser")
h2 = soup.find("h2")
assert h2.text == "Links from other tables"
li = h2.findNext("ul").find("li")
li = h2.find_next("ul").find("li")
text = re.sub(r"\s+", " ", li.text.strip())
assert text == expected_text
link = li.find("a")["href"]
Expand Down Expand Up @@ -501,7 +502,7 @@ def test_database_download_for_immutable():
# Regular page should have a download link
response = client.get("/fixtures")
soup = Soup(response.content, "html.parser")
assert len(soup.findAll("a", {"href": re.compile(r"\.db$")}))
assert len(soup.find_all("a", {"href": re.compile(r"\.db$")}))
# Check we can actually download it
download_response = client.get("/fixtures.db")
assert download_response.status_code == 200
Expand Down Expand Up @@ -530,7 +531,7 @@ def test_database_download_disallowed_for_mutable(app_client):
# Use app_client because we need a file database, not in-memory
response = app_client.get("/fixtures")
soup = Soup(response.content, "html.parser")
assert len(soup.findAll("a", {"href": re.compile(r"\.db$")})) == 0
assert len(soup.find_all("a", {"href": re.compile(r"\.db$")})) == 0
assert app_client.get("/fixtures.db").status_code == 403


Expand All @@ -539,7 +540,7 @@ def test_database_download_disallowed_for_memory():
# Memory page should NOT have a download link
response = client.get("/_memory")
soup = Soup(response.content, "html.parser")
assert 0 == len(soup.findAll("a", {"href": re.compile(r"\.db$")}))
assert 0 == len(soup.find_all("a", {"href": re.compile(r"\.db$")}))
assert 404 == client.get("/_memory.db").status


Expand All @@ -549,7 +550,7 @@ def test_allow_download_off():
) as client:
response = client.get("/fixtures")
soup = Soup(response.content, "html.parser")
assert not len(soup.findAll("a", {"href": re.compile(r"\.db$")}))
assert not len(soup.find_all("a", {"href": re.compile(r"\.db$")}))
# Accessing URL directly should 403
response = client.get("/fixtures.db")
assert 403 == response.status
Expand All @@ -559,7 +560,7 @@ def test_allow_sql_off():
with make_app_client(config={"allow_sql": {}}) as client:
response = client.get("/fixtures")
soup = Soup(response.content, "html.parser")
assert not len(soup.findAll("textarea", {"name": "sql"}))
assert not len(soup.find_all("textarea", {"name": "sql"}))
# The table page should no longer show "View and edit SQL"
response = client.get("/fixtures/sortable")
assert b"View and edit SQL" not in response.content
Expand Down Expand Up @@ -855,7 +856,7 @@ def test_base_url_config(app_client_base_url_prefix, path, use_prefix):
soup = Soup(response.content, "html.parser")
for form in soup.select("form"):
assert form["action"].startswith("/prefix")
for el in soup.findAll(["a", "link", "script"]):
for el in soup.find_all(["a", "link", "script"]):
if "href" in el.attrs:
href = el["href"]
elif "src" in el.attrs:
Expand Down
5 changes: 3 additions & 2 deletions tests/test_permissions.py
Original file line number Diff line number Diff line change
Expand Up @@ -390,7 +390,7 @@ async def test_permissions_debug(ds_client, filter_):
assert fragment in response.text
# Should show one failure and one success
soup = Soup(response.text, "html.parser")
check_divs = soup.findAll("div", {"class": "check"})
check_divs = soup.find_all("div", {"class": "check"})
checks = [
{
"action": div.select_one(".check-action").text,
Expand Down Expand Up @@ -929,6 +929,7 @@ async def test_actor_endpoint_allows_any_token():
}


@pytest.mark.serial
@pytest.mark.parametrize(
"options,expected",
(
Expand Down Expand Up @@ -983,7 +984,7 @@ async def test_actor_endpoint_allows_any_token():
),
),
)
def test_cli_create_token(event_loop, options, expected):
def test_cli_create_token(options, expected):
runner = CliRunner()
result1 = runner.invoke(
cli,
Expand Down
8 changes: 4 additions & 4 deletions tests/test_plugins.py
Original file line number Diff line number Diff line change
Expand Up @@ -113,7 +113,7 @@ async def test_hook_plugin_prepare_connection_arguments(ds_client):
async def test_hook_extra_css_urls(ds_client, path, expected_decoded_object):
response = await ds_client.get(path)
assert response.status_code == 200
links = Soup(response.text, "html.parser").findAll("link")
links = Soup(response.text, "html.parser").find_all("link")
special_href = [
link
for link in links
Expand All @@ -128,7 +128,7 @@ async def test_hook_extra_css_urls(ds_client, path, expected_decoded_object):
@pytest.mark.asyncio
async def test_hook_extra_js_urls(ds_client):
response = await ds_client.get("/")
scripts = Soup(response.text, "html.parser").findAll("script")
scripts = Soup(response.text, "html.parser").find_all("script")
script_attrs = [s.attrs for s in scripts]
for attrs in [
{
Expand All @@ -153,7 +153,7 @@ async def test_plugins_with_duplicate_js_urls(ds_client):
# What matters is that https://plugin-example.datasette.io/jquery.js is only there once
# and it comes before plugin1.js and plugin2.js which could be in either
# order
scripts = Soup(response.text, "html.parser").findAll("script")
scripts = Soup(response.text, "html.parser").find_all("script")
srcs = [s["src"] for s in scripts if s.get("src")]
# No duplicates allowed:
assert len(srcs) == len(set(srcs))
Expand Down Expand Up @@ -541,7 +541,7 @@ async def test_hook_register_output_renderer_can_render(ds_client):
links = (
Soup(response.text, "html.parser")
.find("p", {"class": "export-links"})
.findAll("a")
.find_all("a")
)
actual = [link["href"] for link in links]
# Should not be present because we sent ?_no_can_render=1
Expand Down
Loading
Loading