ripgrep
asgi-log-to-sqlite/setup.py
24 py_modules=["asgi_log_to_sqlite"],
25 install_requires=["sqlite_utils~=2.3.1"],
26 extras_require={"test": ["pytest", "pytest-asyncio", "asgiref==3.1.2"]},
27 )
asgi-log-to-sqlite/test_asgi_log_to_sqlite.py
2 from asgi_log_to_sqlite import AsgiLogToSqlite
3 import sqlite_utils
4 import pytest
5
6
17
18
19 @pytest.mark.asyncio
20 async def test_log_to_sqlite(tmpdir):
21 logfile = str(tmpdir / "log.db")
44
45
46 @pytest.mark.asyncio
47 async def test_log_to_sqlite_with_more_fields(tmpdir):
48 logfile = str(tmpdir / "log2.db")
csvs-to-sqlite/CHANGELOG.md
133 Which is itself an updated version of the pattern described in http://dan-blanchard.roughdraft.io/7045057-quicker-travis-builds-that-rely-on-numpy-and-scipy-using-miniconda
134
135 I had to switch to running `pytest` directly, because `python setup.py test` was still trying to install a pandas package that involved compiling everything from scratch (which is why Travis CI builds were taking around 15 minutes).
136 - Don't include an `index` column - rely on SQLite rowid instead.
137
csvs-to-sqlite/setup.cfg
1 [aliases]
2 test=pytest
3
4 [bdist_wheel]
csvs-to-sqlite/setup.py
30 "six",
31 ],
32 extras_require={"test": ["pytest"]},
33 tests_require=["csvs-to-sqlite[test]"],
34 entry_points="""
datasette/pytest.ini
1 [pytest]
2 filterwarnings=
3 # https://github.com/pallets/jinja/issues/927
datasette/setup.py
65 datasette=datasette.cli:cli
66 """,
67 setup_requires=["pytest-runner"],
68 extras_require={
69 "docs": ["sphinx_rtd_theme", "sphinx-autobuild"],
70 "test": [
71 "pytest>=5.2.2,<6.3.0",
72 "pytest-asyncio>=0.10,<0.15",
73 "beautifulsoup4>=4.8.1,<4.10.0",
74 "black==20.8b1",
75 "pytest-timeout>=1.4.2,<1.5",
76 "trustme>=0.7,<0.8",
77 ],
datasette/setup.cfg
1 [aliases]
2 test=pytest
3
4 [flake8]
datasette-allow-permissions-debug/setup.py
31 install_requires=["datasette"],
32 extras_require={
33 "test": ["pytest", "pytest-asyncio", "httpx"]
34 },
35 tests_require=["datasette-allow-permissions-debug[test]"],
datasette-atom/setup.py
30 entry_points={"datasette": ["atom = datasette_atom"]},
31 install_requires=["datasette>=0.49", "feedgen", "bleach"],
32 extras_require={"test": ["pytest", "pytest-asyncio", "httpx"]},
33 tests_require=["datasette-atom[test]"],
34 )
datasette-auth-existing-cookies/setup.py
29 install_requires=["appdirs", "httpx", "itsdangerous"],
30 extras_require={
31 "test": ["datasette", "pytest", "pytest-asyncio", "asgiref~=3.1.2"]
32 },
33 tests_require=["datasette-auth-existing-cookies[test]"],
datasette-auth-existing-cookies/test_datasette_auth_existing_cookies.py
3 import json
4 import httpx
5 import pytest
6
7 from itsdangerous import URLSafeSerializer
22
23
24 @pytest.mark.asyncio
25 @pytest.mark.parametrize("path", ["/", "/fixtures", "/foo/bar"])
26 @pytest.mark.parametrize("next_secret", [None, "secret"])
27 async def test_redirects_to_login_page(next_secret, path):
28 auth_app = ExistingCookiesAuthTest(
53
54
55 @pytest.mark.asyncio
56 @pytest.mark.parametrize("trust_it", [True, False])
57 async def test_redirects_to_login_page_trusting_x_forwarded_proto(trust_it):
58 auth_app = ExistingCookiesAuthTest(
82
83
84 @pytest.mark.asyncio
85 async def test_allow_access_if_auth_is_returned():
86 auth_app = ExistingCookiesAuthTest(
106
107
108 @pytest.mark.asyncio
109 async def test_access_denied():
110 auth_app = ExistingCookiesAuthTest(
126
127
128 @pytest.mark.asyncio
129 async def test_headers_to_forward():
130 auth_app = ExistingCookiesAuthTest(
196
197
198 @pytest.mark.asyncio
199 async def test_scope_auth_allows_access():
200 # This test can't use httpx because I need a custom scope
datasette-auth-github/setup.py
28 "test": [
29 "datasette",
30 "pytest",
31 "pytest-asyncio",
32 "sqlite-utils",
33 "pytest-httpx",
34 ]
35 },
datasette-auth-passwords/README.md
datasette-auth-passwords/setup.py
30 entry_points={"datasette": ["auth_passwords = datasette_auth_passwords"]},
31 install_requires=["datasette>=0.44"],
32 extras_require={"test": ["pytest", "pytest-asyncio", "httpx"]},
33 tests_require=["datasette-auth-passwords[test]"],
34 package_data={
datasette-auth-tokens/setup.py
25 entry_points={"datasette": ["auth_tokens = datasette_auth_tokens"]},
26 install_requires=["datasette>=0.44",],
27 extras_require={"test": ["pytest", "pytest-asyncio", "httpx", "sqlite-utils"]},
28 tests_require=["datasette-auth-tokens[test]"],
29 )
datasette-backup/README.md
38 To run the tests:
39
40 pytest
datasette-backup/setup.py
30 entry_points={"datasette": ["backup = datasette_backup"]},
31 install_requires=["datasette", "sqlite-dump>=0.1.1"],
32 extras_require={"test": ["pytest", "pytest-asyncio", "httpx", "sqlite-utils"]},
33 tests_require=["datasette-backup[test]"],
34 )
datasette-basemap/README.md
44 To run the tests:
45
46 pytest
datasette-basemap/setup.py
30 entry_points={"datasette": ["basemap = datasette_basemap"]},
31 install_requires=["datasette"],
32 extras_require={"test": ["pytest", "pytest-asyncio"]},
33 tests_require=["datasette-basemap[test]"],
34 package_data={"datasette_basemap": ["data/*"]},
datasette-block/README.md
54 To run the tests:
55
56 pytest
datasette-block/setup.py
30 entry_points={"datasette": ["block = datasette_block"]},
31 install_requires=["datasette"],
32 extras_require={"test": ["pytest", "pytest-asyncio", "asgi-lifespan"]},
33 tests_require=["datasette-block[test]"],
34 python_requires=">=3.6",
datasette-bplist/setup.py
25 entry_points={"datasette": ["bplist = datasette_bplist"]},
26 install_requires=["datasette", "bpylist"],
27 extras_require={"test": ["pytest"]},
28 tests_require=["datasette-bplist[test]"],
29 )
datasette-block-robots/README.md
74 To run the tests:
75
76 pytest
datasette-block-robots/setup.py
30 entry_points={"datasette": ["block_robots = datasette_block_robots"]},
31 install_requires=["datasette>=0.50"],
32 extras_require={"test": ["pytest", "pytest-asyncio", "httpx"]},
33 tests_require=["datasette-block-robots[test]"],
34 )
datasette-clone/setup.py
33 """,
34 install_requires=["requests", "click"],
35 extras_require={"test": ["pytest", "requests-mock"]},
36 tests_require=["datasette-auth-github[test]"],
37 )
datasette-cluster-map/README.md
datasette-cluster-map/setup.py
36 },
37 install_requires=["datasette>=0.54", "datasette-leaflet>=0.2.2"],
38 extras_require={"test": ["pytest", "pytest-asyncio", "httpx", "sqlite-utils"]},
39 tests_require=["datasette-cluster-map[test]"],
40 )
datasette-column-inspect/setup.py
27 "datasette",
28 ],
29 extras_require={"test": ["pytest", "pytest-asyncio", "httpx", "sqlite-utils"]},
30 tests_require=["datasette-column-inspect[test]"],
31 package_data={"datasette_column_inspect": ["templates/*.html"]},
datasette-configure-fts/setup.py
25 entry_points={"datasette": ["configure_fts = datasette_configure_fts"]},
26 install_requires=["datasette>=0.51", "sqlite-utils>=2.10"],
27 extras_require={"test": ["pytest", "pytest-asyncio", "httpx"]},
28 tests_require=["datasette-configure-fts[test]"],
29 package_data={"datasette_configure_fts": ["templates/*.html"]},
datasette-configure-asgi/setup.py
25 py_modules=["datasette_configure_asgi"],
26 extras_require={
27 "test": ["pytest", "pytest-asyncio", "asgiref==3.1.2", "datasette"]
28 },
29 )
datasette-copyable/README.md
43 To run the tests:
44
45 pytest
datasette-copyable/setup.py
30 entry_points={"datasette": ["copyable = datasette_copyable"]},
31 install_requires=["datasette>=0.49", "tabulate"],
32 extras_require={"test": ["pytest", "pytest-asyncio", "httpx", "sqlite-utils"]},
33 tests_require=["datasette-copyable[test]"],
34 package_data={"datasette_copyable": ["templates/*.html"]},
datasette-css-properties/README.md
datasette-css-properties/setup.py
30 entry_points={"datasette": ["css_properties = datasette_css_properties"]},
31 install_requires=["datasette"],
32 extras_require={"test": ["pytest", "pytest-asyncio"]},
33 tests_require=["datasette-css-properties[test]"],
34 python_requires=">=3.6",
datasette-cors/setup.py
26 install_requires=["asgi-cors~=0.3"],
27 extras_require={
28 "test": ["datasette~=0.29", "pytest", "pytest-asyncio", "asgiref~=3.1.2"]
29 },
30 tests_require=["datasette-cors[test]"],
datasette-cors/test_datasette_cors.py
1 import json
2
3 import pytest
4 from asgiref.testing import ApplicationCommunicator
5 from datasette.app import Datasette
6
7
8 @pytest.mark.asyncio
9 async def test_datasette_cors_plugin_installed():
10 instance = ApplicationCommunicator(
32
33
34 @pytest.mark.asyncio
35 @pytest.mark.parametrize(
36 "request_origin,expected_cors_header",
37 [
datasette-dateutil/README.md
datasette-dateutil/setup.py
30 entry_points={"datasette": ["dateutil = datasette_dateutil"]},
31 install_requires=["datasette", "python-dateutil"],
32 extras_require={"test": ["pytest", "pytest-asyncio", "httpx"]},
33 tests_require=["datasette-dateutil[test]"],
34 )
datasette-dns/README.md
40 To run the tests:
41
42 pytest
datasette-dns/setup.py
30 entry_points={"datasette": ["dns = datasette_dns"]},
31 install_requires=["datasette", "dnspython"],
32 extras_require={"test": ["pytest", "pytest-asyncio", "httpx", "pytest-mock"]},
33 tests_require=["datasette-dns[test]"],
34 )
datasette-debug-asgi/setup.py
30 entry_points={"datasette": ["debug_asgi = datasette_debug_asgi"]},
31 install_requires=["datasette>=0.50"],
32 extras_require={"test": ["pytest", "pytest-asyncio"]},
33 tests_require=["datasette-debug-asgi[test]"],
34 python_requires=">=3.6",
datasette-debug-asgi/test_datasette_debug_asgi.py
1 from datasette.app import Datasette
2 import pytest
3
4
5 @pytest.mark.asyncio
6 async def test_datasette_debug_asgi():
7 ds = Datasette([], memory=True)
datasette-edit-schema/README.md
55 To run the tests:
56
57 pytest
datasette-edit-schema/setup.py
28 "sqlite-utils>=2.21",
29 ],
30 extras_require={"test": ["pytest", "pytest-asyncio", "httpx"]},
31 tests_require=["datasette-edit-schema[test]"],
32 package_data={"datasette_edit_schema": ["templates/*.html", "static/*.js"]},
datasette-edit-templates/README.md
39 To run the tests:
40
41 pytest
datasette-edit-templates/setup.py
36 ],
37 },
38 extras_require={"test": ["pytest", "pytest-asyncio", "httpx"]},
39 tests_require=["datasette-edit-templates[test]"],
40 python_requires=">=3.6",
datasette-export-notebook/README.md
40 To run the tests:
41
42 pytest
datasette-export-notebook/setup.py
30 entry_points={"datasette": ["export_notebook = datasette_export_notebook"]},
31 install_requires=["datasette"],
32 extras_require={"test": ["pytest", "pytest-asyncio", "sqlite-utils"]},
33 tests_require=["datasette-export-notebook[test]"],
34 package_data={"datasette_export_notebook": ["templates/*.html"]},
datasette-glitch/setup.py
30 entry_points={"datasette": ["glitch = datasette_glitch"]},
31 install_requires=["datasette>=0.45"],
32 extras_require={"test": ["pytest", "pytest-asyncio", "httpx"]},
33 tests_require=["datasette-glitch[test]"],
34 )
datasette-haversine/setup.py
25 entry_points={"datasette": ["haversine = datasette_haversine"]},
26 install_requires=["datasette", "haversine"],
27 extras_require={"test": ["pytest"]},
28 tests_require=["datasette-haversine[test]"],
29 )
datasette-graphql/README.md
datasette-graphql/setup.py
30 entry_points={"datasette": ["graphql = datasette_graphql"]},
31 install_requires=["datasette>=0.51", "graphene>=2.0,<3.0", "sqlite-utils", "wrapt"],
32 extras_require={"test": ["pytest", "pytest-asyncio", "httpx"]},
33 tests_require=["datasette-graphql[test]"],
34 package_data={"datasette_graphql": ["templates/*.html", "static/*"]},
datasette-ics/setup.py
25 entry_points={"datasette": ["ics = datasette_ics"]},
26 install_requires=["datasette>=0.49", "ics==0.7"],
27 extras_require={"test": ["pytest", "pytest-asyncio", "httpx"]},
28 tests_require=["datasette-ics[test]"],
29 )
datasette-import-table/README.md
40 To run the tests:
41
42 pytest
datasette-import-table/setup.py
30 entry_points={"datasette": ["import_table = datasette_import_table"]},
31 install_requires=["datasette", "httpx", "sqlite-utils"],
32 extras_require={"test": ["pytest", "pytest-asyncio", "httpx", "pytest-httpx"]},
33 tests_require=["datasette-import-table[test]"],
34 package_data={"datasette_import_table": ["templates/*.html"]},
datasette-indieauth/README.md
84 To run the tests:
85
86 pytest
datasette-indieauth/setup.py
31 install_requires=["datasette"],
32 extras_require={
33 "test": ["pytest", "pytest-asyncio", "httpx", "pytest-httpx", "mf2py"]
34 },
35 tests_require=["datasette-indieauth[test]"],
datasette-init/README.md
91 To run the tests:
92
93 pytest
datasette-init/setup.py
30 entry_points={"datasette": ["init = datasette_init"]},
31 install_requires=["datasette>=0.45", "sqlite-utils"],
32 extras_require={"test": ["pytest", "pytest-asyncio", "httpx"]},
33 tests_require=["datasette-init[test]"],
34 )
datasette-insert/README.md
datasette-insert/setup.py
31 install_requires=["datasette>=0.46", "sqlite-utils"],
32 extras_require={
33 "test": ["pytest", "pytest-asyncio", "httpx", "datasette-auth-tokens"]
34 },
35 tests_require=["datasette-insert[test]"],
datasette-insert-unsafe/README.md
37 To run the tests:
38
39 pytest
datasette-insert-unsafe/setup.py
30 entry_points={"datasette": ["insert_unsafe = datasette_insert_unsafe"]},
31 install_requires=["datasette", "datasette-insert>=0.6"],
32 extras_require={"test": ["pytest", "pytest-asyncio", "httpx", "sqlite-utils"]},
33 tests_require=["datasette-insert-unsafe[test]"],
34 )
datasette-jellyfish/setup.py
30 entry_points={"datasette": ["jellyfish = datasette_jellyfish"]},
31 install_requires=["datasette", "jellyfish>=0.8.2"],
32 extras_require={"test": ["pytest"]},
33 tests_require=["datasette-jellyfish[test]"],
34 )
datasette-jq/setup.py
27 extras_require={
28 "test": [
29 "pytest"
30 ]
31 },
datasette-json-preview/README.md
46 To run the tests:
47
48 pytest
datasette-json-preview/setup.py
30 entry_points={"datasette": ["json_preview = datasette_json_preview"]},
31 install_requires=["datasette>=0.55"],
32 extras_require={"test": ["pytest", "pytest-asyncio", "httpx", "sqlite-utils"]},
33 tests_require=["datasette-json-preview[test]"],
34 )
datasette-json-html/setup.py
30 entry_points={"datasette": ["json_html = datasette_json_html"]},
31 install_requires=["datasette"],
32 extras_require={"test": ["pytest", "pytest-asyncio", "httpx"]},
33 tests_require=["datasette-json-html[test]"],
34 )
datasette-leaflet/setup.py
30 entry_points={"datasette": ["leaflet = datasette_leaflet"]},
31 install_requires=["datasette"],
32 extras_require={"test": ["pytest", "pytest-asyncio"]},
33 tests_require=["datasette-leaflet[test]"],
34 package_data={
datasette-leaflet-freedraw/README.md
65 To run the tests:
66
67 pytest
datasette-leaflet-freedraw/setup.py
30 entry_points={"datasette": ["leaflet_freedraw = datasette_leaflet_freedraw"]},
31 install_requires=["datasette>=0.54", "datasette-leaflet>=0.2"],
32 extras_require={"test": ["pytest", "pytest-asyncio"]},
33 tests_require=["datasette-leaflet-freedraw[test]"],
34 package_data={
datasette-leaflet-geojson/setup.py
26 package_data={"datasette_leaflet_geojson": ["static/datasette-leaflet-geojson.js"]},
27 install_requires=["datasette>=0.54", "datasette-leaflet>=0.2"],
28 extras_require={"test": ["pytest", "pytest-asyncio"]},
29 tests_require=["datasette-leaflet-geojson[test]"],
30 )
datasette-mask-columns/setup.py
25 entry_points={"datasette": ["mask_columns = datasette_mask_columns"]},
26 install_requires=["datasette~=0.36"],
27 extras_require={"test": ["pytest", "pytest-asyncio", "httpx", "sqlite-utils"]},
28 tests_require=["datasette-mask-columns[test]"],
29 )
datasette-media/setup.py
33 "test": [
34 "asgiref",
35 "pytest",
36 "pytest-asyncio",
37 "sqlite-utils",
38 "pytest-httpx>=0.4.0",
39 ],
40 "heif": ["pyheif>=0.4"],
datasette-permissions-sql/setup.py
25 entry_points={"datasette": ["permissions_sql = datasette_permissions_sql"]},
26 install_requires=["datasette>=0.44",],
27 extras_require={"test": ["pytest", "pytest-asyncio", "httpx", "sqlite-utils~=2.0"]},
28 tests_require=["datasette-permissions-sql[test]"],
29 )
datasette-plugin/README.md
64 You can run the default test for your plugin like so:
65
66 pytest
67
68 This will execute the test in `tests/test_my_new_plugin.py`, which confirms that the plugin has been installed.
datasette-plugin/requirements.txt
1 cookiecutter
2 pytest
3
datasette-plugin-demos/README.md
33 To run the tests:
34
35 pytest
datasette-plugin-demos/setup.py
16 },
17 install_requires=["datasette"],
18 extras_require={"test": ["pytest", "pytest-asyncio", "httpx"]},
19 tests_require=["datasette-plugin-demos[test]"],
20 )
datasette-plugin-template-demo/README.md
36 To run the tests:
37
38 pytest
datasette-plugin-template-demo/setup.py
30 entry_points={"datasette": ["plugin_template_demo = datasette_plugin_template_demo"]},
31 install_requires=["datasette"],
32 extras_require={"test": ["pytest", "pytest-asyncio"]},
33 tests_require=["datasette-plugin-template-demo[test]"],
34 package_data={
datasette-pretty-json/setup.py
25 entry_points={"datasette": ["pretty_json = datasette_pretty_json"]},
26 install_requires=["datasette"],
27 extras_require={"test": ["pytest"]},
28 )
datasette-publish-fly/setup.py
30 entry_points={"datasette": ["publish_fly = datasette_publish_fly"]},
31 install_requires=["datasette>=0.44"],
32 extras_require={"test": ["pytest"]},
33 tests_require=["datasette-publish-fly[test]"],
34 )
datasette-psutil/setup.py
25 entry_points={"datasette": ["psutil = datasette_psutil"]},
26 install_requires=["datasette>=0.44", "psutil"],
27 extras_require={"test": ["pytest", "pytest-asyncio", "httpx"]},
28 tests_require=["datasette-psutil[test]"],
29 )
datasette-publish-vercel/setup.py
30 entry_points={"datasette": ["publish_vercel = datasette_publish_vercel"]},
31 install_requires=["datasette>=0.52"],
32 extras_require={"test": ["pytest"]},
33 tests_require=["datasette-publish-vercel[test]"],
34 )
datasette-registry/requirements.txt
1 conformity
2 pytest
3 requests
4 datasette
datasette-render-binary/setup.py
25 entry_points={"datasette": ["render_binary = datasette_render_binary"]},
26 install_requires=["datasette", "filetype"],
27 extras_require={"test": ["pytest"]},
28 tests_require=["datasette-render-binary[test]"],
29 )
datasette-render-html/setup.py
25 entry_points={"datasette": ["render_html = datasette_render_html"]},
26 install_requires=["datasette"],
27 extras_require={"test": ["pytest"]},
28 )
datasette-render-html/test_datasette_render_html.py
2 from datasette.app import Datasette
3 from markupsafe import Markup
4 import pytest
5
6
7 @pytest.fixture
8 def configured_datasette():
9 return Datasette(
datasette-render-markdown/setup.py
25 entry_points={"datasette": ["render_markdown = datasette_render_markdown"]},
26 install_requires=["datasette", "markdown", "bleach"],
27 extras_require={"test": ["pytest", "pytest-asyncio"]},
28 tests_require=["datasette-render-markdown[test]"],
29 )
datasette-render-images/setup.py
25 entry_points={"datasette": ["render_images = datasette_render_images"]},
26 install_requires=["datasette"],
27 extras_require={"test": ["pytest"]},
28 tests_require=["datasette-auth-tokens[test]"],
29 )
datasette-render-images/test_datasette_render_images.py
2 from datasette.app import Datasette
3 import jinja2
4 import pytest
5
6 GIF_1x1 = b"GIF89a\x01\x00\x01\x00\x80\x00\x00\x00\x00\x00\xff\xff\xff!\xf9\x04\x01\x00\x00\x00\x00,\x00\x00\x00\x00\x01\x00\x01\x00\x00\x02\x01D\x00;"
10
11
12 @pytest.mark.parametrize(
13 "input,expected",
14 [
datasette-render-timestamps/setup.py
25 entry_points={"datasette": ["render_timestamps = datasette_render_timestamps"]},
26 install_requires=["datasette"],
27 extras_require={"test": ["pytest"]},
28 tests_require=["datasette-render-timestamps[test]"],
29 )
datasette-ripgrep/README.md
71 To run the tests:
72
73 pytest
datasette-ripgrep/setup.py
31 package_data={"datasette_ripgrep": ["templates/*.html"]},
32 install_requires=["datasette"],
33 extras_require={"test": ["pytest", "pytest-asyncio", "httpx"]},
34 tests_require=["datasette-ripgrep[test]"],
35 python_requires=">=3.6",
datasette-rure/setup.py
25 entry_points={"datasette": ["rure = datasette_rure"]},
26 install_requires=["datasette", "rure"],
27 extras_require={"test": ["pytest"]},
28 tests_require=["datasette-rure[test]"],
29 )
datasette-saved-queries/README.md
39 To run the tests:
40
41 pytest
datasette-saved-queries/setup.py
30 entry_points={"datasette": ["saved_queries = datasette_saved_queries"]},
31 install_requires=["datasette>=0.45", "sqlite-utils"],
32 extras_require={"test": ["pytest", "pytest-asyncio", "httpx"]},
33 tests_require=["datasette-saved-queries[test]"],
34 )
datasette-schema-versions/setup.py
30 entry_points={"datasette": ["schema_versions = datasette_schema_versions"]},
31 install_requires=["datasette"],
32 extras_require={"test": ["pytest", "pytest-asyncio", "httpx", "sqlite-utils"]},
33 tests_require=["datasette-schema-versions[test]"],
34 )
datasette-seaborn/README.md
54 To run the tests:
55
56 pytest
datasette-seaborn/setup.py
30 entry_points={"datasette": ["seaborn = datasette_seaborn"]},
31 install_requires=["datasette>=0.50", "seaborn>=0.11.0"],
32 extras_require={"test": ["pytest", "pytest-asyncio"]},
33 tests_require=["datasette-seaborn[test]"],
34 )
datasette-search-all/setup.py
31 package_data={"datasette_search_all": ["templates/*.html"]},
32 install_requires=["datasette>=0.51"],
33 extras_require={"test": ["pytest", "pytest-asyncio", "sqlite-utils"]},
34 tests_require=["datasette-search-all[test]"],
35 )
datasette-sentry/setup.py
25 py_modules=["datasette_sentry"],
26 install_requires=["sentry-sdk"],
27 extras_require={"test": ["pytest", "datasette"]},
28 classifiers=[
29 "License :: OSI Approved :: Apache Software License",
datasette-show-errors/setup.py
26 install_requires=["starlette", "datasette"],
27 extras_require={
28 "test": ["pytest"]
29 },
30 )
datasette-tiles/README.md
datasette-tiles/setup.py
30 entry_points={"datasette": ["tiles = datasette_tiles"]},
31 install_requires=["datasette", "datasette-leaflet>=0.2.2"],
32 extras_require={"test": ["pytest", "pytest-asyncio", "datasette-basemap>=0.2"]},
33 tests_require=["datasette-tiles[test]"],
34 package_data={"datasette_tiles": ["templates/*"]},
datasette-template-sql/setup.py
30 entry_points={"datasette": ["template-sql = datasette_template_sql"]},
31 install_requires=["datasette>=0.54"],
32 extras_require={"test": ["pytest", "pytest-asyncio", "sqlite-utils"]},
33 tests_require=["datasette-template-sql[test]"],
34 python_requires=">=3.6",
datasette-upload-csvs/setup.py
33 ],
34 extras_require={
35 "test": ["pytest", "pytest-asyncio", "asgiref", "httpx", "asgi-lifespan"]
36 },
37 package_data={"datasette_upload_csvs": ["templates/*.html"]},
datasette-vega/setup.py
69 },
70 install_requires=["datasette"],
71 extras_require={"test": ["pytest", "pytest-asyncio", "httpx"]},
72 )
datasette-write/README.md
39 To run the tests:
40
41 pytest
datasette-write/setup.py
31 entry_points={"datasette": ["write = datasette_write"]},
32 install_requires=["datasette>=0.45"],
33 extras_require={"test": ["pytest", "pytest-asyncio", "httpx"]},
34 tests_require=["datasette-write[test]"],
35 )
datasette-yaml/README.md
42 To run the tests:
43
44 pytest
datasette-yaml/setup.py
30 entry_points={"datasette": ["yaml = datasette_yaml"]},
31 install_requires=["datasette"],
32 extras_require={"test": ["pytest", "pytest-asyncio", "httpx", "sqlite-utils"]},
33 tests_require=["datasette-yaml[test]"],
34 )
db-to-sqlite/setup.py
25 install_requires=["sqlalchemy", "sqlite-utils>=2.9.1", "click"],
26 extras_require={
27 "test": ["pytest"],
28 "test_mysql": ["pytest", "mysqlclient"],
29 "test_postgresql": ["pytest", "psycopg2"],
30 "mysql": ["mysqlclient"],
31 "postgresql": ["psycopg2"],
dogsheep-photos/setup.py
32 "osxphotos>=0.28.13 ; sys_platform=='darwin'",
33 ],
34 extras_require={"test": ["pytest"]},
35 tests_require=["dogsheep-photos[test]"],
36 )
dogsheep-beta/README.md
dogsheep-beta/setup.py
35 install_requires=["datasette>=0.50.2", "click", "PyYAML", "sqlite-utils>=3.0"],
36 extras_require={
37 "test": ["pytest", "pytest-asyncio", "httpx", "beautifulsoup4", "html5lib"]
38 },
39 tests_require=["dogsheep-beta[test]"],
evernote-to-sqlite/README.md
50 To run the tests:
51
52 pytest
evernote-to-sqlite/setup.py
33 """,
34 install_requires=["click", "sqlite-utils>=3.0"],
35 extras_require={"test": ["pytest"]},
36 tests_require=["evernote-to-sqlite[test]"],
37 )
fec-to-sqlite/setup.py
28 """,
29 install_requires=["sqlite-utils", "click", "requests", "fecfile", "tqdm"],
30 extras_require={"test": ["pytest"]},
31 tests_require=["fec-to-sqlite[test]"],
32 )
genome-to-sqlite/setup.py
28 """,
29 install_requires=["sqlite-utils"],
30 extras_require={"test": ["pytest"]},
31 tests_require=["genome-to-sqlite[test]"],
32 )
geojson-to-sqlite/setup.py
28 """,
29 install_requires=["sqlite-utils>=2.2", "shapely"],
30 extras_require={"test": ["pytest"]},
31 tests_require=["geojson-to-sqlite[test]"],
32 )
github-to-sqlite/setup.py
28 """,
29 install_requires=["sqlite-utils>=2.7.2", "requests", "PyYAML"],
30 extras_require={"test": ["pytest", "requests-mock", "bs4"]},
31 tests_require=["github-to-sqlite[test]"],
32 )
google-takeout-to-sqlite/setup.py
28 """,
29 install_requires=["sqlite-utils~=1.11"],
30 extras_require={"test": ["pytest"]},
31 tests_require=["google-takeout-to-sqlite[test]"],
32 )
hacker-news-to-sqlite/setup.py
28 """,
29 install_requires=["sqlite-utils", "click", "requests", "tqdm"],
30 extras_require={"test": ["pytest", "requests-mock"]},
31 tests_require=["hacker-news-to-sqlite[test]"],
32 )
healthkit-to-sqlite/setup.py
33 """,
34 install_requires=["sqlite-utils>=2.4.4"],
35 extras_require={"test": ["pytest"]},
36 tests_require=["healthkit-to-sqlite[test]"],
37 )
inaturalist-to-sqlite/setup.py
28 """,
29 install_requires=["sqlite-utils>=2.0", "click", "requests"],
30 extras_require={"test": ["pytest"]},
31 tests_require=["inaturalist-to-sqlite[test]"],
32 )
markdown-to-sqlite/setup.cfg
1 [aliases]
2 test=pytest
markdown-to-sqlite/setup.py
24 packages=find_packages(),
25 install_requires=["yamldown", "markdown", "sqlite-utils", "click"],
26 extras_require={"test": ["pytest"]},
27 tests_require=["markdown-to-sqlite[test]"],
28 setup_requires=["pytest-runner"],
29 entry_points="""
30 [console_scripts]
pocket-to-sqlite/setup.py
28 """,
29 install_requires=["sqlite-utils>=2.4.4", "click", "requests"],
30 extras_require={"test": ["pytest"]},
31 tests_require=["pocket-to-sqlite[test]"],
32 )
sphinx-to-sqlite/README.md
46 To run the tests:
47
48 pytest
sphinx-to-sqlite/setup.py
33 """,
34 install_requires=["click", "sqlite-utils"],
35 extras_require={"test": ["pytest"]},
36 tests_require=["sphinx-to-sqlite[test]"],
37 )
shapefile-to-sqlite/setup.py
28 """,
29 install_requires=["sqlite-utils>=2.2", "Shapely", "Fiona", "pyproj"],
30 extras_require={"test": ["pytest"]},
31 tests_require=["shapefile-to-sqlite[test]"],
32 )
shapefile-to-sqlite/pytest.ini
1 [pytest]
2 filterwarnings =
3 ignore:.*the imp module is deprecated.*:DeprecationWarning
sqlite-diffable/setup.py
24 packages=find_packages(exclude="tests"),
25 install_requires=["click", "sqlite-utils"],
26 extras_require={"test": ["pytest", "black"]},
27 entry_points="""
28 [console_scripts]
sqlite-fts4/setup.py
28 version=VERSION,
29 packages=["sqlite_fts4"],
30 extras_require={"test": ["pytest"]},
31 tests_require=["sqlite-fts4[test]"],
32 )
sqlite-dump/README.md
58 To run the tests:
59
60 pytest
sqlite-dump/setup.py
29 packages=["sqlite_dump"],
30 install_requires=[],
31 extras_require={"test": ["pytest", "sqlite-utils"]},
32 tests_require=["sqlite-dump[test]"],
33 )
sqlite-generate/README.md
87 To run the tests:
88
89 pytest
sqlite-generate/setup.py
33 """,
34 install_requires=["click", "Faker", "sqlite-utils"],
35 extras_require={"test": ["pytest"]},
36 tests_require=["sqlite-generate[test]"],
37 )
sqlite-transform/setup.py
28 """,
29 install_requires=["dateutils", "tqdm", "click"],
30 extras_require={"test": ["pytest", "sqlite-utils"]},
31 tests_require=["sqlite-transform[test]"],
32 )
sqlite-utils/setup.py
24 packages=find_packages(exclude=["tests", "tests.*"]),
25 install_requires=["sqlite-fts4", "click", "click-default-group", "tabulate"],
26 setup_requires=["pytest-runner"],
27 extras_require={
28 "test": ["pytest", "black", "hypothesis"],
29 "docs": ["sphinx_rtd_theme", "sphinx-autobuild"],
30 },
swarm-to-sqlite/setup.py
33 """,
34 install_requires=["sqlite-utils>=3.3", "click", "requests"],
35 extras_require={"test": ["pytest"]},
36 tests_require=["swarm-to-sqlite[test]"],
37 )
til/README.md
89 * [Restricting SSH connections to devices within a Tailscale network](https://github.com/simonw/til/blob/main/tailscale/lock-down-sshd.md) - 2020-04-23
90
91 ## pytest
92
93 * [Session-scoped temporary directories in pytest](https://github.com/simonw/til/blob/main/pytest/session-scoped-tmp.md) - 2020-04-26
94 * [How to mock httpx using pytest-mock](https://github.com/simonw/til/blob/main/pytest/mock-httpx.md) - 2020-04-29
95 * [Asserting a dictionary is a subset of another dictionary](https://github.com/simonw/til/blob/main/pytest/assert-dictionary-subset.md) - 2020-05-28
96 * [Registering temporary pluggy plugins inside tests](https://github.com/simonw/til/blob/main/pytest/registering-plugins-in-tests.md) - 2020-07-21
97 * [Code coverage using pytest and codecov.io](https://github.com/simonw/til/blob/main/pytest/pytest-code-coverage.md) - 2020-08-15
98 * [Start a server in a subprocess during a pytest session](https://github.com/simonw/til/blob/main/pytest/subprocess-server.md) - 2020-08-31
99
100 ## github
148 * [PostgreSQL full-text search in the Django Admin](https://github.com/simonw/til/blob/main/django/postgresql-full-text-search-admin.md) - 2020-07-25
149 * [Adding extra read-only information to a Django admin change page](https://github.com/simonw/til/blob/main/django/extra-read-only-admin-information.md) - 2021-02-25
150 * [Writing tests for the Django admin with pytest-django](https://github.com/simonw/til/blob/main/django/testing-django-admin-with-pytest.md) - 2021-03-02
151
152 ## docker
228 ## cookiecutter
229
230 * [Testing cookiecutter templates with pytest](https://github.com/simonw/til/blob/main/cookiecutter/pytest-for-cookiecutter.md) - 2021-01-27
231 * [Conditionally creating directories in cookiecutter](https://github.com/simonw/til/blob/main/cookiecutter/conditionally-creating-directories.md) - 2021-01-27
232
twitter-to-sqlite/setup.py
32 "python-dateutil",
33 ],
34 extras_require={"test": ["pytest"]},
35 tests_require=["twitter-to-sqlite[test]"],
36 )
yaml-to-sqlite/setup.py
24 packages=find_packages(),
25 install_requires=["click", "PyYAML", "sqlite-utils"],
26 setup_requires=["pytest-runner"],
27 extras_require={"test": ["pytest"]},
28 entry_points="""
29 [console_scripts]
csvs-to-sqlite/tests/test_utils.py
1 from csvs_to_sqlite import utils
2 import pytest
3 import sqlite3
4 import pandas as pd
12
13
14 @pytest.mark.parametrize("table,expected", [("foo", True), ("bar", False)])
15 def test_table_exists(table, expected):
16 conn = sqlite3.connect(":memory:")
datasette/datasette/cli.py
540 asyncio.get_event_loop().run_until_complete(ds.invoke_startup())
541
542 # Run async sanity checks - but only if we're not under pytest
543 asyncio.get_event_loop().run_until_complete(check_databases(ds))
544
datasette/docs/changelog.rst
78 - Better error message for disallowed ``PRAGMA`` clauses in SQL queries. (:issue:`1185`)
79 - ``datasette publish heroku`` now deploys using ``python-3.8.7``.
80 - New plugin testing documentation on :ref:`testing_plugins_pytest_httpx`. (:issue:`1198`)
81 - All ``?_*`` query string parameters passed to the table page are now persisted in hidden form fields, so parameters such as ``?_size=10`` will be correctly passed to the next page when query filters are changed. (:issue:`1194`)
82 - Fixed a bug loading a database file called ``test-database (1).sqlite``. (:issue:`1181`)
400 - :ref:`writing_plugins` describes how to author plugins, from one-off single file plugins to packaged plugins that can be published to PyPI. It also describes how to start a plugin using the new `datasette-plugin <https://github.com/simonw/datasette-plugin>`__ cookiecutter template.
401 - :ref:`plugin_hooks` is a full list of detailed documentation for every Datasette plugin hook.
402 - :ref:`testing_plugins` describes how to write tests for Datasette plugins, using `pytest <https://docs.pytest.org/>`__ and `HTTPX <https://www.python-httpx.org/>`__.
403
404 New plugin hooks
1010 - New project guideline: master should stay shippable at all times! (`31f36e1 <https://github.com/simonw/datasette/commit/31f36e1b97ccc3f4387c80698d018a69798b6228>`__)
1011 - Fixed a bug where ``sqlite_timelimit()`` occasionally failed to clean up after itself (`bac4e01 <https://github.com/simonw/datasette/commit/bac4e01f40ae7bd19d1eab1fb9349452c18de8f5>`__)
1012 - We no longer load additional plugins when executing pytest (:issue:`438`)
1013 - Homepage now links to database views if there are less than five tables in a database (:issue:`373`)
1014 - The ``--cors`` option is now respected by error pages (:issue:`453`)
1063 - Added documentation on :ref:`how to build the documentation <contributing_documentation>`
1064 - Added documentation covering :ref:`our release process <contributing_release>`
1065 - Upgraded to pytest 4.0.2
1066
1067 .. _v0_25_1:
datasette/docs/contributing.rst
44 That last line does most of the work: ``pip install -e`` means "install this package in a way that allows me to edit the source code in place". The ``.[test]`` option means "use the setup.py in this directory and install the optional testing dependencies as well".
45
46 Once you have done this, you can run the Datasette unit tests from inside your ``datasette/`` directory using `pytest <https://docs.pytest.org/>`__ like so::
47
48 pytest
49
50 To run Datasette itself, type ``datasette``.
datasette/docs/plugin_hooks.rst
734 .. code-block:: python
735
736 @pytest.mark.asyncio
737 async def test_my_plugin():
738 ds = Datasette([], metadata={})
datasette/docs/testing_plugins.rst
4 ===============
5
6 We recommend using `pytest <https://docs.pytest.org/>`__ to write automated tests for your plugins.
7
8 If you use the template described in :ref:`writing_plugins_cookiecutter` your plugin will start with a single test in your ``tests/`` directory that looks like this:
11
12 from datasette.app import Datasette
13 import pytest
14
15
16 @pytest.mark.asyncio
17 async def test_plugin_is_installed():
18 datasette = Datasette([], memory=True)
25 This test uses the :ref:`internals_datasette_client` object to exercise a test instance of Datasette. ``datasette.client`` is a wrapper around the `HTTPX <https://www.python-httpx.org/>`__ Python library which can imitate HTTP requests using ASGI. This is the recommended way to write tests against a Datasette instance.
26
27 This test also uses the `pytest-asyncio <https://pypi.org/project/pytest-asyncio/>`__ package to add support for ``async def`` test functions running under pytest.
28
29 You can install these packages like so::
30
31 pip install pytest pytest-asyncio
32
33 If you are building an installable package you can add them as test dependencies to your ``setup.py`` module like this:
39 # ...
40 extras_require={
41 "test": ["pytest", "pytest-asyncio"]
42 },
43 tests_require=["datasette-my-plugin[test]"],
48 pip install -e '.[test]'
49
50 Then run the tests using pytest like so::
51
52 pytest
53
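For reference, a minimal sketch of the kind of starter test described above — checking that the plugin is installed via ``/-/plugins.json`` — might look like this. The plugin name ``datasette-my-plugin`` is a placeholder for illustration, not taken from the excerpt:

    from datasette.app import Datasette
    import pytest


    @pytest.mark.asyncio
    async def test_plugin_is_installed():
        # Start an in-memory Datasette instance with no database files
        datasette = Datasette([], memory=True)
        # /-/plugins.json lists every installed plugin
        response = await datasette.client.get("/-/plugins.json")
        assert response.status_code == 200
        installed_plugins = {p["name"] for p in response.json()}
        assert "datasette-my-plugin" in installed_plugins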
54 .. _testing_plugins_pdb:
69 response = await ds.client.get("/")
70
71 If you use this pattern you will need to run ``pytest`` with the ``-s`` option to avoid capturing stdin/stdout in order to interact with the debugger prompt.
72
73 .. _testing_plugins_fixtures:
74
75 Using pytest fixtures
76 ---------------------
77
78 `Pytest fixtures <https://docs.pytest.org/en/stable/fixture.html>`__ can be used to create initial testable objects which can then be used by multiple tests.
79
80 A common pattern for Datasette plugins is to create a fixture which sets up a temporary test database and wraps it in a Datasette instance.
85
86 from datasette.app import Datasette
87 import pytest
88 import sqlite_utils
89
90 @pytest.fixture(scope="session")
91 def datasette(tmp_path_factory):
92 db_directory = tmp_path_factory.mktemp("dbs")
113 return datasette
114
115 @pytest.mark.asyncio
116 async def test_example_table_json(datasette):
117 response = await datasette.client.get("/test/dogs.json?_shape=array")
122 ]
123
124 @pytest.mark.asyncio
125 async def test_example_table_html(datasette):
126 response = await datasette.client.get("/test/dogs")
127 assert ">Some dogs</h1>" in response.text
128
129 Here the ``datasette()`` function defines the fixture, which is then automatically passed to the two test functions because pytest matches their ``datasette`` function parameters.
130
131 The ``@pytest.fixture(scope="session")`` line here ensures the fixture is reused for the full ``pytest`` execution session. This means that the temporary database file will be created once and reused for each test.
132
133 If you want to create that test database repeatedly for every individual test function, write the fixture function like this instead. You may want to do this if your plugin modifies the database contents in some way:
135 .. code-block:: python
136
137 @pytest.fixture
138 def datasette(tmp_path_factory):
139 # This fixture will be executed repeatedly for every test
140
141 .. _testing_plugins_pytest_httpx:
142
143 Testing outbound HTTP calls with pytest-httpx
144 ---------------------------------------------
145
146 If your plugin makes outbound HTTP calls - for example datasette-auth-github or datasette-import-table - you may need to mock those HTTP requests in your tests.
147
148 The `pytest-httpx <https://pypi.org/project/pytest-httpx/>`__ package is a useful library for mocking calls. It can be tricky to use with Datasette though since it mocks all HTTPX requests, and Datasette's own testing mechanism uses HTTPX internally.
149
150 To avoid breaking your tests, you can return ``["localhost"]`` from the ``non_mocked_hosts()`` fixture.
186
187 from datasette.app import Datasette
188 import pytest
189
190
191 @pytest.fixture
192 def non_mocked_hosts():
193 # This ensures httpx-mock will not affect Datasette's own
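A minimal sketch of that pattern, under the assumption of an outbound call to an invented ``https://example.com/data.json`` URL (the URL and test body are illustrative, not from the docs), could look like this:

    from datasette.app import Datasette
    import httpx
    import pytest


    @pytest.fixture
    def non_mocked_hosts():
        # Exclude "localhost" so pytest-httpx leaves Datasette's own
        # internal ASGI requests alone
        return ["localhost"]


    @pytest.mark.asyncio
    async def test_outbound_call_is_mocked(httpx_mock):
        # Any request to this external URL returns canned JSON
        httpx_mock.add_response(
            url="https://example.com/data.json", json={"hello": "world"}
        )
        async with httpx.AsyncClient() as client:
            outbound = await client.get("https://example.com/data.json")
        assert outbound.json() == {"hello": "world"}

        # Datasette's own test client still works, because "localhost"
        # is excluded from mocking by the non_mocked_hosts fixture
        datasette = Datasette([], memory=True)
        response = await datasette.client.get("/-/plugins.json")
        assert response.status_code == 200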
datasette/tests/conftest.py
1 import os
2 import pathlib
3 import pytest
4 import re
5 import subprocess
23
24
25 def pytest_report_header(config):
26 return "SQLite: {}".format(
27 sqlite3.connect(":memory:").execute("select sqlite_version()").fetchone()[0]
29
30
31 def pytest_configure(config):
32 import sys
33
35
36
37 def pytest_unconfigure(config):
38 import sys
39
41
42
43 def pytest_collection_modifyitems(items):
44 # Ensure test_cli.py and test_black.py and test_inspect.py run first before any asyncio code kicks in
45 move_to_front(items, "test_cli")
60
61
62 @pytest.fixture
63 def restore_working_directory(tmpdir, request):
64 previous_cwd = os.getcwd()
71
72
73 @pytest.fixture(scope="session", autouse=True)
74 def check_permission_actions_are_documented():
75 from datasette.plugins import pm
99
100
101 @pytest.fixture(scope="session")
102 def ds_localhost_http_server():
103 ds_proc = subprocess.Popen(
113 assert not ds_proc.poll(), ds_proc.stdout.read().decode("utf-8")
114 yield ds_proc
115 # Shut it down at the end of the pytest session
116 ds_proc.terminate()
117
118
119 @pytest.fixture(scope="session")
120 def ds_localhost_https_server(tmp_path_factory):
121 cert_directory = tmp_path_factory.mktemp("certs")
150 assert not ds_proc.poll(), ds_proc.stdout.read().decode("utf-8")
151 yield ds_proc, client_cert
152 # Shut it down at the end of the pytest session
153 ds_proc.terminate()
datasette/tests/fixtures.py
8 import os
9 import pathlib
10 import pytest
11 import random
12 import sys
157
158
159 @pytest.fixture(scope="session")
160 def app_client():
161 with make_app_client() as client:
163
164
165 @pytest.fixture(scope="session")
166 def app_client_no_files():
167 ds = Datasette([])
169
170
171 @pytest.fixture(scope="session")
172 def app_client_base_url_prefix():
173 with make_app_client(config={"base_url": "/prefix/"}) as client:
175
176
177 @pytest.fixture(scope="session")
178 def app_client_two_attached_databases():
179 with make_app_client(
183
184
185 @pytest.fixture(scope="session")
186 def app_client_two_attached_databases_crossdb_enabled():
187 with make_app_client(
192
193
194 @pytest.fixture(scope="session")
195 def app_client_conflicting_database_names():
196 with make_app_client(
200
201
202 @pytest.fixture(scope="session")
203 def app_client_two_attached_databases_one_immutable():
204 with make_app_client(
208
209
210 @pytest.fixture(scope="session")
211 def app_client_with_hash():
212 with make_app_client(config={"hash_urls": True}, is_immutable=True) as client:
214
215
216 @pytest.fixture(scope="session")
217 def app_client_shorter_time_limit():
218 with make_app_client(20) as client:
220
221
222 @pytest.fixture(scope="session")
223 def app_client_returned_rows_matches_page_size():
224 with make_app_client(max_returned_rows=50) as client:
226
227
228 @pytest.fixture(scope="session")
229 def app_client_larger_cache_size():
230 with make_app_client(config={"cache_size_kb": 2500}) as client:
232
233
234 @pytest.fixture(scope="session")
235 def app_client_csv_max_mb_one():
236 with make_app_client(config={"max_csv_mb": 1}) as client:
238
239
240 @pytest.fixture(scope="session")
241 def app_client_with_dot():
242 with make_app_client(filename="fixtures.dot.db") as client:
244
245
246 @pytest.fixture(scope="session")
247 def app_client_with_cors():
248 with make_app_client(is_immutable=True, cors=True) as client:
250
251
252 @pytest.fixture(scope="session")
253 def app_client_immutable_and_inspect_file():
254 inspect_data = {"fixtures": {"tables": {"sortable": {"count": 100}}}}
datasette/tests/test_api.py
25 )
26 import json
27 import pytest
28 import sys
29 import urllib
633
634
635 @pytest.mark.parametrize(
636 "path,expected_redirect",
637 (
835
836
837 @pytest.mark.parametrize(
838 "path,expected_rows,expected_pages",
839 [
872
873
874 @pytest.mark.parametrize(
875 "path,expected_error",
876 [
935
936
937 @pytest.mark.parametrize(
938 "query_string,sort_key,human_description_en",
939 [
1023
1024
1025 @pytest.mark.parametrize(
1026 "path,expected_rows",
1027 [
1077
1078
1079 @pytest.mark.parametrize(
1080 "path,expected_rows",
1081 [
1114
1115
1116 @pytest.mark.parametrize(
1117 "path,expected_rows",
1118 [
1141
1142
1143 @pytest.mark.skipif(not detect_json1(), reason="Requires the SQLite json1 module")
1144 def test_table_filter_json_arraycontains(app_client):
1145 response = app_client.get("/fixtures/facetable.json?tags__arraycontains=tag1")
1172
1173
1174 @pytest.mark.skipif(not detect_json1(), reason="Requires the SQLite json1 module")
1175 def test_table_filter_json_arraynotcontains(app_client):
1176 response = app_client.get(
1427
1428
1429 @pytest.mark.parametrize(
1430 "path,expected_redirect",
1431 (
1455
1456
1457 @pytest.mark.parametrize(
1458 "path,expected_facet_results",
1459 [
1720
1721
1722 @pytest.mark.parametrize(
1723 "path,expected_cache_control",
1724 [
1734
1735
1736 @pytest.mark.parametrize(
1737 "path,expected_redirect",
1738 [
1770
1771
1772 @pytest.mark.parametrize(
1773 "extra_args,expected",
1774 [
1857
1858
1859 @pytest.mark.parametrize(
1860 "path,status_code",
1861 [
1873
1874
1875 @pytest.mark.parametrize(
1876 "path",
1877 (
1929
1930
1931 @pytest.mark.parametrize(
1932 "path,expected_json,expected_text",
1933 [
1960
1961
1962 @pytest.mark.parametrize(
1963 "qs",
1964 [
1990
1991
1992 @pytest.mark.skipif(
1993 sqlite_version() < (3, 31, 0),
1994 reason="generated columns were added in SQLite 3.31.0",
datasette/tests/test_auth.py
1 from .fixtures import app_client
2 import baseconv
3 import pytest
4 import time
5
47
48
49 @pytest.mark.parametrize(
50 "offset,expected",
51 [
95
96
97 @pytest.mark.parametrize("path", ["/", "/fixtures", "/fixtures/facetable"])
98 def test_logout_button_in_navigation(app_client, path):
99 response = app_client.get(
109
110
111 @pytest.mark.parametrize("path", ["/", "/fixtures", "/fixtures/facetable"])
112 def test_no_logout_button_in_navigation_if_no_ds_actor_cookie(app_client, path):
113 response = app_client.get(path + "?_bot=1")
datasette/tests/test_canned_queries.py
1 from bs4 import BeautifulSoup as Soup
2 import json
3 import pytest
4 import re
5 from .fixtures import make_app_client, app_client
6
7
8 @pytest.fixture
9 def canned_write_client():
10 with make_app_client(
72
73
74 @pytest.mark.parametrize(
75 "query_name,expect_csrf_hidden_field",
76 [
177
178
179 @pytest.mark.parametrize(
180 "headers,body,querystring",
181 (
250
251
252 @pytest.fixture(scope="session")
253 def magic_parameters_client():
254 with make_app_client(
268
269
270 @pytest.mark.parametrize(
271 "magic_parameter,expected_re",
272 [
319
320
321 @pytest.mark.parametrize("use_csrf", [True, False])
322 @pytest.mark.parametrize("return_json", [True, False])
323 def test_magic_parameters_csrf_json(magic_parameters_client, use_csrf, return_json):
324 magic_parameters_client.ds._metadata["databases"]["data"]["queries"]["runme_post"][
datasette/tests/test_black.py
2 from click.testing import CliRunner
3 from pathlib import Path
4 import pytest
5 import sys
6
datasette/tests/test_cli.py
14 import json
15 import pathlib
16 import pytest
17 import sys
18 import textwrap
21
22
23 @pytest.fixture
24 def ensure_eventloop():
25 # Workaround for "Event loop is closed" error
62
63
64 @pytest.mark.parametrize(
65 "spatialite_paths,should_suggest_load_extension",
66 (
173
174
175 @pytest.mark.parametrize("flag", ["-U", "--upgrade"])
176 @mock.patch("datasette.cli.run_module")
177 def test_install_upgrade(run_module, flag):
196
197
198 @pytest.mark.parametrize("invalid_port", ["-1", "0.5", "dog", "65536"])
199 def test_serve_invalid_ports(ensure_eventloop, invalid_port):
200 runner = CliRunner(mix_stderr=False)
270
271
272 @pytest.mark.parametrize(
273 "filename", ["test-database (1).sqlite", "database (1).sqlite"]
274 )
datasette/tests/test_config_dir.py
1 import json
2 import pytest
3
4 from datasette.app import Datasette
26
27
28 @pytest.fixture(scope="session")
29 def config_dir_client(tmp_path_factory):
30 config_dir = tmp_path_factory.mktemp("config-dir")
142
143
144 @pytest.mark.parametrize("filename", ("metadata.yml", "metadata.yaml"))
145 def test_metadata_yaml(tmp_path_factory, filename):
146 config_dir = tmp_path_factory.mktemp("yaml-config-dir")
datasette/tests/test_custom_pages.py
1 import pathlib
2 import pytest
3 from .fixtures import make_app_client
4
5
6 @pytest.fixture(scope="session")
7 def custom_pages_client():
8 with make_app_client(
65
66
67 @pytest.mark.parametrize(
68 "path,expected",
69 [
datasette/tests/test_docs.py
7 from datasette.filters import Filters
8 from pathlib import Path
9 import pytest
10 import re
11
24
25
26 @pytest.fixture(scope="session")
27 def settings_headings():
28 return get_headings((docs_path / "settings.rst").open().read(), "~")
29
30
31 @pytest.mark.parametrize("setting", app.SETTINGS)
32 def test_settings_are_documented(settings_headings, setting):
33 assert setting.name in settings_headings
34
35
36 @pytest.mark.parametrize(
37 "name,filename",
38 (
54
55
56 @pytest.fixture(scope="session")
57 def plugin_hooks_content():
58 return (docs_path / "plugin_hooks.rst").open().read()
59
60
61 @pytest.mark.parametrize(
62 "plugin", [name for name in dir(app.pm.hook) if not name.startswith("_")]
63 )
74
75
76 @pytest.fixture(scope="session")
77 def documented_views():
78 view_labels = set()
87
88
89 @pytest.mark.parametrize("view_class", [v for v in dir(app) if v.endswith("View")])
90 def test_view_classes_are_documented(documented_views, view_class):
91 assert view_class in documented_views
92
93
94 @pytest.fixture(scope="session")
95 def documented_table_filters():
96 json_api_rst = (docs_path / "json_api.rst").read_text()
104
105
106 @pytest.mark.parametrize("filter", [f.key for f in Filters._filters])
107 def test_table_filters_are_documented(documented_table_filters, filter):
108 assert filter in documented_table_filters
datasette/tests/test_facets.py
5 from datasette.utils import detect_json1
6 from .fixtures import app_client # noqa
7 import pytest
8
9
10 @pytest.mark.asyncio
11 async def test_column_facet_suggest(app_client):
12 facet = ColumnFacet(
33
34
35 @pytest.mark.asyncio
36 async def test_column_facet_suggest_skip_if_already_selected(app_client):
37 facet = ColumnFacet(
71
72
73 @pytest.mark.asyncio
74 async def test_column_facet_suggest_skip_if_enabled_by_metadata(app_client):
75 facet = ColumnFacet(
93
94
95 @pytest.mark.asyncio
96 async def test_column_facet_results(app_client):
97 facet = ColumnFacet(
145
146
147 @pytest.mark.asyncio
148 async def test_column_facet_from_metadata_cannot_be_hidden(app_client):
149 facet = ColumnFacet(
198
199
200 @pytest.mark.asyncio
201 @pytest.mark.skipif(not detect_json1(), reason="Requires the SQLite json1 module")
202 async def test_array_facet_suggest(app_client):
203 facet = ArrayFacet(
218
219
220 @pytest.mark.asyncio
221 @pytest.mark.skipif(not detect_json1(), reason="Requires the SQLite json1 module")
222 async def test_array_facet_suggest_not_if_all_empty_arrays(app_client):
223 facet = ArrayFacet(
232
233
234 @pytest.mark.asyncio
235 @pytest.mark.skipif(not detect_json1(), reason="Requires the SQLite json1 module")
236 async def test_array_facet_results(app_client):
237 facet = ArrayFacet(
278
279
280 @pytest.mark.asyncio
281 async def test_date_facet_results(app_client):
282 facet = DateFacet(
330
331
332 @pytest.mark.asyncio
333 async def test_json_array_with_blanks_and_nulls():
334 ds = Datasette([], memory=True)
datasette/tests/test_filters.py
1 from datasette.filters import Filters
2 import pytest
3
4
5 @pytest.mark.parametrize(
6 "args,expected_where,expected_params",
7 [
datasette/tests/test_html.py
12 import json
13 import pathlib
14 import pytest
15 import re
16 import textwrap
169
170
171 @pytest.mark.parametrize(
172 "path,expected_definition_sql",
173 [
552
553
554 @pytest.mark.parametrize(
555 "path,expected_classes",
556 [
583
584
585 @pytest.mark.parametrize(
586 "path,expected_considered",
587 [
874
875
876 @pytest.mark.parametrize(
877 "path,expected_column_options",
878 [
1111
1112
1113 @pytest.mark.parametrize("path", ["/404", "/fixtures/404"])
1114 def test_404(app_client, path):
1115 response = app_client.get(path)
1121
1122
1123 @pytest.mark.parametrize(
1124 "path,expected_redirect",
1125 [("/fixtures/", "/fixtures"), ("/fixtures/simple_view/", "/fixtures/simple_view")],
1161
1162
1163 @pytest.mark.parametrize(
1164 "path,has_object,has_stream,has_expand",
1165 [
1248
1249
1250 @pytest.mark.parametrize(
1251 "path,expected_hidden",
1252 [
1270
1271
1272 @pytest.mark.parametrize(
1273 "path,expected_hidden",
1274 [
1331
1332
1333 @pytest.mark.parametrize(
1334 "path,expected_filename",
1335 [
1353
1354
1355 @pytest.mark.parametrize(
1356 "path,expected_message",
1357 [
1390
1391
1392 @pytest.mark.parametrize(
1393 "path",
1394 [
1487
1488
1489 @pytest.mark.parametrize(
1490 "path",
1491 [
1535
1536
1537 @pytest.mark.parametrize(
1538 "path,expected",
1539 [
1563
1564
1565 @pytest.mark.parametrize("permission_allowed", [True, False])
1566 def test_edit_sql_link_not_shown_if_user_lacks_permission(permission_allowed):
1567 with make_app_client(
1578
1579
1580 @pytest.mark.parametrize(
1581 "actor_id,should_have_links,should_not_have_links",
1582 [
datasette/tests/test_internal_db.py
1 from .fixtures import app_client
2 import pytest
3
4
datasette/tests/test_internals_database.py
6 from datasette.utils import Column
7 from .fixtures import app_client, app_client_two_attached_databases_crossdb_enabled
8 import pytest
9 import time
10 import uuid
11
12
13 @pytest.fixture
14 def db(app_client):
15 return app_client.ds.get_database("fixtures")
16
17
18 @pytest.mark.asyncio
19 async def test_execute(db):
20 results = await db.execute("select * from facetable")
23
24
25 @pytest.mark.asyncio
26 async def test_results_first(db):
27 assert None is (await db.execute("select * from facetable where pk > 100")).first()
31
32
33 @pytest.mark.parametrize(
34 "query,expected",
35 [
39 ],
40 )
41 @pytest.mark.asyncio
42 async def test_results_single_value(db, query, expected):
43 results = await db.execute(query)
45 assert expected == results.single_value()
46 else:
47 with pytest.raises(MultipleValues):
48 results.single_value()
49
50
51 @pytest.mark.asyncio
52 async def test_execute_fn(db):
53 def get_1_plus_1(conn):
57
58
59 @pytest.mark.parametrize(
60 "tables,exists",
61 (
64 ),
65 )
66 @pytest.mark.asyncio
67 async def test_table_exists(db, tables, exists):
68 for table in tables:
71
72
73 @pytest.mark.parametrize(
74 "table,expected",
75 (
103 ),
104 )
105 @pytest.mark.asyncio
106 async def test_table_columns(db, table, expected):
107 columns = await db.table_columns(table)
109
110
111 @pytest.mark.parametrize(
112 "table,expected",
113 (
277 ),
278 )
279 @pytest.mark.asyncio
280 async def test_table_column_details(db, table, expected):
281 columns = await db.table_column_details(table)
283
284
285 @pytest.mark.asyncio
286 async def test_get_all_foreign_keys(db):
287 all_foreign_keys = await db.get_all_foreign_keys()
338
339
340 @pytest.mark.asyncio
341 async def test_table_names(db):
342 table_names = await db.table_names()
379
380
381 @pytest.mark.asyncio
382 async def test_execute_write_block_true(db):
383 await db.execute_write(
390
391
392 @pytest.mark.asyncio
393 async def test_execute_write_block_false(db):
394 await db.execute_write(
401
402
403 @pytest.mark.asyncio
404 async def test_execute_write_fn_block_false(db):
405 def write_fn(conn):
413
414
415 @pytest.mark.asyncio
416 async def test_execute_write_fn_block_true(db):
417 def write_fn(conn):
425
426
427 @pytest.mark.asyncio
428 async def test_execute_write_fn_exception(db):
429 def write_fn(conn):
430 assert False
431
432 with pytest.raises(AssertionError):
433 await db.execute_write_fn(write_fn, block=True)
434
435
436 @pytest.mark.asyncio
437 @pytest.mark.timeout(1)
438 async def test_execute_write_fn_connection_exception(tmpdir, app_client):
439 path = str(tmpdir / "immutable.db")
445 assert False
446
447 with pytest.raises(AssertionError):
448 await db.execute_write_fn(write_fn, block=True)
449
451
452
453 @pytest.mark.asyncio
454 async def test_mtime_ns(db):
455 assert isinstance(db.mtime_ns, int)
467
468
469 @pytest.mark.asyncio
470 async def test_attached_databases(app_client_two_attached_databases_crossdb_enabled):
471 database = app_client_two_attached_databases_crossdb_enabled.ds.get_database(
476
477
478 @pytest.mark.asyncio
479 async def test_database_memory_name(app_client):
480 ds = app_client.ds
494
495
496 @pytest.mark.asyncio
497 async def test_in_memory_databases_forbid_writes(app_client):
498 ds = app_client.ds
499 db = ds.add_database(Database(ds, memory_name="test"))
500 with pytest.raises(sqlite3.OperationalError):
501 await db.execute("create table foo (t text)")
502 assert await db.table_names() == []
datasette/tests/test_internals_datasette_client.py
1 from .fixtures import app_client
2 import httpx
3 import pytest
4
5
6 @pytest.fixture
7 def datasette(app_client):
8 return app_client.ds
9
10
11 @pytest.mark.asyncio
12 @pytest.mark.parametrize(
13 "method,path,expected_status",
14 [
31
32
33 @pytest.mark.asyncio
34 @pytest.mark.parametrize("prefix", [None, "/prefix/"])
35 async def test_client_post(datasette, prefix):
36 original_base_url = datasette._settings["base_url"]
52
53
54 @pytest.mark.asyncio
55 @pytest.mark.parametrize(
56 "prefix,expected_path", [(None, "/asgi-scope"), ("/prefix/", "/prefix/asgi-scope")]
57 )
datasette/tests/test_internals_datasette.py
4 from itsdangerous import BadSignature
5 from .fixtures import app_client
6 import pytest
7
8
9 @pytest.fixture
10 def datasette(app_client):
11 return app_client.ds
15 db = datasette.get_database("fixtures")
16 assert "fixtures" == db.name
17 with pytest.raises(KeyError):
18 datasette.get_database("missing")
19
25
26
27 @pytest.mark.parametrize("value", ["hello", 123, {"key": "value"}])
28 @pytest.mark.parametrize("namespace", [None, "two"])
29 def test_sign_unsign(datasette, value, namespace):
30 extra_args = [namespace] if namespace else []
32 assert value != signed
33 assert value == datasette.unsign(signed, *extra_args)
34 with pytest.raises(BadSignature):
35 datasette.unsign(signed[:-1] + ("!" if signed[-1] != "!" else ":"))
36
37
38 @pytest.mark.parametrize(
39 "setting,expected",
40 (
datasette/tests/test_internals_request.py
1 from datasette.utils.asgi import Request
2 import json
3 import pytest
4
5
6 @pytest.mark.asyncio
7 async def test_request_post_vars():
8 scope = {
28
29
30 @pytest.mark.asyncio
31 async def test_request_post_body():
32 scope = {
72 assert expected[i] == key
73 assert 2 == len(request.args)