ripgrep


whosonfirst-datasette/Dockerfile

1   FROM datasetteproject/datasette
2   RUN apt update && apt install -y wget bzip2
3   RUN wget -q https://latest.datasette.io/fixtures.db
4   # RUN datasette inspect --inspect-file=/mnt/whosonfirst/inspect.json /fixtures.db /mnt/whosonfirst/whosonfirst-data-latest.db
5   EXPOSE 8001
6   CMD ["datasette", "-h", "0.0.0.0", "-p", "8001", "/mnt/whosonfirst/whosonfirst-data-latest.db", "/fixtures.db", "--cors"]

yaml-to-sqlite/tests/test_cli.py

1   from click.testing import CliRunner
2   from yaml_to_sqlite import cli
3   import sqlite_utils
29  
30  
31  def test_without_pk(tmpdir):
32      db_path = tmpdir / "db.db"
33      assert (
44  
45  
46  def test_with_pk(tmpdir):
47      db_path = tmpdir / "db.db"
48      assert (
59  
60  
61  def test_single_column(tmpdir):
62      db_path = tmpdir / "db.db"
63      test_yaml = "- One\n" "- Two\n" "- Three\n"
64      assert (
65          0
68              cli.cli,
69              [str(db_path), "numbers", "-", "--single-column", "name"],
70              input=test_yaml,
71          )
72          .exit_code
78  
79  
80  def test_alters_if_necessary(tmpdir):
81      db_path = tmpdir / "db.db"
82      assert (

vaccinate-ca-datasette/metadata.yml

5   about_url: https://github.com/simonw/vaccinate-ca-datasette
6   description_html: |-
7     <a href="/vaccinateca/locations?_facet=Affiliation&_facet=Latest+report+yes%3F&Latest+report+yes%3F=1&_facet_array=Availability+Info">Latest places that reported yes, on a map</a>

yaml-to-sqlite/setup.py

24      packages=find_packages(),
25      install_requires=["click", "PyYAML", "sqlite-utils>=3.9.1"],
26      setup_requires=["pytest-runner"],
27      extras_require={"test": ["pytest"]},
28      entry_points="""
29          [console_scripts]
30          yaml-to-sqlite=yaml_to_sqlite.cli:cli
31      """,
32      tests_require=["yaml-to-sqlite[test]"],
33      url="https://github.com/simonw/yaml-to-sqlite",
34      classifiers=[

twitter-to-sqlite/setup.py

37          "python-dateutil",
38      ],
39      extras_require={"test": ["pytest"]},
40      tests_require=["twitter-to-sqlite[test]"],
41  )

twitter-to-sqlite/tests/test_save_tweets.py

2   import pathlib
3   
4   import pytest
5   import sqlite_utils
6   from twitter_to_sqlite import utils
7   
8   
9   @pytest.fixture
10  def tweets():
11      return json.load(open(pathlib.Path(__file__).parent / "tweets.json"))
12  
13  
14  @pytest.fixture
15  def db(tweets):
16      db = sqlite_utils.Database(memory=True)
19  
20  
21  def test_tables(db):
22      assert {
23          "sources",
52  
53  
54  def test_users(db):
55      user_rows = list(db["users"].rows)
56      assert [
184 
185 
186 def test_tweets(db):
187     tweet_rows = list(db["tweets"].rows)
188     assert [
315 
316 
317 def test_sources(db):
318     source_rows = list(db["sources"].rows)
319     assert [
341 
342 
343 def test_places(db):
344     place_rows = list(db["places"].rows)
345     assert [
359 
360 
361 def test_media(db):
362     media_rows = list(db["media"].rows)
363     media_tweets_rows = list(db["media_tweets"].rows)

twitter-to-sqlite/tests/test_migrations.py

1   import sqlite_utils
2   from click.testing import CliRunner
3   import sqlite_utils
4   from twitter_to_sqlite import cli, migrations
5   
6   from .test_import import zip_contents_path
7   from .test_save_tweets import db, tweets
8   
9   
10  def test_no_migrations_on_first_run(tmpdir, zip_contents_path):
11      output = str(tmpdir / "output.db")
12      args = ["import", output, str(zip_contents_path / "follower.js")]
21  
22  
23  def test_convert_source_column():
24      db = sqlite_utils.Database(memory=True)
25      db["tweets"].insert_all(
51  
52  
53  def test_convert_source_column_against_real_database(db):
54      assert "migrations" not in db.table_names()
55      migrations.convert_source_column(db)

twitter-to-sqlite/tests/test_import.py

2   import pathlib
3   
4   import pytest
5   import sqlite_utils
6   from click.testing import CliRunner
7   from twitter_to_sqlite import cli
8   
10  
11  
12  @pytest.fixture
13  def zip_contents_path():
14      return pathlib.Path(__file__).parent / "zip_contents"
15  
16  
17  @pytest.fixture
18  def import_test_zip(tmpdir, zip_contents_path):
19      archive = str(tmpdir / "archive.zip")
20      buf = io.BytesIO()
25  
26  
27  def test_create_zip(zip_contents_path):
28      zf = create_zip(zip_contents_path)
29      assert {
37  
38  
39  def test_cli_import_zip_file(import_test_zip):
40      tmpdir, archive = import_test_zip
41      output = str(tmpdir / "output.db")
42      result = CliRunner().invoke(cli.cli, ["import", output, archive])
46  
47  
48  def test_cli_import_folder(tmpdir, zip_contents_path):
49      output = str(tmpdir / "output.db")
50      result = CliRunner().invoke(cli.cli, ["import", output, str(zip_contents_path)])
54  
55  
56  def test_cli_import_specific_files(tmpdir, zip_contents_path):
57      output = str(tmpdir / "output.db")
58      result = CliRunner().invoke(
109 
110 
111 def test_deletes_existing_archive_tables(import_test_zip):
112     tmpdir, archive = import_test_zip
113     output = str(tmpdir / "output.db")
114     db = sqlite_utils.Database(output)

til/typescript/basic-tsc.md

22      % ./node_modules/.bin/tsc --init
23  
24  Next step: create a `.ts` file to start testing it out. I put the following in `greetings.ts`:
25  
26  ```typescript

til/sqlite/track-timestamped-changes-to-a-table.md

75  INSERT INTO foo VALUES ('hello4');
76  ```
77  To test this I ran `sqlite3` (with no arguments, which provides an in-memory database to play with), pasted in the above and then ran this:
78  ```
79  sqlite> .headers on

til/sqlite/text-value-is-integer-or-float.md

17  The `|| '.0'` bit there is needed because `cast('1' as REAL)` returns `1.0`, not just `1`.
18  
19  (Note that `1.200` will not pass this test and will be incorrectly considered an invalid floating point representation)
20  
21  ## Demos
42    )
43  ```
44  [Try that here](https://latest.datasette.io/fixtures?sql=select%0D%0A++value%2C%0D%0A++cast%28cast%28value+AS+REAL%29+AS+TEXT%29+in+%28value%2C+value+%7C%7C+%27.0%27%29+as+is_valid_float%0D%0Afrom%0D%0A++%28%0D%0A++++select%0D%0A++++++%271%27+as+value%0D%0A++++union%0D%0A++++select%0D%0A++++++%271.1%27+as+value%0D%0A++++union%0D%0A++++select%0D%0A++++++%27dog%27+as+value%0D%0A++++union%0D%0A++++select%0D%0A++++++null+as+value%0D%0A++%29)
45  
46  | value | is_valid_float |
71    )
72  ```
73  [Try that here](https://latest.datasette.io/fixtures?sql=select%0D%0A++value%2C%0D%0A++cast%28cast%28value+AS+INTEGER%29+AS+TEXT%29+%3D+value+as+is_valid_int%0D%0Afrom%0D%0A++%28%0D%0A++++select%0D%0A++++++%271%27+as+value%0D%0A++++union%0D%0A++++select%0D%0A++++++%271.1%27+as+value%0D%0A++++union%0D%0A++++select%0D%0A++++++%27dog%27+as+value%0D%0A++++union%0D%0A++++select%0D%0A++++++null+as+value%0D%0A++%29)
74  
75  | value | is_valid_int |

til/sqlite/splitting-commas-sqlite.md

25  | x12 | y1234 | z12345 |
26  
27  Here's [a live demo of the query](https://latest.datasette.io/fixtures?sql=with+comma_locations+as+%28%0D%0A++select+instr%28%3Apath%2C+%27%2C%27%29+as+first_comma%2C%0D%0A++instr%28%3Apath%2C+%27%2C%27%29+%2B+instr%28substr%28%3Apath%2C+instr%28%3Apath%2C+%27%2C%27%29+%2B+1%29%2C+%27%2C%27%29+as+second_comma%0D%0A%29%2C+variables+as+%28%0D%0A++select%0D%0A++++substr%28%3Apath%2C+0%2C+first_comma%29+as+first%2C%0D%0A++++substr%28%3Apath%2C+first_comma+%2B+1%2C+second_comma+-+first_comma+-+1%29+as+second%2C%0D%0A++++substr%28%3Apath%2C+second_comma+%2B+1%29+as+third%0D%0A++from+comma_locations%0D%0A%29%0D%0Aselect+*+from+variables&path=x12%2Cy1234%2Cz12345).

til/sqlite/replicating-rqlite.md

5   By default `rqlite` asks you to use its own custom HTTP API - but I wanted to try running it against Datasette. rqlite author Philip O'Toole confirmed that [this should work](https://twitter.com/general_order24/status/1343619601758908419) provided any writes go through the API - each node can be configured to write to an on-disk database file which Datasette can then read from (the default is to use in-memory databases and an on-disk Raft log).
6   
7   Here's how I got that working on my macOS laptop. I used the latest macOS binary from https://github.com/rqlite/rqlite/releases (`rqlite` is written in Go and provides pre-compiled binaries for different systems).
8   
9       cd /tmp

til/sqlite/list-all-columns-in-a-database.md

3   Here's a devious trick for listing ALL columns in a SQLite database, using a SQL query that generates another SQL query.
4   
5   The first query ([demo](https://latest.datasette.io/fixtures?sql=select+group_concat%28%0D%0A++%22select+%27%22+%7C%7C+name+%7C%7C+%22%27+as+table_name%2C+*+from+pragma_table_info%28%27%22+%7C%7C+name+%7C%7C+%22%27%29%22%0D%0A%2C+%27+union+%27%29+%7C%7C+%27+order+by+table_name%2C+cid%27%0D%0A++from+sqlite_master+where+type+%3D+%27table%27%3B)):
6   
7   ```sql
11    from sqlite_master where type = 'table';
12  ```
13  This outputs the second query, which will look something like this ([demo](https://latest.datasette.io/fixtures?sql=select+%27simple_primary_key%27+as+table_name%2C+*+from+pragma_table_info%28%27simple_primary_key%27%29+union+select+%27primary_key_multiple_columns%27+as+table_name%2C+*+from+pragma_table_info%28%27primary_key_multiple_columns%27%29+union+select+%27primary_key_multiple_columns_explicit_label%27+as+table_name%2C+*+from+pragma_table_info%28%27primary_key_multiple_columns_explicit_label%27%29+union+select+%27compound_primary_key%27+as+table_name%2C+*+from+pragma_table_info%28%27compound_primary_key%27%29+union+select+%27compound_three_primary_keys%27+as+table_name%2C+*+from+pragma_table_info%28%27compound_three_primary_keys%27%29+union+select+%27foreign_key_references%27+as+table_name%2C+*+from+pragma_table_info%28%27foreign_key_references%27%29+union+select+%27sortable%27+as+table_name%2C+*+from+pragma_table_info%28%27sortable%27%29+union+select+%27no_primary_key%27+as+table_name%2C+*+from+pragma_table_info%28%27no_primary_key%27%29+union+select+%27123_starts_with_digits%27+as+table_name%2C+*+from+pragma_table_info%28%27123_starts_with_digits%27%29+union+select+%27Table+With+Space+In+Name%27+as+table_name%2C+*+from+pragma_table_info%28%27Table+With+Space+In+Name%27%29+union+select+%27table%2Fwith%2Fslashes.csv%27+as+table_name%2C+*+from+pragma_table_info%28%27table%2Fwith%2Fslashes.csv%27%29+union+select+%27complex_foreign_keys%27+as+table_name%2C+*+from+pragma_table_info%28%27complex_foreign_keys%27%29+union+select+%27custom_foreign_key_label%27+as+table_name%2C+*+from+pragma_table_info%28%27custom_foreign_key_label%27%29+union+select+%27units%27+as+table_name%2C+*+from+pragma_table_info%28%27units%27%29+union+select+%27tags%27+as+table_name%2C+*+from+pragma_table_info%28%27tags%27%29+union+select+%27searchable%27+as+table_name%2C+*+from+pragma_table_info%28%27searchable%27%29+union+select+%27searchable_tags%27+as+table_name%2C+*+from+pragma_table_info%28%27searchable_tags%27%29+union+select+%27searchable_fts%27+as+table_name%2C+*+from+pragma_table_info%28%27searchable_fts%27%29+union+select+%27searchable_fts_content%27+as+table_name%2C+*+from+pragma_table_info%28%27searchable_fts_content%27%29+union+select+%27searchable_fts_segments%27+as+table_name%2C+*+from+pragma_table_info%28%27searchable_fts_segments%27%29+union+select+%27searchable_fts_segdir%27+as+table_name%2C+*+from+pragma_table_info%28%27searchable_fts_segdir%27%29+union+select+%27select%27+as+table_name%2C+*+from+pragma_table_info%28%27select%27%29+union+select+%27infinity%27+as+table_name%2C+*+from+pragma_table_info%28%27infinity%27%29+union+select+%27facet_cities%27+as+table_name%2C+*+from+pragma_table_info%28%27facet_cities%27%29+union+select+%27facetable%27+as+table_name%2C+*+from+pragma_table_info%28%27facetable%27%29+union+select+%27binary_data%27+as+table_name%2C+*+from+pragma_table_info%28%27binary_data%27%29+union+select+%27roadside_attractions%27+as+table_name%2C+*+from+pragma_table_info%28%27roadside_attractions%27%29+union+select+%27attraction_characteristic%27+as+table_name%2C+*+from+pragma_table_info%28%27attraction_characteristic%27%29+union+select+%27roadside_attraction_characteristics%27+as+table_name%2C+*+from+pragma_table_info%28%27roadside_attraction_characteristics%27%29+order+by+table_name%2C+cid)):
14  ```sql
15  select 'simple_primary_key' as table_name, * from pragma_table_info('simple_primary_key') union
156   sqlite_master.name
157 ```
158 [Demo](https://latest.datasette.io/fixtures?sql=select%0D%0A++sqlite_master.name+as+table_name%2C%0D%0A++table_info.*%0D%0Afrom%0D%0A++sqlite_master%0D%0A++join+pragma_table_info%28sqlite_master.name%29+as+table_info%0D%0Aorder+by%0D%0A++sqlite_master.name%2C%0D%0A++table_info.cid).
159 
160 This works with the `pragma_table_info` and `pragma_index_list` and `pragma_foreign_key_list` functions too.
178   columnName
179 ```
180 [Demo](https://latest.datasette.io/fixtures?sql=SELECT+m.name+as+tableName%2C+p.name+as+columnName%0D%0A++++++++FROM+sqlite_master+m%0D%0A++++++++LEFT+OUTER+JOIN+pragma_table_info%28%28m.name%29%29+p+ON+m.name+%3C%3E+p.name%0D%0A++++++++WHERE+m.type+IN+%28%27table%27%2C%27view%27%29+AND+m.name+NOT+LIKE+%27sqlite_%25%27%0D%0A++++++++ORDER+BY+tableName%2C+columnName).

til/sqlite/column-combinations.md

32    num_rows desc
33  ```
34  [Try that here](https://latest.datasette.io/fixtures?sql=select%0D%0A++++case+when+%5Bpk%5D+is+not+null+then+%27pk%2C+%27+else+%27%27+end+%7C%7C%0D%0A++++case+when+%5Bcreated%5D+is+not+null+then+%27created%2C+%27+else+%27%27+end+%7C%7C%0D%0A++++case+when+%5Bplanet_int%5D+is+not+null+then+%27planet_int%2C+%27+else+%27%27+end+%7C%7C%0D%0A++++case+when+%5Bon_earth%5D+is+not+null+then+%27on_earth%2C+%27+else+%27%27+end+%7C%7C%0D%0A++++case+when+%5Bstate%5D+is+not+null+then+%27state%2C+%27+else+%27%27+end+%7C%7C%0D%0A++++case+when+%5Bcity_id%5D+is+not+null+then+%27city_id%2C+%27+else+%27%27+end+%7C%7C%0D%0A++++case+when+%5Bneighborhood%5D+is+not+null+then+%27neighborhood%2C+%27+else+%27%27+end+%7C%7C%0D%0A++++case+when+%5Btags%5D+is+not+null+then+%27tags%2C+%27+else+%27%27+end+%7C%7C%0D%0A++++case+when+%5Bcomplex_array%5D+is+not+null+then+%27complex_array%2C+%27+else+%27%27+end+%7C%7C%0D%0A++++case+when+%5Bdistinct_some_null%5D+is+not+null+then+%27distinct_some_null%2C+%27+else+%27%27+end%0D%0A++as+columns%2C%0D%0A++count%28*%29+as+num_rows%0D%0Afrom%0D%0A++%5Bfacetable%5D%0D%0Agroup+by%0D%0A++columns%0D%0Aorder+by%0D%0A++num_rows+desc)
35  
36  This has the desired effect: it gives me back all of the combinations of not-null columns in the table, with a count for each one.
53    num_rows desc' as query from pragma_table_info(:table)
54  ```
55  [Try that out](https://latest.datasette.io/fixtures?sql=select+%27select%0D%0A%27+%7C%7C+group_concat%28%27++++case+when+%5B%27+%7C%7C+name+%7C%7C+%27%5D+is+not+null+then+%27+%7C%7C+quote%28name+%7C%7C+%27%2C+%27%29+%7C%7C+%27+else+%27%27%27%27+end%27%2C+%27+%7C%7C%0D%0A%27%29+%7C%7C+%27%0D%0A++as+columns%2C%0D%0A++count%28*%29+as+num_rows%0D%0Afrom%0D%0A++%5B%27+%7C%7C+%3Atable+%7C%7C+%27%5D%0D%0Agroup+by%0D%0A++columns%0D%0Aorder+by%0D%0A++num_rows+desc%27+as+query+from+pragma_table_info%28%3Atable%29&table=facetable)
56  
57  This takes `:table` as an input and generates SQL which can be used to generate column-combination counts.

til/sqlite/build-specific-sqlite-pysqlite-macos.md

1   # Building a specific version of SQLite with pysqlite on macOS/Linux
2   
3   I wanted the ability to test my Python software against a specific version of SQLite on macOS. I found a way to do that using [pysqlite3](https://github.com/coleifer/pysqlite3).
4   
5   First, clone the GitHub mirror of SQLite (so I don't have to learn how to use Fossil):
31      python3 setup.py build_static build
32  
33  The end result sits in a `pysqlite3` folder (on my machine, `/tmp/pysqlite3/build/lib.macosx-10.15-x86_64-3.9`) - test it like this:
34  
35      cd /tmp/pysqlite3/build/lib.macosx-10.15-x86_64-3.9
54      pip install /tmp/pysqlite3/dist/pysqlite3-0.4.6-cp39-cp39-macosx_10_15_x86_64.whl
55  
56  This exact same process works on Linux too (tested inside a default GitHub Actions Linux worker).

til/sqlite/blob-literals.md

7   ```
8   
9   This was while writing a unit test for `datasette-media` - for [issue #19](https://github.com/simonw/datasette-media/issues/19). I used it in the test [here](https://github.com/simonw/datasette-media/blob/2cf64d949ccb8cd5f34b24aeb41b2a91de14cdd2/tests/test_media.py#L292-L295).
10  
11  The SQLite documentation for [Literal values](https://www.sqlite.org/lang_expr.html#literal_values_constants_) explains how to do this:

til/spatialite/knn.md

1   # KNN queries with SpatiaLite
2   
3   The latest version of SpatiaLite adds KNN support, which makes it easy to efficiently answer the question "what are the X closest records to this point?".
4   
5   The USGS earthquakes GeoJSON is a great dataset for experimenting with these features.
9   `https://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/all_month.geojson` currently contains 10,642 features.
10  
11  To turn that into a SpatiaLite database using the latest version of [geojson-to-sqlite](https://github.com/simonw/geojson-to-sqlite):
12  ```bash
13  curl 'https://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/all_month.geojson' | \

til/sphinx/sphinx-autodoc.md

1   # Adding Sphinx autodoc to a project, and configuring Read The Docs to build it
2   
3   I built a [new API reference page](https://sqlite-utils.datasette.io/en/latest/reference.html) today for `sqlite-utils`, using the Sphinx [autodoc extension](https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html) to extract docstrings from the code and use them to build a full class reference.
4   
5   I've avoided this kind of documentation in the past because I think narrative prose is a *much* better way of providing documentation - but ``sqlite-utils`` already has [detailed narrative prose](https://sqlite-utils.datasette.io/en/stable/python-api.html), so I felt that adding reference documentation powered by docstrings could enhance that project - while also providing better inline documentation for tools such as Visual Studio Code and Jupyter.
139 ## The end result
140 
141 The new page of documentation is now live at [en/latest/reference.html](https://sqlite-utils.datasette.io/en/latest/reference.html). The pull request in which I figured this all out is [sqlite-utils/pull/312](https://github.com/simonw/sqlite-utils/pull/312).

til/selenium/selenium-python-macos.md

84  print(firefox.find_element_by_css_selector('body').text)
85  ```
86  I used `wget` for the download (rather than clicking the link in my browser) thanks to the warning here: https://firefox-source-docs.mozilla.org/testing/geckodriver/Notarization.html
87  
88  An easier option: install it with Homebrew:

til/python/packaging-pyinstaller.md

67  I solved this by adding each `ModuleNotFoundError` module to `--hidden-import` until it worked.
68  
69  I've tested this script (and the generated executables) on both macOS and Ubuntu Linux so far, and it's worked perfectly in both cases. See [issue 93](https://github.com/simonw/datasette/issues/93) for more details.

til/python/introspect-function-parameters.md

18  ```
19  
20  And here's an illustrative unit test:
21  
22  ```python
23  def test_call_with_supported_arguments():
24      def foo(a, b):
25          return "{}+{}".format(a, b)
28      assert "1+2" == utils.call_with_supported_arguments(foo, a=1, b=2, c=3)
29  
30      with pytest.raises(TypeError):
31          utils.call_with_supported_arguments(foo, a=1)
32  ```

til/python/installing-upgrading-plugins-with-pipx.md

56      ]
57  
58  I added all of this to the Datasette docs here: https://docs.datasette.io/en/latest/installation.html#using-pipx
59  (see https://github.com/simonw/datasette/issues/756).

til/python/generate-nested-json-summary.md

1   # Generated a summary of nested JSON data
2   
3   I was trying to figure out the shape of the JSON object from https://github.com/simonw/coronavirus-data-gov-archive/blob/master/data_latest.json?raw=true - which is 3.2MB and heavily nested, so it's difficult to get a good feel for the shape.
4   
5   I solved this with a Python `summarize()` function which recursively truncates the nested lists and dictionaries.
28  import json, requests
29  data = requests.get(
30      "https://github.com/simonw/coronavirus-data-gov-archive/blob/master/data_latest.json?raw=true"
31  ).json()
32  print(json.dumps(summarize(data, list_limit=2, key_limit=7), indent=4))

til/python/codespell.md

51  jobs:
52    spellcheck:
53      runs-on: ubuntu-latest
54      steps:
55      - uses: actions/checkout@v2

til/python/call-pip-programatically.md

3   I needed this for the `datasette install` and `datasette uninstall` commands, see [issue #925](https://github.com/simonw/datasette/issues/925).
4   
5   My initial attempt at this resulted in weird testing errors ([#928](https://github.com/simonw/datasette/issues/928)) - while investigating them I stumbled across [this comment](https://github.com/pypa/pip/blob/e060970d51c5946beac8447eb95585d83019582d/src/pip/_internal/cli/main.py#L23-L47) in the `pip` source code:
6   
7   ```
41       run_module("pip", run_name="__main__") 
42  ```
43  And here's how I wrote [a unit test](https://github.com/simonw/datasette/blob/afdeda8216d4d3027f87583ccdbef17ad85022ef/tests/test_cli.py#L114-L124) for it:
44  ```python
45  @mock.patch("datasette.cli.run_module")
46  def test_install(run_module):
47      runner = CliRunner()
48      runner.invoke(cli, ["install", "datasette-mock-plugin", "datasette-mock-plugin2"])

til/pytest/subprocess-server.md

1   # Start a server in a subprocess during a pytest session
2   
3   I wanted to start an actual server process, run it for the duration of my pytest session and shut it down at the end.
4   
5   Here's the recipe I came up with. This fixture lives in `conftest.py`:
6   
7   ```python
8   import pytest
9   import sqlite_utils
10  import subprocess
11  
12  @pytest.fixture(scope="session")
13  def ds_server(tmp_path_factory):
14      db_directory = tmp_path_factory.mktemp("dbs")
15      db_path = db_directory / "test.db"
16      db = sqlite_utils.Database(db_path)
17      insert_test_data(db)
18      ds_proc = subprocess.Popen(
19          [
31      assert not ds_proc.poll(), ds_proc.stdout.read().decode("utf-8")
32      yield ds_proc
33      # Shut it down at the end of the pytest session
34      ds_proc.terminate()
35  ```
36  A test looks like this:
37  ```python
38  import httpx
39  
40  def test_server_starts(ds_server):
41      response = httpx.get("http://127.0.0.1:8041/")
42      assert response.status_code == 200

til/pytest/session-scoped-tmp.md

1   # Session-scoped temporary directories in pytest
2   
3   I habitually use the `tmpdir` fixture in pytest to get a temporary directory that will be cleaned up after each test, but that doesn't work with `scope="session"` - which can be used to ensure an expensive fixture is run only once per test session and the generated content is used for multiple tests.
4   
5   To get a temporary directory that works with `scope="session"`, use the `tmp_path_factory` built-in pytest fixture like this:
6   
7   ```python
8   import pytest
9   
10  
11  @pytest.fixture(scope="session")
12  def template_dir(tmp_path_factory):
13      template_dir = tmp_path_factory.mktemp("page-templates")
19  
20  
21  def test_about(template_dir):
22      assert "ABOUT!" == (template_dir / "pages" / "about.html").read_text()
23  
24  
25  def test_request(template_dir):
26      assert "request" == (template_dir / "pages" / "request.html").read_text()
27  ```
28  
29  Example: https://github.com/simonw/datasette/blob/1b7b66c465e44025ec73421bd69752e42f108321/tests/test_custom_pages.py#L16-L45

til/pytest/registering-plugins-in-tests.md

1   # Registering temporary pluggy plugins inside tests
2   
3   While implementing more finely-grained permissions for `datasette-insert-api` ([issue 8](https://github.com/simonw/datasette-insert-api/issues/8)) I decided I wanted to register a Datasette pluggy plugin for the duration of a single test.
4   
5   Here's the pattern I figured out for doing that:
8   from datasette import hookimpl
9   from datasette.plugins import pm
10  import pytest
11  
12  
13  def test_using_test_plugin():
14      class TestPlugin:
15          __name__ = "TestPlugin"
22      pm.register(TestPlugin(), name="undo")
23      try:
24          # Rest of test goes here
25      finally:
26          pm.unregister(name="undo")
27  ```
28  
29  Here's [an example](https://github.com/simonw/datasette-insert/blob/7f4c2b3954190d547619d043bbe714481b10ac1e/tests/test_insert_api.py) of a test that uses a pytest fixture to register (and de-register) a plugin:
30  
31  ```python
33  from datasette.app import Datasette
34  from datasette.plugins import pm
35  import pytest
36  
37  
38  @pytest.fixture
39  def unsafe():
40      class UnsafeInsertAll:
51  
52  
53  @pytest.mark.asyncio
54  async def test_insert_alter(ds, unsafe):
55      async with httpx.AsyncClient(app=ds.app()) as client:
56          response = await client.post(

til/pytest/pytest-code-coverage.md

1   # Code coverage using pytest and codecov.io
2   
3   I got my [asgi-csrf](https://github.com/simonw/asgi-csrf) Python package up to 100% code coverage. Here's [the pull request](https://github.com/simonw/asgi-csrf/issues/13).
4   
5   I started by installing and using the [pytest-cov](https://pypi.org/project/pytest-cov/) pytest plugin.
6   
7   ```
8   pip install pytest-cov
9   pytest --cov=asgi_csrf
10  ```
11  This shows the current code coverage percentage for the `asgi_csrf` module in the terminal output:
13  collected 18 items                                                                                                                                                   
14  
15  test_asgi_csrf.py ..................                                                                                                                           [100%]
16  
17  ---------- coverage: platform darwin, python 3.7.3-final-0 -----------
23  ========= 18 passed in 0.37s =========
24  ```
25  To generate an HTML report showing which lines are not covered by tests:
26  ```
27  pytest --cov=asgi_csrf --cov-report=html
28  open htmlcov/index.html
29  ```
30  Here's a hosted copy of that report: https://asgi-csrf-htmlcov-ewca4t9se.vercel.app/asgi_csrf_py.html
31  
32  ## Failing the tests if coverage is below a certain threshold
33  
34  The `--cov-fail-under=100` option does this:
35  
36  ```
37  pytest --cov-fail-under=100 --cov asgi_csrf 
38  ======= test session starts =======
39  platform darwin -- Python 3.7.3, pytest-6.0.1, py-1.9.0, pluggy-0.13.1
40  rootdir: /Users/simon/Dropbox/Development/asgi-csrf
41  plugins: cov-2.10.1, asyncio-0.14.0
42  collected 18 items                                                                                                                                                   
43  
44  test_asgi_csrf.py ..................                                                                                                                           [100%]
45  
46  ---------- coverage: platform darwin, python 3.7.3-final-0 -----------
49  asgi_csrf.py     169     13    92%
50  
51  FAIL Required test coverage of 100% not reached. Total coverage: 92.31%
52  ```
53  I added this to my [GitHub test action](https://github.com/simonw/asgi-csrf/blob/83d2b4f6bb034b746fd3f20f57ebdbaeae007a73/.github/workflows/test.yml#L27-L29):
54  ```yaml
55      - name: Run tests
56        run: |
57          pytest --cov-fail-under=100 --cov asgi_csrf
58  ```
59  ## Pushing results to codecov.io
61  https://codecov.io/ offers free coverage reporting for open source projects. I authorized it against my GitHub account, then enabled it for the `asgi-csrf` project by navigating to https://codecov.io/gh/simonw/asgi-csrf (hacking the URL saves you from having to paginate through all of your repos looking for the right one).
62  
63  codecov.io gives you a token - set that as a GitHub repository secret as `CODECOV_TOKEN` - then add the following to the test action configuration:
64  ```yaml
65      - name: Upload coverage to codecov.io
69          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
70  ```
71  This will upload your coverage report (whether the previous test step failed or succeeded). codecov.io then reports back to pull requests and maintains a dashboard for your project.
72  
73  codecov.io doesn't detect if you use a `main` or `master` branch so I had to switch the default branch in the settings at https://codecov.io/gh/simonw/asgi-csrf/settings

til/pytest/mock-httpx.md

1   # How to mock httpx using pytest-mock
2   
3   I wrote this test to exercise some [httpx](https://pypi.org/project/httpx/) code today, using [pytest-mock](https://pypi.org/project/pytest-mock/).
4   
5   The key was to use `mocker.patch.object(cli, "httpx")` which patches the `httpx` module that was imported by the `cli` module.
6   
7   Here the `mocker` function argument is a fixture that is provided by `pytest-mock`.
8   
9   ```python
10  from conditional_get import cli
11  from click.testing import CliRunner
12  
13  
14  def test_performs_conditional_get(mocker):
15      m = mocker.patch.object(cli, "httpx")
16      m.get.return_value = mocker.Mock()
39          )
40  ```
41  https://github.com/simonw/conditional-get/blob/485fab46f01edd99818b829e99765ed9ce0978b5/tests/test_cli.py
42  
43  ## Mocking httpx.stream
54  https://stackoverflow.com/a/6112456 helped me figure out the following:
55  ```python
56  def test_performs_conditional_get(mocker):
57      m = mocker.patch.object(cli, "httpx")
58      m.stream.return_value.__enter__.return_value = mocker.Mock()
62      ]
63  ```
64  https://github.com/simonw/conditional-get/blob/80454f972d39e2b418572d7938146830fab98fa6/tests/test_cli.py
65  
66  ## Mocking an HTTP error triggered by response.raise_for_status()
68  Calling `response.raise_for_status()` raises an exception if an HTTP error (e.g. a 404 or 500) occurred.
69  
70  Here's how I [mocked that to return an error](https://github.com/simonw/airtable-to-yaml/blob/ebd94b2e29d6f2ec3dc64d161495a759330027e8/tests/test_airtable_to_yaml.py#L43-L56):
71  
72  ```python
73  def test_airtable_to_yaml_error(mocker):
74      m = mocker.patch.object(cli, "httpx")
75      m.get.return_value = mocker.Mock()

til/pytest/assert-dictionary-subset.md

1   # Asserting a dictionary is a subset of another dictionary
2   
3   My [lazy approach to writing unit tests](https://simonwillison.net/2020/Feb/11/cheating-at-unit-tests-pytest-black/) means that sometimes I want to run an assertion against most (but not all) of a dictionary.
4   
5   Take for example an API endpoint that returns something like this:
12  }
13  ```
14  I want to efficiently assert against the second two keys, but I don't want to hard-code the SQLite version into my test in case it changes in the future.
15  
16  Solution:
25  The trick here is using `expected.items() <= actual.items()` to assert that one dictionary is a subset of another.
26  
27  Here's a recent example test that uses this trick: https://github.com/simonw/datasette/blob/40885ef24e32d91502b6b8bbad1c7376f50f2830/tests/test_plugins.py#L414-L446

til/nginx/proxy-domain-sockets.md

27  (The `$PWD` seems necessary to avoid `nginx` looking in its default directory.)
28  
29  Start something listening on the `/tmp/datasette.sock` path - with the latest Datasette you can do this:
30  
31      datasette --uds /tmp/datasette.sock

til/mediawiki/mediawiki-sqlite-macos.md

30  ## Downloading MediaWiki
31  
32  I downloaded the latest version of MediaWiki from [their downloads page](https://www.mediawiki.org/wiki/Download), unzipped it, ran `php -S localhost:8000` and got this error message:
33  
34  <img width="664" alt="MediaWiki_1_35" src="https://user-images.githubusercontent.com/9599/110229420-1d932280-7ebe-11eb-98f3-eb13fa7f07c7.png">
35  
36  Turns out the latest MediaWiki requires PHP 7.3.19, but the version bundled with my laptop was 7.3.11.
37  
38  I didn't want to mess around with upgrading PHP, so I used the [compatibility page](https://www.mediawiki.org/wiki/Compatibility#PHP) to figure out the most recent MediaWiki version that would work with PHP 7.3.11. I decided to try MediaWiki 1.31, which can be downloaded from <https://releases.wikimedia.org/mediawiki/1.31/?C=S;O=D>

til/macos/zsh-pip-install.md

1   # Running pip install -e .[test] in zsh on macOS Catalina
2   
3   macOS Catalina uses `zsh` rather than `bash` as the default shell (apparently because Apple don't like GPL 3).
9        . /Users/simon/.local/share/virtualenvs/datasette-AWNrQs95/bin/activate                                                                         
10      datasette %  . /Users/simon/.local/share/virtualenvs/datasette-AWNrQs95/bin/activate
11      (datasette) simon@Simons-MacBook-Pro datasette % pip install -e .[test]
12      zsh: no matches found: .[test]
13  
14  In `zsh` the `[` character has special meaning.
16  Two solutions. The first is to use quotes:
17  
18      datasette % pip install -e '.[test]'
19      Obtaining file:///Users/simon/Dropbox/Development/datasette
20      ...
22  The second is to prefix it with `noglob`:
23  
24      datasette % noglob pip install -e .[test]

til/javascript/jest-without-package-json.md

1   # Using Jest without a package.json
2   
3   I wanted to try out [Jest](https://jestjs.io/) for writing JavaScript unit tests, in a project that wasn't set up with `package.json` and other NPM related things.
4   
5   Jest looks for `*.spec.js` tests in a `__tests__` directory. It expects to find configuration in a `package.json` file but it can be passed configuration using the `-c` option - which can be a path to a JSON configuration file or can be a JSON literal.
6   
7   I created a file I wanted to test in `plugins.js` which looked like this. The `module.exports` at the bottom was required so Jest could later import the code:
8   
9   ```javascript
36  ```
37  
38  Then I created `__tests__/plugins.spec.js` with this:
39  
40  ```javascript
42  
43  describe("Datasette Plugins", () => {
44    test("it should have datasette.plugins", () => {
45      expect(!!datasette.plugins).toEqual(true);
46    });
47    test("registering a plugin should work", () => {
48      datasette.plugins.register("numbers", (a, b) => a + b, ["a", "b"]);
49      var result = datasette.plugins.call("numbers", { a: 1, b: 2 });
58  ```
59  % npx jest -c '{}'
60   PASS  __tests__/plugins.spec.js
61    Datasette Plugins
62      ✓ it should have datasette.plugins (3 ms)
67  Snapshots:   0 total
68  Time:        1.163 s
69  Ran all test suites.
70  ```

til/javascript/dropdown-menu-with-details-summary.md

22  </details>
23  ```
24  See the top right corner of https://latest-with-plugins.datasette.io/ for a demo.
25  
26  This displays an SVG icon which, when clicked, expands to show the menu. The SVG icon uses `aria-labelledby="nav-menu-svg-title" role="img"` and a `<title id="nav-menu-svg-title">` element for accessibility.

til/homebrew/upgrading-python-homebrew-packages.md

1   # Upgrading Python Homebrew packages using pip
2   
3   [VisiData 2.0](https://www.visidata.org/) came out today. I previously installed VisiData using Homebrew, but the VisiData tap has not yet been updated with the latest version.
4   
5   Homebrew Python packages (including the packages for [Datasette](https://formulae.brew.sh/formula/datasette) and [sqlite-utils](https://formulae.brew.sh/formula/sqlite-utils)) work by setting up their own package-specific virtual environments. This means you can upgrade them without waiting for the tap.

til/homebrew/packaging-python-cli-for-homebrew.md

54    end
55  
56    test do
57      system bin/"datasette", "--help"
58    end
97  `poet -f datasette` generates the full formula.
98  
99  You need to fill in the description and the `test` block, but other than that it looks like it should work straight away.
100 
101 ## Implementing the test block
102 
103 https://docs.brew.sh/Formula-Cookbook#add-a-test-to-the-formula says:
104 
105 > We want tests that don't require any user input and test the basic functionality of the application. For example `foo build-foo input.foo` is a good test and (despite their widespread use) `foo --version` and `foo --help` are bad tests. However, a bad test is better than no test at all.
106 
107 Here's the test block I ended up using for Datasette:
108 
109 ```ruby
110   test do
111     assert_match "15", shell_output("#{bin}/datasette --get '/:memory:.csv?sql=select+3*5'")
112     assert_match "<title>Datasette:", shell_output("#{bin}/datasette --get '/'")
114 ```
115 
116 And here's my test for `sqlite-utils`:
117 
118 ```ruby
119   test do
120     assert_match "15", shell_output("#{bin}/sqlite-utils :memory: 'select 3 * 5'")
121   end

til/heroku/pg-pull.md

27      heroku pg:pull HEROKU_POSTGRESQL_JADE_URL simonwillisonblog -a simonwillisonblog
28  
29  This created a local PostgreSQL database called `simonwillisonblog` and imported my latest backup.
30  
31  When I ran it a second time I had to use `dropdb simonwillisonblog` first to drop the existing local database.

til/github-actions/service-containers-docker.md

3   I have a Django application which uses PostgreSQL. I build the Django application into its own Docker container, push that built container to the GitHub package registry and then deploy that container to production.
4   
5   I wanted to run the tests inside the container as part of the deployment process, to make sure the container that I build is ready to be deployed (via continuous deployment).
6   
7   In production I'm using Digital Ocean PostgreSQL rather than running PostgreSQL in a container. For running the tests I decided to use GitHub's [PostgreSQL service containers](https://docs.github.com/en/actions/guides/creating-postgresql-service-containers) to run the tests.
8   
9   But how do you set it up so tests running inside a Docker container can talk to the PostgreSQL service container provided by the GitHub Actions environment?
10  
11  This took a while to figure out. The key insight was that Docker containers (at least on Linux) have a magic IP address, `172.17.0.1`, which can be used to access their host environment - and GitHub's PostgreSQL container is available to that host environment on localhost port 5432.
14  
15  ```yaml
16  name: Build, test and deploy
17  
18  on:
20  
21  jobs:
22    build_test_deploy:
23      runs-on: ubuntu-latest
24      services:
25        postgres:
42        run: |-
43          docker build -t my-tag .
44      - name: Run tests
45        run: |-
46          docker run \
47            -e DATABASE_URL="postgres://postgres:postgres@172.17.0.1:5432/postgres" \
48            --entrypoint=/app/github-actions-runtests.sh \
49            my-tag
50  ```
51  My `github-actions-runtests.sh` file uses [pytest-django](https://pytest-django.readthedocs.io/) and looks like this:
52  ```bash
53  #!/bin/bash
54  cd /app
55  pytest --ds=config.test_settings
56  ```

til/github-actions/prettier-github-actions.md

23  jobs:
24    prettier:
25      runs-on: ubuntu-latest
26      steps:
27      - name: Check out repo
39  ```
40  
41  The `npx prettier --check 'datasette/static/*[!.min].js'` line ensures that prettier is run in "check" mode (which fails the tests if a matching file does not conform to the formatting rules) - it checks any `.js` file in the `datasette/static` folder but excludes any `.min.js` minified files.
42  
43  I'm using `npx` to run Prettier which installs it if it is missing - as far as I can tell `npx` respects the `.npm` cache so I'm using that to avoid downloading a new copy of Prettier every time. **UPDATE:** Apparently it doesn't, see [#1169](https://github.com/simonw/datasette/issues/1169)

til/github/graphql-pagination-python.md

3   (See also [Building a self-updating profile README for GitHub](https://simonwillison.net/2020/Jul/10/self-updating-profile-readme/) on my blog)
4   
5   For my [auto-updating personal README](https://twitter.com/simonw/status/1281435464474324993) I needed to fetch the latest release for every repository I have on GitHub. Since I have 316 public repos I wanted the most efficient way possible to do this. I decided to use the [GitHub GraphQL API](https://developer.github.com/v4/).
6   
7   Their API allows you to fetch up to 100 repositories at once, and each one can return up to 100 releases. Since I only wanted the most recent release my query ended up looking like this:

til/github/graphql-search-topics.md

9   An oddity of GitHub search is that sort order can be defined using tokens that form part of the search query!
10  
11  Here's a GraphQL query [tested here](https://developer.github.com/v4/explorer/) that returns the most recent 100 `git-scraping` tagged repos, sorted by most recently updated.
12  
13  ```graphql

til/github/dependencies-graphql-api.md

24  I added `https://api.github.com/graphql` as the endpoint.
25  
26  I tested it by running these queries:
27  ```graphql
28  {

til/github-actions/postgresq-service-container.md

1   # Running tests against PostgreSQL in a service container
2   
3   I wanted to run some Django tests - using `pytest-django` and with Django configured to pick up the `DATABASE_URL` environment variable via [dj-database-url](https://github.com/jacobian/dj-database-url) - against a PostgreSQL server running in GitHub Actions.
4   
5   It took a while to figure out the right pattern. The trick was to define a `postgres:` service and then set the `DATABASE_URL` environment variable to the following:
7       postgres://postgres:postgres@127.0.0.1:${{ job.services.postgres.ports['5432'] }}/dbname
8   
9   Here's my full `.github/workflows/test.yml`:
10  
11  ```yaml
12  name: Run tests
13  
14  on: [push]
15  
16  jobs:
17    test:
18      runs-on: ubuntu-latest
19      services:
20        postgres:
44        run: |
45          pip install -r requirements.txt
46      - name: Run tests
47        env:
48          DATABASE_URL: postgres://postgres:postgres@127.0.0.1:${{ job.services.postgres.ports['5432'] }}/dbname
49        run: |
50          cd myproject
51          pytest
52  ```
53  
54  ## And against MySQL
55  
56  I had to figure this out against MySQL as well for `db-to-sqlite` - here's [the workflow test.yml file](https://github.com/simonw/db-to-sqlite/blob/1.4/.github/workflows/test.yml) I ended up with. Key extract here:
57  
58  ```yaml
62          env:
63            MYSQL_ALLOW_EMPTY_PASSWORD: yes
64            MYSQL_DATABASE: test_db_to_sqlite
65          options: >-
66            --health-cmd="mysqladmin ping" --health-interval=10s --health-timeout=5s --health-retries=3
68            - 3306:3306
69      # ...
70      - name: Run tests
71        env:
72          MYSQL_TEST_DB_CONNECTION: mysql://root@127.0.0.1:${{ job.services.mysql.ports['3306'] }}/test_db_to_sqlite
73        run: pytest -vv
74  ```

til/github-actions/markdown-table-of-contents.md

36  jobs:
37    build:
38      runs-on: ubuntu-latest
39      steps:
40      - name: Check out repo

til/github-actions/grep-tests.md

1   # Using grep to write tests in CI
2   
3   GitHub Actions workflows fail if any of the steps executes something that returns a non-zero exit code.
5   Today I learned that `grep` returns a non-zero exit code if it fails to find any matches.
6   
7   This means that piping to grep is a really quick way to write a test as part of an Actions workflow.
8   
9   I wrote a quick soundness check today using the new `datasette --get /path` option, which runs a fake HTTP request for that path through Datasette and returns the response to standard out. Here's an example:
12      - name: Build database
13        run: scripts/build.sh
14      - name: Run tests
15        run: |
16          datasette . --get /us/pillar-point | grep 'Rocky Beaches'

til/github-actions/different-steps-on-a-schedule.md

20  jobs:
21    build_and_deploy:
22      runs-on: ubuntu-latest
23      steps:
24      # ...

til/github-actions/different-postgresql-versions.md

1   # Installing different PostgreSQL server versions in GitHub Actions
2   
3   The GitHub Actions `ubuntu-latest` default runner currently includes an installation of PostgreSQL 13. The server is not running by default but you can interact with it like this:
4   ```
5   $ /usr/lib/postgresql/13/bin/postgres --version
15  This works with `postgresql-10` and `postgresql-11` as well as `postgresql-12`.
16  
17  I wanted to use a GitHub Actions matrix to run my tests against all four versions of PostgreSQL. Here's [my complete workflow](https://github.com/simonw/django-sql-dashboard/blob/1.0.1/.github/workflows/test.yml) - the relevant sections are below:
18  ```yaml
19  name: Test
22  
23  jobs:
24    test:
25      runs-on: ubuntu-latest
26      strategy:
27        matrix:
36          sudo apt-get update
37          sudo apt-get -y install "postgresql-$POSTGRESQL_VERSION"
38      - name: Run tests
39        env:
40          POSTGRESQL_VERSION: ${{ matrix.postgresql-version }}
42          export POSTGRESQL_PATH="/usr/lib/postgresql/$POSTGRESQL_VERSION/bin/postgres"
43          export INITDB_PATH="/usr/lib/postgresql/$POSTGRESQL_VERSION/bin/initdb"
44          pytest
45  ```
46  I modified my tests to call the `postgres` and `initdb` binaries specified by the `POSTGRESQL_PATH` and `INITDB_PATH` environment variables.

til/github-actions/debug-tmate.md

13  jobs:
14    build:
15      runs-on: ubuntu-latest
16      steps:
17      - uses: actions/checkout@v2
27  I ran `ssh JA69KaB2avRPRZSkRb8JPa9Gd@nyc1.tmate.io` and got a direct connection to the Action, with my project files all available thanks to the `- uses: actions/checkout@v2` step.
28  
29  Once I'd finished testing things out in that environment, I typed `touch continue` and the session ended itself.
30  
31  ## Starting a shell just for test failures on manual runs
32  
33  I had a tricky test failure that I wanted to debug interactively. Here's a recipe for starting a tmate shell ONLY if the previous step failed, and only if the run was triggered manually (using `workflow_dispatch`) - because I don't want an accidental test failure opening up a shell and burning up my GitHub Actions minutes allowance.
34  
35  ```yaml
36      steps:
37      - name: Run tests
38        run: pytest
39      - name: tmate session if tests fail
40        if: failure() && github.event_name == 'workflow_dispatch'
41        uses: mxschmitt/action-tmate@v3

til/github-actions/commit-if-file-changed.md

41  
42  ```yaml
43  name: Fetch latest data
44  
45  on:
51  jobs:
52    scheduled:
53      runs-on: ubuntu-latest
54      steps:
55      - name: Check out this repo
56        uses: actions/checkout@v2
57      - name: Fetch latest data
58        run: |-
59          curl https://c19downloads.azureedge.net/downloads/data/data_latest.json | jq . > data_latest.json
60          curl https://c19pub.azureedge.net/utlas.geojson | gunzip | jq . > utlas.geojson
61          curl https://c19pub.azureedge.net/countries.geojson | gunzip | jq . > countries.geojson
67          git add -A
68          timestamp=$(date -u)
69          git commit -m "Latest data: ${timestamp}" || exit 0
70          git push
71  ```

til/electron/sign-notarize-electron-macos.md

150         "./dist/mac/Datasette.app/Contents/Resources/python/bin/python3.9",
151         "./dist/mac/Datasette.app/Contents/Resources/python/lib/python3.9/lib-dynload/xxlimited.cpython-39-darwin.so",
152         "./dist/mac/Datasette.app/Contents/Resources/python/lib/python3.9/lib-dynload/_testcapi.cpython-39-darwin.so"
153       ]
154     },
193 ## Automating it all with GitHub Actions
194 
195 I decided to build and notarize on _every push_ to my repository, so I could save the resulting build as an artifact and install any in-progress work on a computer to test it.
196 
197 Apple [limit you to 75 notarizations a day](https://developer.apple.com/documentation/security/notarizing_macos_software_before_distribution/customizing_the_notarization_workflow#3561440) so I think this is OK for my projects.
198 
199 My full [test.yml](https://github.com/simonw/datasette-app/blob/0.1.0/.github/workflows/test.yml) looks like this:
200 
201 ```yaml
205 
206 jobs:
207   test:
208     runs-on: macos-latest
209     steps:
210       - uses: actions/checkout@v2
232         run: |
233           ./download-python.sh
234       - name: Run tests
235         run: npm test
236         timeout-minutes: 5
237       - name: Build distribution

til/docker/debian-unstable-packages.md

3   For [Datasette #1249](https://github.com/simonw/datasette/issues/1249) I wanted to build a Docker image from the `python:3.9.2-slim-buster` base image ("buster" is the current stable release of Debian) but include a single package from "sid", the unstable Debian distribution.
4   
5   I needed to do this because the latest version of SpatiaLite, version 5, was available in `sid` but not in `buster` (which only has 4.3.0a):
6   
7   https://packages.debian.org/search?keywords=spatialite
8   
9   <img width="923" alt="Package libsqlite3-mod-spatialite&#13;&#13;stretch (oldstable) (libs): Geospatial extension for SQLite - loadable module&#13;    4.3.0a-5+b1: amd64 arm64 armel armhf i386 mips mips64el mipsel ppc64el s390x&#13;    buster (stable) (libs): Geospatial extension for SQLite - loadable module&#13;    4.3.0a-5+b2: amd64 arm64 armel armhf i386 mips mips64el mipsel ppc64el s390x&#13;    bullseye (testing) (libs): Geospatial extension for SQLite - loadable module&#13;    5.0.1-2: amd64 arm64 armel armhf i386 mips64el mipsel ppc64el s390x&#13;    sid (unstable) (libs): Geospatial extension for SQLite - loadable module&#13;    5.0.1-2: alpha amd64 arm64 armel armhf hppa i386 m68k mips64el mipsel ppc64 ppc64el riscv64 s390x sh4 sparc64 x32&#13;    experimental (libs): Geospatial extension for SQLite - loadable module&#13;    5.0.0~beta0-1~exp2 [debports]: powerpcspe" src="https://user-images.githubusercontent.com/9599/112061886-5cf77b00-8b1c-11eb-8f4c-91dce388dc33.png">
10  
11  The recipe that ended up working for me was to install `software-properties-common` to get the `add-apt-repository` command, then use that to install a package from `sid`:

til/django/testing-django-admin-with-pytest.md

1   # Writing tests for the Django admin with pytest-django
2   
3   I'm using [pytest-django](https://pytest-django.readthedocs.io/) on a project and I wanted to write a test for a Django admin create form submission. Here's the pattern I came up with:
4   
5   ```python
6   from .models import Location
7   import pytest
8   
9   
10  def test_admin_create_location_sets_public_id(client, admin_user):
11      client.force_login(admin_user)
12      assert Location.objects.count() == 0
28      assert location.public_id == "lc"
29  ```
30  The trick here is to use the `client` and `admin_user` pytest-django fixtures ([documented here](https://pytest-django.readthedocs.io/en/latest/helpers.html#fixtures)) to get a configured test client and admin user object, then use `client.force_login(admin_user)` to obtain a session where that user is signed-in to the admin. Then write tests as normal.
31  
32  ## Using the admin_client fixture
33  
34  Even better: use the `admin_client` fixture provided by `pytest-django`,
35  which is already signed into the admin:
36  
37  ```python
38  def test_admin_create_location_sets_public_id(admin_client):
39      response = admin_client.post(
40          "/admin/core/location/add/",
45  
46  ```python
47  import pytest
48  
49  
50  @pytest.fixture()
51  def admin_client(client, admin_user):
52      client.force_login(admin_user)
53      return client
54  
55  # Then write tests like this:
56  def test_admin_create_location_sets_public_id(admin_client):
57      response = admin_client.post(
58          "/admin/core/location/add/",

til/django/efficient-bulk-deletions-in-django.md

25  This didn't quite work either, because I have another model `Location` with foreign key references to those reports. So I added this:
26  ```python
27  Location.objects.filter(latest_report__public_id__in=report_ids).update(
28      latest_report=None
29  )
30  ```

til/digitalocean/datasette-on-digitalocean-app-platform.md

35  
36  ```
37  wget https://latest.datasette.io/fixtures.db
38  ```
39  And this resulted in the `fixtures.db` file being served at `/fixtures` under my app's subdomain.

til/datasette/search-all-columns-trick.md

78  I tried this against the FiveThirtyEight database and the query it produced was way beyond the URL length limit for Cloud Run.
79  
80  Here's the result if [run against latest.datasette.io/fixtures](https://latest.datasette.io/fixtures?sql=with+tables+as+%28%0D%0A++select%0D%0A++++name+as+table_name%0D%0A++from%0D%0A++++sqlite_master%0D%0A++where%0D%0A++++type+%3D+%27table%27%0D%0A%29%2C%0D%0Aqueries+as+%28%0D%0A++select%0D%0A++++%27select+%27%27%27+%7C%7C+tables.table_name+%7C%7C+%27%27%27+as+_table%2C+rowid+from+%22%27+%7C%7C+tables.table_name+%7C%7C+%27%22+where+%27+%7C%7C+group_concat%28%0D%0A++++++%27%22%27+%7C%7C+name+%7C%7C+%27%22+like+%27%27%25%27%27+%7C%7C+%3Asearch+%7C%7C+%27%27%25%27%27%27%2C%0D%0A++++++%27+or+%27%0D%0A++++%29+as+query%0D%0A++from%0D%0A++++pragma_table_info%28tables.table_name%29%2C%0D%0A++++tables%0D%0A++group+by%0D%0A++++tables.table_name%0D%0A%29%0D%0Aselect%0D%0A++group_concat%28query%2C+%27+union+all+%27%29%0D%0Afrom%0D%0A++queries):
81  
82  ```sql
386   or "frequency" like '%' || :search || '%'
387 ```
388 [It works!](https://latest.datasette.io/fixtures?sql=select%0D%0A++%27123_starts_with_digits%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22123_starts_with_digits%22%0D%0Awhere%0D%0A++%22content%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27Table+With+Space+In+Name%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22Table+With+Space+In+Name%22%0D%0Awhere%0D%0A++%22pk%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22content%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27attraction_characteristic%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22attraction_characteristic%22%0D%0Awhere%0D%0A++%22pk%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22name%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27binary_data%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22binary_data%22%0D%0Awhere%0D%0A++%22data%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27complex_foreign_keys%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22complex_foreign_keys%22%0D%0Awhere%0D%0A++%22pk%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22f1%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22f2%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22f3%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27compound_primary_key%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22compound_primary_key%22%0D%0Awhere%0D%0A++%22pk1%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22pk2%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22content%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27compound_three_primary_keys%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22compound_three_primary_keys%22%0D%0Awhere%0D%0A++%22pk1%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22pk2%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22pk3%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22content%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27custom_foreign_key_label%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22custom_foreign_key_label%22%0D%0Awhere%0D%0A++%22pk%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22foreign_key_with_custom_label%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27facet_cities%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22facet_cities%22%0D%0Awhere%0D%0A++%22id%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22name%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27facetable%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22facetable%22%0D%0Awhere%0D%0A++%22pk%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22created%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22planet_int%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22on_earth%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22state%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22city_id%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22neighborhood%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22tags%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22complex_array%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%2
7%25%27%0D%0A++or+%22distinct_some_null%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27foreign_key_references%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22foreign_key_references%22%0D%0Awhere%0D%0A++%22pk%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22foreign_key_with_label%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22foreign_key_with_blank_label%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22foreign_key_with_no_label%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22foreign_key_compound_pk1%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22foreign_key_compound_pk2%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27infinity%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22infinity%22%0D%0Awhere%0D%0A++%22value%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27no_primary_key%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22no_primary_key%22%0D%0Awhere%0D%0A++%22content%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22a%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22b%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22c%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27primary_key_multiple_columns%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22primary_key_multiple_columns%22%0D%0Awhere%0D%0A++%22id%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22content%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22content2%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27primary_key_multiple_columns_explicit_label%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22primary_key_multiple_columns_explicit_label%22%0D%0Awhere%0D%0A++%22id%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22content%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22content2%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27roadside_attraction_characteristics%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22roadside_attraction_characteristics%22%0D%0Awhere%0D%0A++%22attraction_id%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22characteristic_id%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27roadside_attractions%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22roadside_attractions%22%0D%0Awhere%0D%0A++%22pk%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22name%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22address%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22latitude%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22longitude%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27searchable%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22searchable%22%0D%0Awhere%0D%0A++%22pk%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22text1%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22text2%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22name+with+.+and+spaces%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27searchable_fts%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22searchable_fts%22%0D%0Awhere%0D%0A++%22text1%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%
27%0D%0A++or+%22text2%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22name+with+.+and+spaces%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27searchable_fts_docsize%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22searchable_fts_docsize%22%0D%0Awhere%0D%0A++%22docid%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22size%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27searchable_fts_segdir%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22searchable_fts_segdir%22%0D%0Awhere%0D%0A++%22level%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22idx%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22start_block%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22leaves_end_block%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22end_block%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22root%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27searchable_fts_segments%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22searchable_fts_segments%22%0D%0Awhere%0D%0A++%22blockid%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22block%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27searchable_fts_stat%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22searchable_fts_stat%22%0D%0Awhere%0D%0A++%22id%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22value%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27searchable_tags%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22searchable_tags%22%0D%0Awhere%0D%0A++%22searchable_id%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22tag%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27select%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22select%22%0D%0Awhere%0D%0A++%22group%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22having%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22and%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22json%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27simple_primary_key%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22simple_primary_key%22%0D%0Awhere%0D%0A++%22id%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22content%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27sortable%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22sortable%22%0D%0Awhere%0D%0A++%22pk1%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22pk2%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22content%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22sortable%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22sortable_with_nulls%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22sortable_with_nulls_2%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22text%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27table%2Fwith%2Fslashes.csv%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22table%2Fwith%2Fslashes.csv%22%0D%0Awhere%0D%0A++%22pk%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22content%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27tags%27+as+_table%2C%0D%0A++rowid%0D%0Afro
m%0D%0A++%22tags%22%0D%0Awhere%0D%0A++%22tag%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27units%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22units%22%0D%0Awhere%0D%0A++%22pk%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22distance%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22frequency%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27&search=museum&_hide_sql=1)
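URL-decoded, the query linked above is one `select ... union all` block per table, with each of its columns tested against the `:search` parameter using `like`. The opening of the query looks like this (the same shape repeats for every remaining table, and the link appends `&search=museum`):

```sql
select
  '123_starts_with_digits' as _table,
  rowid
from
  "123_starts_with_digits"
where
  "content" like '%' || :search || '%'
union all
select
  'Table With Space In Name' as _table,
  rowid
from
  "Table With Space In Name"
where
  "pk" like '%' || :search || '%'
  or "content" like '%' || :search || '%'
union all
-- ...and so on, one select per table in the database
```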

til/cookiecutter/pytest-for-cookiecutter.md

1   # Testing cookiecutter templates with pytest
2   
3   I added some unit tests to my [datasette-plugin](https://github.com/simonw/datasette-plugin) cookiecutter template today, since the latest features involved adding a `hooks/post_gen_project.py` script.
4   
5   Here's [the full test script](https://github.com/simonw/datasette-plugin/blob/503e6fef8e1000ab70103a61571d47ce966064ba/tests/test_cookiecutter_template.py) I wrote. It lives in `tests/test_cookiecutter_template.py` in the root of the repository.
6   
7   To run the tests I have to use `pytest tests`, because running just `pytest` also picks up the templated tests that form part of the cookiecutter template and gets confused by them.
8   
9   The pattern I'm using looks like this:
16  
17  
18  def test_static_and_templates(tmpdir):
19      cookiecutter(
20          template=TEMPLATE_DIRECTORY,
33          "datasette-foo/.github/workflows",
34          "datasette-foo/.github/workflows/publish.yml",
35          "datasette-foo/.github/workflows/test.yml",
36          "datasette-foo/.gitignore",
37          "datasette-foo/datasette_foo",
41          "datasette-foo/README.md",
42          "datasette-foo/setup.py",
43          "datasette-foo/tests",
44          "datasette-foo/tests/test_foo.py",
45      }
46      setup_py = (tmpdir / "datasette-foo" / "setup.py").read_text("utf-8")
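The excerpt above is truncated; a minimal sketch of the same pattern looks roughly like this (the `lib_name` context variable is an illustrative assumption, not copied from the real template):

```python
# Sketch of testing a cookiecutter template with pytest, not the TIL's exact test.
import pathlib

from cookiecutter.main import cookiecutter

# The test lives in tests/ at the root of the repo, so the template is one level up
TEMPLATE_DIRECTORY = str(pathlib.Path(__file__).parent.parent)


def test_template_generates_expected_files(tmpdir):
    cookiecutter(
        template=TEMPLATE_DIRECTORY,
        output_dir=str(tmpdir),
        no_input=True,
        # "lib_name" is an assumed prompt variable for illustration only
        extra_context={"lib_name": "datasette-foo"},
    )
    # Collect every generated path, relative to the temporary directory
    root = pathlib.Path(str(tmpdir))
    paths = {str(p.relative_to(root)) for p in root.glob("**/*")}
    assert "datasette-foo/setup.py" in paths
    assert "datasette-foo/tests/test_foo.py" in paths
```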

til/bash/finding-bom-csv-files-with-ripgrep.md

1   # Finding CSV files that start with a BOM using ripgrep
2   
3   For [sqlite-utils issue 250](https://github.com/simonw/sqlite-utils/issues/250) I needed to locate some test CSV files that start with a UTF-8 BOM.
4   
5   Here's how I did that using [ripgrep](https://github.com/BurntSushi/ripgrep):
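The excerpt stops before the command itself. As a plain-Python stand-in for the same check (not the ripgrep invocation from the TIL), you can look for the three BOM bytes at the start of each file:

```python
# Plain-Python equivalent of the check, shown for illustration only:
# a UTF-8 BOM is the byte sequence EF BB BF at the very start of the file.
import pathlib

for path in pathlib.Path(".").glob("**/*.csv"):
    with open(path, "rb") as fp:
        if fp.read(3) == b"\xef\xbb\xbf":
            print(path)
```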

til/asgi/lifespan-test-httpx.md

1   # Writing tests for the ASGI lifespan protocol with HTTPX
2   
3   Uvicorn silently ignores exceptions that occur during ASGI lifespan protocol startup - see [starlette/issues/486](https://github.com/encode/starlette/issues/486).
7   This exposed a bug in `datasette-debug-asgi`: it wasn't handling lifespan events correctly. [datasette-debug-asgi/issues/1](https://github.com/simonw/datasette-debug-asgi/issues/1)
8   
9   The unit tests weren't catching this because using HTTPX to make test requests [doesn't trigger lifespan events](https://github.com/encode/httpx/issues/350).
10  
11  Florimond Manca had run into this problem too, and built [asgi-lifespan](https://github.com/florimondmanca/asgi-lifespan) to address it.
13  You can wrap an ASGI app in `async with LifespanManager(app):` and the correct lifespan events will be fired by that `with` block.
14  
15  Here's how to use it to [trigger lifespan events in a test](https://github.com/simonw/datasette-debug-asgi/blob/72d568d32a3159c763ce908c0b269736935c6987/test_datasette_debug_asgi.py):
16  
17  ```python
18  from asgi_lifespan import LifespanManager
19  
20  @pytest.mark.asyncio
21  async def test_datasette_debug_asgi():
22      ds = Datasette([], memory=True)
23      app = ds.app()
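The excerpt above cuts off mid-test. Filled out, the pattern looks roughly like this (a sketch: the request URL and asserted status code are assumptions, and it relies on httpx's `app=` shortcut as it existed when the TIL was written, plus pytest-asyncio):

```python
# Sketch of the full pattern: run lifespan events, then make a test request.
import httpx
import pytest
from asgi_lifespan import LifespanManager
from datasette.app import Datasette


@pytest.mark.asyncio
async def test_datasette_debug_asgi():
    ds = Datasette([], memory=True)
    app = ds.app()
    # LifespanManager fires the startup/shutdown events HTTPX skips on its own
    async with LifespanManager(app):
        async with httpx.AsyncClient(app=app) as client:
            # The URL and expected status are illustrative assumptions
            response = await client.get("http://localhost/")
            assert response.status_code == 200
```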

til/README.md

16  * [Running different steps on a schedule](https://github.com/simonw/til/blob/main/github-actions/different-steps-on-a-schedule.md) - 2020-04-20
17  * [Updating a Markdown table of contents with a GitHub Action](https://github.com/simonw/til/blob/main/github-actions/markdown-table-of-contents.md) - 2020-07-22
18  * [Using grep to write tests in CI](https://github.com/simonw/til/blob/main/github-actions/grep-tests.md) - 2020-08-19
19  * [Skipping a GitHub Actions step without failing](https://github.com/simonw/til/blob/main/github-actions/continue-on-error.md) - 2020-08-22
20  * [Open a debugging shell in GitHub Actions with tmate](https://github.com/simonw/til/blob/main/github-actions/debug-tmate.md) - 2020-09-14
21  * [Talking to a PostgreSQL service container from inside a Docker container](https://github.com/simonw/til/blob/main/github-actions/service-containers-docker.md) - 2020-09-18
22  * [Using Prettier to check JavaScript code style in GitHub Actions](https://github.com/simonw/til/blob/main/github-actions/prettier-github-actions.md) - 2020-12-31
23  * [Running tests against PostgreSQL in a service container](https://github.com/simonw/til/blob/main/github-actions/postgresq-service-container.md) - 2021-02-23
24  * [Installing different PostgreSQL server versions in GitHub Actions](https://github.com/simonw/til/blob/main/github-actions/different-postgresql-versions.md) - 2021-07-05
25  * [Attaching a generated file to a GitHub release using Actions](https://github.com/simonw/til/blob/main/github-actions/attach-generated-file-to-release.md) - 2021-09-07
84  ## macos
85  
86  * [Running pip install -e .[test] in zsh on macOS Catalina](https://github.com/simonw/til/blob/main/macos/zsh-pip-install.md) - 2020-04-21
87  * [Get Skitch working on Catalina](https://github.com/simonw/til/blob/main/macos/skitch-catalina.md) - 2020-04-21
88  * [Close terminal window on Ctrl+D for macOS](https://github.com/simonw/til/blob/main/macos/close-terminal-on-ctrl-d.md) - 2020-04-21
106 * [Restricting SSH connections to devices within a Tailscale network](https://github.com/simonw/til/blob/main/tailscale/lock-down-sshd.md) - 2020-04-23
107 
108 ## pytest
109 
110 * [Session-scoped temporary directories in pytest](https://github.com/simonw/til/blob/main/pytest/session-scoped-tmp.md) - 2020-04-26
111 * [How to mock httpx using pytest-mock](https://github.com/simonw/til/blob/main/pytest/mock-httpx.md) - 2020-04-29
112 * [Asserting a dictionary is a subset of another dictionary](https://github.com/simonw/til/blob/main/pytest/assert-dictionary-subset.md) - 2020-05-28
113 * [Registering temporary pluggy plugins inside tests](https://github.com/simonw/til/blob/main/pytest/registering-plugins-in-tests.md) - 2020-07-21
114 * [Code coverage using pytest and codecov.io](https://github.com/simonw/til/blob/main/pytest/pytest-code-coverage.md) - 2020-08-15
115 * [Start a server in a subprocess during a pytest session](https://github.com/simonw/til/blob/main/pytest/subprocess-server.md) - 2020-08-31
116 
117 ## github
143 ## asgi
144 
145 * [Writing tests for the ASGI lifespan protocol with HTTPX](https://github.com/simonw/til/blob/main/asgi/lifespan-test-httpx.md) - 2020-06-29
146 
147 ## heroku
169 * [PostgreSQL full-text search in the Django Admin](https://github.com/simonw/til/blob/main/django/postgresql-full-text-search-admin.md) - 2020-07-25
170 * [Adding extra read-only information to a Django admin change page](https://github.com/simonw/til/blob/main/django/extra-read-only-admin-information.md) - 2021-02-25
171 * [Writing tests for the Django admin with pytest-django](https://github.com/simonw/til/blob/main/django/testing-django-admin-with-pytest.md) - 2021-03-02
172 * [Show the timezone for datetimes in the Django admin](https://github.com/simonw/til/blob/main/django/show-timezone-in-django-admin.md) - 2021-03-02
173 * [Pretty-printing all read-only JSON in the Django admin](https://github.com/simonw/til/blob/main/django/pretty-print-json-admin.md) - 2021-03-07
272 ## cookiecutter
273 
274 * [Testing cookiecutter templates with pytest](https://github.com/simonw/til/blob/main/cookiecutter/pytest-for-cookiecutter.md) - 2021-01-27
275 * [Conditionally creating directories in cookiecutter](https://github.com/simonw/til/blob/main/cookiecutter/conditionally-creating-directories.md) - 2021-01-27
276 

swarm-to-sqlite/tests/test_save_checkin.py

1   from swarm_to_sqlite import utils
2   import pytest
3   import json
4   import sqlite_utils
12  
13  
14  @pytest.fixture(scope="session")
15  def converted():
16      db = sqlite_utils.Database(":memory:")
21  
22  
23  def test_tables(converted):
24      assert {
25          "venues",
40  
41  
42  def test_venue(converted):
43      venue = list(converted["venues"].rows)[0]
44      assert {
75  
76  
77  def test_event(converted):
78      event = list(converted["events"].rows)[0]
79      assert {"id": "5bf8e4fb646e38002c472397", "name": "A movie"} == event
96  
97  
98  def test_sticker(converted):
99      sticker = list(converted["stickers"].rows)[0]
100     assert {
112 
113 
114 def test_likes(converted):
115     likes = list(converted["likes"].rows)
116     assert [
121 
122 
123 def test_with_(converted):
124     with_ = list(converted["with"].rows)
125     assert [{"users_id": "900", "checkins_id": "592b2cfe09e28339ac543fde"}] == with_
126 
127 
128 def test_users(converted):
129     users = list(converted["users"].rows)
130     assert [
177 
178 
179 def test_photos(converted):
180     assert [
181         ForeignKey(
227 
228 
229 def test_posts(converted):
230     assert [
231         ForeignKey(
254 
255 
256 def test_checkin_with_no_event():
257     checkin = load_checkin()
258     # If no event in checkin, event column should be None
265 
266 
267 def test_view(converted):
268     assert {"checkin_details", "venue_details"} == set(converted.view_names())
269     assert [

swarm-to-sqlite/setup.py

33      """,
34      install_requires=["sqlite-utils>=3.3", "click", "requests"],
35      extras_require={"test": ["pytest"]},
36      tests_require=["swarm-to-sqlite[test]"],
37  )

tableau-to-sqlite/tests/test_tableau_to_sqlite.py

1   from click.testing import CliRunner
2   from tableau_to_sqlite.cli import cli
3   import pathlib
9   
10  @vcr.use_cassette(str(fixtures / "cassette.yml"))
11  def test_run(tmpdir):
12      runner = CliRunner()
13      db_path = str(tmpdir / "tableau.db")

tableau-to-sqlite/setup.py

33      """,
34      install_requires=["click", "TableauScraper==0.1.3"],
35      extras_require={"test": ["pytest", "vcrpy"]},
36      tests_require=["tableau-to-sqlite[test]"],
37      python_requires=">=3.6",
38  )

tableau-to-sqlite/README.md

66      pipenv shell
67  
68  Now install the dependencies and test dependencies:
69  
70      pip install -e '.[test]'
71  
72  To run the tests:
73  
74      pytest

srccon-2020-datasette/build_database.py

7   
8   
9   def parse_times(s, datestring):
10      begin, end = s.split(" ")[0].split("-")
11      if not begin.endswith("m"):
13          begin += end[-2:]
14      begin_dt = parser.parse(
15          begin + " ET " + datestring, tzinfos={"ET": "America/New_York"}
16      )
17      end_dt = parser.parse(end + " ET " + datestring, tzinfos={"ET": "America/New_York"})
18      return begin_dt, end_dt
19  

srccon-2020-datasette/README.md

5   In this repository:
6   
7   * A [build_database.py](https://github.com/simonw/srccon-2020-datasette/blob/main/build_database.py) script which grabs the latest [SRCCON 2020 schedule JSON file](https://github.com/OpenNews/srccon-2020/blob/master/schedule/sessions.json) and uses the [sqlite-utils](https://github.com/simonw/sqlite-utils) Python library to convert it into a SQLite database
8   * A GitHub Actions workflow in [.github/workflows/build.yml](https://github.com/simonw/srccon-2020-datasette/blob/main/.github/workflows/build.yml) which runs that script and then deploys the resulting database to [Vercel](https://vercel.com/) using [datasette-publish-now](https://github.com/simonw/datasette-publish-now)
9   
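The conversion step that build_database.py performs can be sketched with sqlite-utils like this (illustrative only: the raw JSON URL, table name and primary key column are assumptions, not read from the real script):

```python
# Sketch of loading the schedule JSON into SQLite with sqlite-utils.
import json
import urllib.request

import sqlite_utils

# Assumed raw URL derived from the repository link above
SESSIONS_URL = (
    "https://raw.githubusercontent.com/OpenNews/srccon-2020"
    "/master/schedule/sessions.json"
)


def build_database(db_path="srccon.db"):
    sessions = json.load(urllib.request.urlopen(SESSIONS_URL))
    db = sqlite_utils.Database(db_path)
    # pk="id" and the "sessions" table name are assumptions for illustration
    db["sessions"].insert_all(sessions, pk="id", replace=True)
```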

sqlite-transform/tests/test_parsedate.py

1   from click.testing import CliRunner
2   from sqlite_transform import cli
3   import pytest
4   
5   
6   def test_parsedate(test_db_and_path):
7       db, db_path = test_db_and_path
8       result = CliRunner().invoke(cli.cli, ["parsedate", db_path, "example", "dt"])
9       assert 0 == result.exit_code, result.output
16  
17  
18  def test_parsedatetime(test_db_and_path):
19      db, db_path = test_db_and_path
20      result = CliRunner().invoke(cli.cli, ["parsedatetime", db_path, "example", "dt"])
21      assert 0 == result.exit_code, result.output
28  
29  
30  @pytest.mark.parametrize("command", ("parsedate", "parsedatetime", "jsonsplit"))
31  def test_column_required(test_db_and_path, command):
32      _, db_path = test_db_and_path
33      result = CliRunner().invoke(cli.cli, [command, db_path, "example"])
34      assert result.exit_code == 2, result.output
36  
37  
38  @pytest.mark.parametrize("command", ("parsedate", "parsedatetime", "jsonsplit"))
39  def test_cannot_use_drop_without_multi_or_output(test_db_and_path, command):
40      _, db_path = test_db_and_path
41      result = CliRunner().invoke(cli.cli, [command, db_path, "example", "id", "--drop"])
42      assert result.exit_code == 1, result.output
44  
45  
46  @pytest.mark.parametrize(
47      "command,options,expected",
48      (
53      ),
54  )
55  def test_dayfirst_yearfirst(fresh_db_and_path, command, options, expected):
56      db, db_path = fresh_db_and_path
57      db["example"].insert_all(
68  
69  
70  def test_parsedatetime_output(test_db_and_path):
71      db, db_path = test_db_and_path
72      result = CliRunner().invoke(
73          cli.cli, ["parsedatetime", db_path, "example", "dt", "--output", "parsed"]
82  
83  
84  def test_parsedatetime_output_drop(test_db_and_path):
85      db, db_path = test_db_and_path
86      result = CliRunner().invoke(
87          cli.cli,

sqlite-transform/tests/test_jsonsplit.py

1   from click.testing import CliRunner
2   import json
3   import pathlib
4   import pytest
5   from sqlite_transform import cli
6   import sqlite_utils
7   
8   
9   @pytest.mark.parametrize("delimiter", [None, ";", "-"])
10  def test_jsonsplit(tmpdir, delimiter):
11      db_path = str(pathlib.Path(tmpdir) / "data.db")
12      db = sqlite_utils.Database(db_path)
29  
30  
31  @pytest.mark.parametrize(
32      "type,expected_array",
33      (
37      ),
38  )
39  def test_jsonsplit_type(fresh_db_and_path, type, expected_array):
40      db, db_path = fresh_db_and_path
41      db["example"].insert_all(
53  
54  
55  @pytest.mark.parametrize("drop", (True, False))
56  def test_jsonsplit_output(fresh_db_and_path, drop):
57      db, db_path = fresh_db_and_path
58      db["example"].insert_all(

sqlite-transform/tests/test_lambda.py

1   from click.testing import CliRunner
2   from sqlite_transform import cli
3   import textwrap
4   import pytest
5   
6   
7   @pytest.mark.parametrize(
8       "code",
9       [
13      ],
14  )
15  def test_lambda_single_line(test_db_and_path, code):
16      db, db_path = test_db_and_path
17      result = CliRunner().invoke(
18          cli.cli, ["lambda", db_path, "example", "dt", "--code", code]
27  
28  
29  def test_lambda_multiple_lines(test_db_and_path):
30      db, db_path = test_db_and_path
31      result = CliRunner().invoke(
32          cli.cli,
49  
50  
51  def test_lambda_import(test_db_and_path):
52      db, db_path = test_db_and_path
53      result = CliRunner().invoke(
54          cli.cli,
73  
74  
75  def test_lambda_dryrun(test_db_and_path):
76      db, db_path = test_db_and_path
77      result = CliRunner().invoke(
78          cli.cli,
116 
117 
118 @pytest.mark.parametrize("drop", (True, False))
119 def test_lambda_output_column(test_db_and_path, drop):
120     db, db_path = test_db_and_path
121     args = [
122         "lambda",
153 
154 
155 @pytest.mark.parametrize(
156     "output_type,expected",
157     (
162     ),
163 )
164 def test_lambda_output_column_output_type(test_db_and_path, output_type, expected):
165     db, db_path = test_db_and_path
166     args = [
167         "lambda",
184 
185 
186 @pytest.mark.parametrize(
187     "options,expected_error",
188     [
219     ],
220 )
221 def test_lambda_output_error(test_db_and_path, options, expected_error):
222     db_path = test_db_and_path[1]
223     result = CliRunner().invoke(
224         cli.cli,
234 
235 
236 @pytest.mark.parametrize("drop", (True, False))
237 def test_lambda_multi(fresh_db_and_path, drop):
238     db, db_path = fresh_db_and_path
239     db["creatures"].insert_all(
267 
268 
269 def test_lambda_multi_complex_column_types(fresh_db_and_path):
270     db, db_path = fresh_db_and_path
271     db["rows"].insert_all(

sqlite-transform/tests/conftest.py

1   import pathlib
2   import pytest
3   import sqlite_utils
4   
5   
6   @pytest.fixture
7   def test_db_and_path(fresh_db_and_path):
8       db, db_path = fresh_db_and_path
9       db["example"].insert_all(
19  
20  
21  @pytest.fixture
22  def fresh_db_and_path(tmpdir):
23      db_path = str(pathlib.Path(tmpdir) / "data.db")

sqlite-transform/setup.py

28      """,
29      install_requires=["dateutils", "tqdm", "click", "sqlite-utils"],
30      extras_require={"test": ["pytest"]},
31      tests_require=["sqlite-transform[test]"],
32  )

sqlite-utils/sqlite_utils/utils.py

118         yield file
119         return
120     # file.fileno() throws an exception in our test suite
121     try:
122         fileno = file.fileno()
213 class ValueTracker:
214     def __init__(self):
215         self.couldbe = {key: getattr(self, "test_" + key) for key in self.get_tests()}
216 
217     @classmethod
218     def get_tests(cls):
219         return [
220             key.split("test_")[-1]
221             for key in cls.__dict__.keys()
222             if key.startswith("test_")
223         ]
224 
225     def test_integer(self, value):
226         try:
227             int(value)
230             return False
231 
232     def test_float(self, value):
233         try:
234             float(value)
244         options = set(self.couldbe.keys())
245         # Return based on precedence
246         for key in self.get_tests():
247             if key in options:
248                 return key
253             return
254         not_these = []
255         for name, test in self.couldbe.items():
256             if not test(value):
257                 not_these.append(name)
258         for key in not_these:

sqlite-utils/tests/test_wal.py

1   import pytest
2   from sqlite_utils import Database
3   
4   
5   @pytest.fixture
6   def db_path_tmpdir(tmpdir):
7       path = tmpdir / "test.db"
8       db = Database(str(path))
9       return db, path, tmpdir
10  
11  
12  def test_enable_disable_wal(db_path_tmpdir):
13      db, path, tmpdir = db_path_tmpdir
14      assert len(tmpdir.listdir()) == 1
15      assert "delete" == db.journal_mode
16      assert "test.db-wal" not in [f.basename for f in tmpdir.listdir()]
17      db.enable_wal()
18      assert "wal" == db.journal_mode
19      db["test"].insert({"foo": "bar"})
20      assert "test.db-wal" in [f.basename for f in tmpdir.listdir()]
21      db.disable_wal()
22      assert "delete" == db.journal_mode
23      assert "test.db-wal" not in [f.basename for f in tmpdir.listdir()]

sqlite-utils/tests/test_utils.py

1   from sqlite_utils import utils
2   import pytest
3   
4   
5   @pytest.mark.parametrize(
6       "input,expected,should_be_is",
7       [
15      ],
16  )
17  def test_decode_base64_values(input, expected, should_be_is):
18      actual = utils.decode_base64_values(input)
19      if should_be_is:
23  
24  
25  def test_find_spatialite():
26      spatialite = utils.find_spatialite()
27      assert spatialite is None or isinstance(spatialite, str)

sqlite-utils/tests/test_upsert.py

1   from sqlite_utils.db import PrimaryKeyRequired
2   import pytest
3   
4   
5   def test_upsert(fresh_db):
6       table = fresh_db["table"]
7       table.insert({"id": 1, "name": "Cleo"}, pk="id")
11  
12  
13  def test_upsert_all(fresh_db):
14      table = fresh_db["table"]
15      table.upsert_all([{"id": 1, "name": "Cleo"}, {"id": 2, "name": "Nixie"}], pk="id")
22  
23  
24  def test_upsert_all_single_column(fresh_db):
25      table = fresh_db["table"]
26      table.upsert_all([{"name": "Cleo"}], pk="name")
29  
30  
31  def test_upsert_error_if_no_pk(fresh_db):
32      table = fresh_db["table"]
33      with pytest.raises(PrimaryKeyRequired):
34          table.upsert_all([{"id": 1, "name": "Cleo"}])
35      with pytest.raises(PrimaryKeyRequired):
36          table.upsert({"id": 1, "name": "Cleo"})
37  
38  
39  def test_upsert_with_hash_id(fresh_db):
40      table = fresh_db["table"]
41      table.upsert({"foo": "bar"}, hash_id="pk")
46  
47  
48  def test_upsert_compound_primary_key(fresh_db):
49      table = fresh_db["table"]
50      table.upsert_all(

sqlite-utils/tests/test_update.py

2   import json
3   
4   import pytest
5   
6   from sqlite_utils.db import NotFoundError
7   
8   
9   def test_update_rowid_table(fresh_db):
10      table = fresh_db["table"]
11      rowid = table.insert({"foo": "bar"}).last_pk
14  
15  
16  def test_update_pk_table(fresh_db):
17      table = fresh_db["table"]
18      pk = table.insert({"foo": "bar", "id": 5}, pk="id").last_pk
22  
23  
24  def test_update_compound_pk_table(fresh_db):
25      table = fresh_db["table"]
26      pk = table.insert({"id1": 5, "id2": 3, "v": 1}, pk=("id1", "id2")).last_pk
30  
31  
32  @pytest.mark.parametrize(
33      "pk,update_pk",
34      (
42      ),
43  )
44  def test_update_invalid_pk(fresh_db, pk, update_pk):
45      table = fresh_db["table"]
46      table.insert({"id1": 5, "id2": 3, "v": 1}, pk=pk).last_pk
47      with pytest.raises(NotFoundError):
48          table.update(update_pk, {"v": 2})
49  
50  
51  def test_update_alter(fresh_db):
52      table = fresh_db["table"]
53      rowid = table.insert({"foo": "bar"}).last_pk
71  
72  
73  def test_update_alter_with_invalid_column_characters(fresh_db):
74      table = fresh_db["table"]
75      rowid = table.insert({"foo": "bar"}).last_pk
76      with pytest.raises(AssertionError):
77          table.update(rowid, {"new_col[abc]": 1.2}, alter=True)
78  
79  
80  def test_update_with_no_values_sets_last_pk(fresh_db):
81      table = fresh_db.table("dogs", pk="id")
82      table.insert_all([{"id": 1, "name": "Cleo"}, {"id": 2, "name": "Pancakes"}])
85      table.update(2)
86      assert 2 == table.last_pk
87      with pytest.raises(NotFoundError):
88          table.update(3)
89  
90  
91  @pytest.mark.parametrize(
92      "data_structure",
93      (
105     ),
106 )
107 def test_update_dictionaries_and_lists_as_json(fresh_db, data_structure):
108     fresh_db["test"].insert({"id": 1, "data": ""}, pk="id")
109     fresh_db["test"].update(1, {"data": data_structure})
110     row = fresh_db.execute("select id, data from test").fetchone()
111     assert row[0] == 1
112     assert data_structure == json.loads(row[1])

sqlite-utils/tests/test_tracer.py

2   
3   
4   def test_tracer():
5       collected = []
6       db = Database(
31  
32  
33  def test_with_tracer():
34      collected = []
35  

sqlite-utils/tests/test_transform.py

1   from sqlite_utils.db import ForeignKey
2   from sqlite_utils.utils import OperationalError
3   import pytest
4   
5   
6   @pytest.mark.parametrize(
7       "params,expected_sql",
8       [
89      ],
90  )
91  @pytest.mark.parametrize("use_pragma_foreign_keys", [False, True])
92  def test_transform_sql_table_with_primary_key(
93      fresh_db, params, expected_sql, use_pragma_foreign_keys
94  ):
117 
118 
119 @pytest.mark.parametrize(
120     "params,expected_sql",
121     [
162     ],
163 )
164 @pytest.mark.parametrize("use_pragma_foreign_keys", [False, True])
165 def test_transform_sql_table_with_no_primary_key(
166     fresh_db, params, expected_sql, use_pragma_foreign_keys
167 ):
190 
191 
192 def test_transform_sql_with_no_primary_key_to_primary_key_of_id(fresh_db):
193     dogs = fresh_db["dogs"]
194     dogs.insert({"id": 1, "name": "Cleo", "age": "5"})
205 
206 
207 def test_transform_rename_pk(fresh_db):
208     dogs = fresh_db["dogs"]
209     dogs.insert({"id": 1, "name": "Cleo", "age": "5"}, pk="id")
215 
216 
217 def test_transform_not_null(fresh_db):
218     dogs = fresh_db["dogs"]
219     dogs.insert({"id": 1, "name": "Cleo", "age": "5"}, pk="id")
225 
226 
227 def test_transform_remove_a_not_null(fresh_db):
228     dogs = fresh_db["dogs"]
229     dogs.insert({"id": 1, "name": "Cleo", "age": "5"}, not_null={"age"}, pk="id")
235 
236 
237 @pytest.mark.parametrize("not_null", [{"age"}, {"age": True}])
238 def test_transform_add_not_null_with_rename(fresh_db, not_null):
239     dogs = fresh_db["dogs"]
240     dogs.insert({"id": 1, "name": "Cleo", "age": "5"}, pk="id")
246 
247 
248 def test_transform_defaults(fresh_db):
249     dogs = fresh_db["dogs"]
250     dogs.insert({"id": 1, "name": "Cleo", "age": 5}, pk="id")
256 
257 
258 def test_transform_defaults_and_rename_column(fresh_db):
259     dogs = fresh_db["dogs"]
260     dogs.insert({"id": 1, "name": "Cleo", "age": 5}, pk="id")
266 
267 
268 def test_remove_defaults(fresh_db):
269     dogs = fresh_db["dogs"]
270     dogs.insert({"id": 1, "name": "Cleo", "age": 5}, defaults={"age": 1}, pk="id")
276 
277 
278 @pytest.fixture
279 def authors_db(fresh_db):
280     books = fresh_db["books"]
289 
290 
291 def test_transform_foreign_keys_persist(authors_db):
292     assert authors_db["books"].foreign_keys == [
293         ForeignKey(
303 
304 
305 @pytest.mark.parametrize("use_pragma_foreign_keys", [False, True])
306 def test_transform_foreign_keys_survive_renamed_column(
307     authors_db, use_pragma_foreign_keys
308 ):
320 
321 
322 @pytest.mark.parametrize("use_pragma_foreign_keys", [False, True])
323 def test_transform_drop_foreign_keys(fresh_db, use_pragma_foreign_keys):
324     if use_pragma_foreign_keys:
325         fresh_db.conn.execute("PRAGMA foreign_keys=ON")
362 
363 
364 def test_transform_verify_foreign_keys(fresh_db):
365     fresh_db.conn.execute("PRAGMA foreign_keys=ON")
366     fresh_db["authors"].insert({"id": 3, "name": "Tina"}, pk="id")
369     )
370     # Renaming the id column on authors should break everything
371     with pytest.raises(OperationalError) as e:
372         fresh_db["authors"].transform(rename={"id": "id2"})
373     assert e.value.args[0] == 'foreign key mismatch - "books" referencing "authors"'

sqlite-utils/tests/test_sniff.py

1   from sqlite_utils import cli, Database
2   from click.testing import CliRunner
3   import pathlib
4   import pytest
5   
6   sniff_dir = pathlib.Path(__file__).parent / "sniff"
7   
8   
9   @pytest.mark.parametrize("filepath", sniff_dir.glob("example*"))
10  def test_sniff(tmpdir, filepath):
11      db_path = str(tmpdir / "test.db")
12      runner = CliRunner()
13      result = runner.invoke(

sqlite-utils/tests/test_suggest_column_types.py

1   import pytest
2   from collections import OrderedDict
3   from sqlite_utils.utils import suggest_column_types
4   
5   
6   @pytest.mark.parametrize(
7       "records,types",
8       [
25      ],
26  )
27  def test_suggest_column_types(records, types):
28      assert types == suggest_column_types(records)

sqlite-utils/tests/test_rows.py

1   import pytest
2   
3   
4   def test_rows(existing_db):
5       assert [{"text": "one"}, {"text": "two"}, {"text": "three"}] == list(
6           existing_db["foo"].rows
8   
9   
10  @pytest.mark.parametrize(
11      "where,where_args,expected_ids",
12      [
18      ],
19  )
20  def test_rows_where(where, where_args, expected_ids, fresh_db):
21      table = fresh_db["dogs"]
22      table.insert_all(
32  
33  
34  @pytest.mark.parametrize(
35      "where,order_by,expected_ids",
36      [
41      ],
42  )
43  def test_rows_where_order_by(where, order_by, expected_ids, fresh_db):
44      table = fresh_db["dogs"]
45      table.insert_all(
54  
55  
56  @pytest.mark.parametrize(
57      "offset,limit,expected",
58      [
62      ],
63  )
64  def test_rows_where_offset_limit(fresh_db, offset, limit, expected):
65      table = fresh_db["rows"]
66      table.insert_all([{"id": id} for id in range(1, 101)], pk="id")
71  
72  
73  def test_pks_and_rows_where_rowid(fresh_db):
74      table = fresh_db["rowid_table"]
75      table.insert_all({"number": i + 10} for i in range(3))
82  
83  
84  def test_pks_and_rows_where_simple_pk(fresh_db):
85      table = fresh_db["simple_pk_table"]
86      table.insert_all(({"id": i + 10} for i in range(3)), pk="id")
93  
94  
95  def test_pks_and_rows_where_compound_pk(fresh_db):
96      table = fresh_db["compound_pk_table"]
97      table.insert_all(

sqlite-utils/tests/test_register_function.py

1   # flake8: noqa
2   import pytest
3   import sys
4   from unittest.mock import MagicMock
5   
6   
7   def test_register_function(fresh_db):
8       @fresh_db.register_function
9       def reverse_string(s):
14  
15  
16  def test_register_function_multiple_arguments(fresh_db):
17      @fresh_db.register_function
18      def a_times_b_plus_c(a, b, c):
23  
24  
25  def test_register_function_deterministic(fresh_db):
26      @fresh_db.register_function(deterministic=True)
27      def to_lower(s):
32  
33  
34  @pytest.mark.skipif(
35      sys.version_info < (3, 8), reason="deterministic=True was added in Python 3.8"
36  )
37  def test_register_function_deterministic_registered(fresh_db):
38      fresh_db.conn = MagicMock()
39      fresh_db.conn.create_function = MagicMock()
48  
49  
50  def test_register_function_replace(fresh_db):
51      @fresh_db.register_function()
52      def one():

sqlite-utils/tests/test_recreate.py

2   import sqlite3
3   import pathlib
4   import pytest
5   
6   
7   def test_recreate_ignored_for_in_memory():
8       # None of these should raise an exception:
9       Database(memory=True, recreate=False)
13  
14  
15  def test_recreate_not_allowed_for_connection():
16      conn = sqlite3.connect(":memory:")
17      with pytest.raises(AssertionError):
18          Database(conn, recreate=True)
19  
20  
21  @pytest.mark.parametrize(
22      "use_path,file_exists", [(True, True), (True, False), (False, True), (False, False)]
23  )
24  def test_recreate(tmpdir, use_path, file_exists):
25      filepath = str(tmpdir / "data.db")
26      if use_path:

sqlite-utils/tests/test_recipes.py

1   from sqlite_utils import recipes
2   import json
3   import pytest
4   
5   
6   @pytest.fixture
7   def dates_db(fresh_db):
8       fresh_db["example"].insert_all(
18  
19  
20  def test_parsedate(dates_db):
21      dates_db["example"].convert("dt", recipes.parsedate)
22      assert list(dates_db["example"].rows) == [
28  
29  
30  def test_parsedatetime(dates_db):
31      dates_db["example"].convert("dt", recipes.parsedatetime)
32      assert list(dates_db["example"].rows) == [
38  
39  
40  @pytest.mark.parametrize(
41      "recipe,kwargs,expected",
42      (
47      ),
48  )
49  def test_dayfirst_yearfirst(fresh_db, recipe, kwargs, expected):
50      fresh_db["example"].insert_all(
51          [
62  
63  
64  @pytest.mark.parametrize("delimiter", [None, ";", "-"])
65  def test_jsonsplit(fresh_db, delimiter):
66      fresh_db["example"].insert_all(
67          [
84  
85  
86  @pytest.mark.parametrize(
87      "type,expected",
88      (
92      ),
93  )
94  def test_jsonsplit_type(fresh_db, type, expected):
95      fresh_db["example"].insert_all(
96          [

sqlite-utils/tests/test_m2m.py

1   from sqlite_utils.db import ForeignKey, NoObviousTable
2   import pytest
3   
4   
5   def test_insert_m2m_single(fresh_db):
6       dogs = fresh_db["dogs"]
7       dogs.insert({"id": 1, "name": "Cleo"}, pk="id").m2m(
15  
16  
17  def test_insert_m2m_alter(fresh_db):
18      dogs = fresh_db["dogs"]
19      dogs.insert({"id": 1, "name": "Cleo"}, pk="id").m2m(
33  
34  
35  def test_insert_m2m_list(fresh_db):
36      dogs = fresh_db["dogs"]
37      dogs.insert({"id": 1, "name": "Cleo"}, pk="id").m2m(
62  
63  
64  def test_insert_m2m_iterable(fresh_db):
65      iterable_records = ({"id": 1, "name": "Phineas"}, {"id": 2, "name": "Ferb"})
66  
102 
103 
104 def test_m2m_with_table_objects(fresh_db):
105     dogs = fresh_db.table("dogs", pk="id")
106     humans = fresh_db.table("humans", pk="id")
115 
116 
117 def test_m2m_lookup(fresh_db):
118     people = fresh_db.table("people", pk="id")
119     people.insert({"name": "Wahyu"}).m2m("tags", lookup={"tag": "Coworker"})
138 
139 
140 def test_m2m_requires_either_records_or_lookup(fresh_db):
141     people = fresh_db.table("people", pk="id").insert({"name": "Wahyu"})
142     with pytest.raises(AssertionError):
143         people.m2m("tags")
144     with pytest.raises(AssertionError):
145         people.m2m("tags", {"tag": "hello"}, lookup={"foo": "bar"})
146 
147 
148 def test_m2m_explicit_table_name_argument(fresh_db):
149     people = fresh_db.table("people", pk="id")
150     people.insert({"name": "Wahyu"}).m2m(
156 
157 
158 def test_m2m_table_candidates(fresh_db):
159     fresh_db.create_table("one", {"id": int, "name": str}, pk="id")
160     fresh_db.create_table("two", {"id": int, "name": str}, pk="id")
178 
179 
180 def test_uses_existing_m2m_table_if_exists(fresh_db):
181     # Code should look for an existing table with fks to both tables
182     # and use that if it exists.
196 
197 
198 def test_requires_explicit_m2m_table_if_multiple_options(fresh_db):
199     # If the code scans for m2m tables and finds more than one candidate
200     # it should require that the m2m_table=x argument is used
211         foreign_keys=["people_id", "tags_id"],
212     )
213     with pytest.raises(NoObviousTable):
214         people.insert({"name": "Wahyu"}).m2m("tags", lookup={"tag": "Coworker"})

sqlite-utils/tests/test_query.py

2   
3   
4   def test_query(fresh_db):
5       fresh_db["dogs"].insert_all([{"name": "Cleo"}, {"name": "Pancakes"}])
6       results = fresh_db.query("select * from dogs order by name desc")
9   
10  
11  def test_execute_returning_dicts(fresh_db):
12      # Like db.query() but returns a list, included for backwards compatibility
13      # see https://github.com/simonw/sqlite-utils/issues/290
14      fresh_db["test"].insert({"id": 1, "bar": 2}, pk="id")
15      assert fresh_db.execute_returning_dicts("select * from test") == [