ripgrep

datasette-glitch/tests/test_glitch.py

1   from datasette.app import Datasette
2   from datasette_glitch import startup
3   import pytest
4   import httpx
5   
6   
7   @pytest.mark.asyncio
8   async def test_plugin_is_installed():
9       app = Datasette([], memory=True).app()
10      async with httpx.AsyncClient(app=app) as client:
15  
16  
17  def test_startup_with_no_variable(monkeypatch, capsys):
18      monkeypatch.delenv("PROJECT_DOMAIN", raising=False)
19      ds = Datasette([], memory=True)
23  
24  
25  def test_startup_with_variable(monkeypatch, capsys):
26      monkeypatch.setenv("PROJECT_DOMAIN", "test-env")
27      ds = Datasette([], memory=True)
28      startup(ds)
29      captured = capsys.readouterr()
30      assert (
31          "https://test-env.glitch.me/-/auth-token?token={}".format(ds._root_token)
32          == captured.out.strip()
33      )

yaml-to-sqlite/tests/test_cli.py

1   from click.testing import CliRunner
2   from yaml_to_sqlite import cli
3   import sqlite_utils
29  
30  
31  def test_without_pk(tmpdir):
32      db_path = tmpdir / "db.db"
33      assert (
44  
45  
46  def test_with_pk(tmpdir):
47      db_path = tmpdir / "db.db"
48      assert (
59  
60  
61  def test_single_column(tmpdir):
62      db_path = tmpdir / "db.db"
63      test_yaml = "- One\n" "- Two\n" "- Three\n"
64      assert (
65          0
68              cli.cli,
69              [str(db_path), "numbers", "-", "--single-column", "name"],
70              input=test_yaml,
71          )
72          .exit_code
78  
79  
80  def test_alters_if_necessary(tmpdir):
81      db_path = tmpdir / "db.db"
82      assert (

yaml-to-sqlite/setup.py

24      packages=find_packages(),
25      install_requires=["click", "PyYAML", "sqlite-utils>=3.9.1"],
26      setup_requires=["pytest-runner"],
27      extras_require={"test": ["pytest"]},
28      entry_points="""
29          [console_scripts]
30          yaml-to-sqlite=yaml_to_sqlite.cli:cli
31      """,
32      tests_require=["yaml-to-sqlite[test]"],
33      url="https://github.com/simonw/yaml-to-sqlite",
34      classifiers=[

datasette-glitch/setup.py

30      entry_points={"datasette": ["glitch = datasette_glitch"]},
31      install_requires=["datasette>=0.45"],
32      extras_require={"test": ["pytest", "pytest-asyncio", "httpx"]},
33      tests_require=["datasette-glitch[test]"],
34  )

whosonfirst-datasette/Dockerfile

1   FROM datasetteproject/datasette
2   RUN apt update && apt install -y wget bzip2
3   RUN wget -q https://latest.datasette.io/fixtures.db
4   # RUN datasette inspect --inspect-file=/mnt/whosonfirst/inspect.json /fixtures.db /mnt/whosonfirst/whosonfirst-data-latest.db
5   EXPOSE 8001
6   CMD ["datasette", "-h", "0.0.0.0", "-p", "8001", "/mnt/whosonfirst/whosonfirst-data-latest.db", "/fixtures.db", "--cors"]

vaccinate-ca-datasette/metadata.yml

5   about_url: https://github.com/simonw/vaccinate-ca-datasette
6   description_html: |-
7     <a href="/vaccinateca/locations?_facet=Affiliation&_facet=Latest+report+yes%3F&Latest+report+yes%3F=1&_facet_array=Availability+Info">Latest places that reported yes, on a map</a>

twitter-to-sqlite/tests/test_save_tweets.py

2   import pathlib
3   
4   import pytest
5   import sqlite_utils
6   from twitter_to_sqlite import utils
7   
8   
9   @pytest.fixture
10  def tweets():
11      return json.load(open(pathlib.Path(__file__).parent / "tweets.json"))
12  
13  
14  @pytest.fixture
15  def db(tweets):
16      db = sqlite_utils.Database(memory=True)
19  
20  
21  def test_tables(db):
22      assert {
23          "sources",
52  
53  
54  def test_users(db):
55      user_rows = list(db["users"].rows)
56      assert [
184 
185 
186 def test_tweets(db):
187     tweet_rows = list(db["tweets"].rows)
188     assert [
315 
316 
317 def test_sources(db):
318     source_rows = list(db["sources"].rows)
319     assert [
341 
342 
343 def test_places(db):
344     place_rows = list(db["places"].rows)
345     assert [
359 
360 
361 def test_media(db):
362     media_rows = list(db["media"].rows)
363     media_tweets_rows = list(db["media_tweets"].rows)

twitter-to-sqlite/tests/test_migrations.py

1   import sqlite_utils
2   from click.testing import CliRunner
3   import sqlite_utils
4   from twitter_to_sqlite import cli, migrations
5   
6   from .test_import import zip_contents_path
7   from .test_save_tweets import db, tweets
8   
9   
10  def test_no_migrations_on_first_run(tmpdir, zip_contents_path):
11      output = str(tmpdir / "output.db")
12      args = ["import", output, str(zip_contents_path / "follower.js")]
21  
22  
23  def test_convert_source_column():
24      db = sqlite_utils.Database(memory=True)
25      db["tweets"].insert_all(
51  
52  
53  def test_convert_source_column_against_real_database(db):
54      assert "migrations" not in db.table_names()
55      migrations.convert_source_column(db)

twitter-to-sqlite/tests/test_import.py

2   import pathlib
3   
4   import pytest
5   import sqlite_utils
6   from click.testing import CliRunner
7   from twitter_to_sqlite import cli
8   
10  
11  
12  @pytest.fixture
13  def zip_contents_path():
14      return pathlib.Path(__file__).parent / "zip_contents"
15  
16  
17  @pytest.fixture
18  def import_test_zip(tmpdir, zip_contents_path):
19      archive = str(tmpdir / "archive.zip")
20      buf = io.BytesIO()
25  
26  
27  def test_create_zip(zip_contents_path):
28      zf = create_zip(zip_contents_path)
29      assert {
37  
38  
39  def test_cli_import_zip_file(import_test_zip):
40      tmpdir, archive = import_test_zip
41      output = str(tmpdir / "output.db")
42      result = CliRunner().invoke(cli.cli, ["import", output, archive])
46  
47  
48  def test_cli_import_folder(tmpdir, zip_contents_path):
49      output = str(tmpdir / "output.db")
50      result = CliRunner().invoke(cli.cli, ["import", output, str(zip_contents_path)])
54  
55  
56  def test_cli_import_specific_files(tmpdir, zip_contents_path):
57      output = str(tmpdir / "output.db")
58      result = CliRunner().invoke(
109 
110 
111 def test_deletes_existing_archive_tables(import_test_zip):
112     tmpdir, archive = import_test_zip
113     output = str(tmpdir / "output.db")
114     db = sqlite_utils.Database(output)

twitter-to-sqlite/setup.py

37          "python-dateutil",
38      ],
39      extras_require={"test": ["pytest"]},
40      tests_require=["twitter-to-sqlite[test]"],
41  )

til/typescript/basic-tsc.md

22      % ./node_modules/.bin/tsc --init
23  
24  Next step: create a `.ts` file to start testing it out. I put the following in `greetings.ts`:
25  
26  ```typescript

til/sqlite/track-timestamped-changes-to-a-table.md

75  INSERT INTO foo VALUES ('hello4');
76  ```
77  To test this I ran `sqlite3` (with no arguments, which provides an in-memory database to play with), pasted in the above and then ran this:
78  ```
79  sqlite> .headers on

til/sqlite/text-value-is-integer-or-float.md

17  The `|| '.0'` bit there is needed because `cast('1' as REAL)` returns `1.0`, not just `1`.
18  
19  (Note that `1.200` will not pass this test and will be incorrectly considered an invalid floating point representation)
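
To see the expression in action without the hosted demos, here's a minimal sketch using Python's built-in `sqlite3` module - the SQL is the same expression described above:

```python
import sqlite3

# Check which text values are valid floating point representations using the
# cast-and-compare trick from this TIL.
conn = sqlite3.connect(":memory:")
sql = """
select
  value,
  cast(cast(value as REAL) as TEXT) in (value, value || '.0') as is_valid_float
from
  (select '1' as value union select '1.1' union select '1.200' union select 'dog')
"""
for value, is_valid_float in conn.execute(sql):
    print(value, bool(is_valid_float))
# '1' and '1.1' pass; 'dog' fails; '1.200' is (incorrectly) reported as invalid,
# as noted above.
```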
20  
21  ## Demos
42    )
43  ```
44  [Try that here](https://latest.datasette.io/fixtures?sql=select%0D%0A++value%2C%0D%0A++cast%28cast%28value+AS+REAL%29+AS+TEXT%29+in+%28value%2C+value+%7C%7C+%27.0%27%29+as+is_valid_float%0D%0Afrom%0D%0A++%28%0D%0A++++select%0D%0A++++++%271%27+as+value%0D%0A++++union%0D%0A++++select%0D%0A++++++%271.1%27+as+value%0D%0A++++union%0D%0A++++select%0D%0A++++++%27dog%27+as+value%0D%0A++++union%0D%0A++++select%0D%0A++++++null+as+value%0D%0A++%29)
45  
46  | value | is_valid_float |
71    )
72  ```
73  [Try that here](https://latest.datasette.io/fixtures?sql=select%0D%0A++value%2C%0D%0A++cast%28cast%28value+AS+INTEGER%29+AS+TEXT%29+%3D+value+as+is_valid_int%0D%0Afrom%0D%0A++%28%0D%0A++++select%0D%0A++++++%271%27+as+value%0D%0A++++union%0D%0A++++select%0D%0A++++++%271.1%27+as+value%0D%0A++++union%0D%0A++++select%0D%0A++++++%27dog%27+as+value%0D%0A++++union%0D%0A++++select%0D%0A++++++null+as+value%0D%0A++%29)
74  
75  | value | is_valid_int |

til/sqlite/simple-recursive-cte.md

10  select * from counter limit 5;
11  ```
12  This query [returns five rows](https://latest.datasette.io/_memory?sql=with+recursive+counter%28x%29+as+%28%0D%0A++select+0%0D%0A++++union%0D%0A++select+x+%2B+1+from+counter%0D%0A%29%0D%0Aselect+*+from+counter+limit+10%3B) from a single column `x` - from 0 to 4.
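
Here's the full query as a runnable sketch with Python's `sqlite3` module, assuming the `counter(x)` definition shown at the start of the post:

```python
import sqlite3

# Run the recursive CTE and stop after five rows.
conn = sqlite3.connect(":memory:")
rows = conn.execute("""
with recursive counter(x) as (
  select 0
    union
  select x + 1 from counter
)
select x from counter limit 5
""").fetchall()
print(rows)  # [(0,), (1,), (2,), (3,), (4,)]
```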
13  
14  |   x |

til/sqlite/splitting-commas-sqlite.md

25  | x12 | y1234 | z12345 |
26  
27  Here's [a live demo of the query](https://latest.datasette.io/fixtures?sql=with+comma_locations+as+%28%0D%0A++select+instr%28%3Apath%2C+%27%2C%27%29+as+first_comma%2C%0D%0A++instr%28%3Apath%2C+%27%2C%27%29+%2B+instr%28substr%28%3Apath%2C+instr%28%3Apath%2C+%27%2C%27%29+%2B+1%29%2C+%27%2C%27%29+as+second_comma%0D%0A%29%2C+variables+as+%28%0D%0A++select%0D%0A++++substr%28%3Apath%2C+0%2C+first_comma%29+as+first%2C%0D%0A++++substr%28%3Apath%2C+first_comma+%2B+1%2C+second_comma+-+first_comma+-+1%29+as+second%2C%0D%0A++++substr%28%3Apath%2C+second_comma+%2B+1%29+as+third%0D%0A++from+comma_locations%0D%0A%29%0D%0Aselect+*+from+variables&path=x12%2Cy1234%2Cz12345).
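
The same query can be run locally - here's a sketch using Python's `sqlite3` module, with the SQL reconstructed from that demo link:

```python
import sqlite3

# Split a comma-separated string into three columns using instr() and substr().
sql = """
with comma_locations as (
  select
    instr(:path, ',') as first_comma,
    instr(:path, ',') + instr(substr(:path, instr(:path, ',') + 1), ',') as second_comma
),
variables as (
  select
    substr(:path, 0, first_comma) as first,
    substr(:path, first_comma + 1, second_comma - first_comma - 1) as second,
    substr(:path, second_comma + 1) as third
  from comma_locations
)
select * from variables
"""
conn = sqlite3.connect(":memory:")
print(conn.execute(sql, {"path": "x12,y1234,z12345"}).fetchone())
# ('x12', 'y1234', 'z12345')
```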

til/sqlite/replicating-rqlite.md

5   By default `rqlite` asks you to use its own custom HTTP API - but I wanted to try running it against Datasette. rqlite author Philip O'Toole confirmed that [this should work](https://twitter.com/general_order24/status/1343619601758908419) provided any writes go through the API - each node can be configured to write to an on-disk database file which Datasette can then read from (the default is to use in-memory databases and an on-disk Raft log).
6   
7   Here's how I got that working on my macOS laptop. I used the latest macOS binary from https://github.com/rqlite/rqlite/releases (`rqlite` is written in Go and provides pre-compiled binaries for different systems).
8   
9       cd /tmp

til/sqlite/one-line-csv-operations.md

13        'SELECT passenger_count, COUNT(*), AVG(total_amount) FROM taxi GROUP BY passenger_count'
14  
15  You can get `taxi.csv` by downloading the compressed file from [here](https://github.com/multiprocessio/dsq/blob/43e72ff1d2c871082fed0ae401dd59e2ff9f6cfe/testdata/taxi.csv.7z) and running:
16  
17      7z e -aos taxi.csv.7z
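
This isn't the one-liner from the TIL, but as an illustrative Python equivalent of the same idea (assuming the CSV has the `passenger_count` and `total_amount` columns the query above refers to):

```python
import csv
import sqlite3

# Load taxi.csv into an in-memory SQLite table and run the same GROUP BY query.
conn = sqlite3.connect(":memory:")
conn.execute("create table taxi (passenger_count INTEGER, total_amount REAL)")
with open("taxi.csv", newline="") as f:
    conn.executemany(
        "insert into taxi values (?, ?)",
        ((row["passenger_count"], row["total_amount"]) for row in csv.DictReader(f)),
    )
for row in conn.execute(
    "SELECT passenger_count, COUNT(*), AVG(total_amount) FROM taxi GROUP BY passenger_count"
):
    print(row)
```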

til/sqlite/list-all-columns-in-a-database.md

3   Here's a devious trick for listing ALL columns in a SQLite database, using a SQL query that generates another SQL query.
4   
5   The first query ([demo](https://latest.datasette.io/fixtures?sql=select+group_concat%28%0D%0A++%22select+%27%22+%7C%7C+name+%7C%7C+%22%27+as+table_name%2C+*+from+pragma_table_info%28%27%22+%7C%7C+name+%7C%7C+%22%27%29%22%0D%0A%2C+%27+union+%27%29+%7C%7C+%27+order+by+table_name%2C+cid%27%0D%0A++from+sqlite_master+where+type+%3D+%27table%27%3B)):
6   
7   ```sql
11    from sqlite_master where type = 'table';
12  ```
13  This outputs the second query, which will look something like this ([demo](https://latest.datasette.io/fixtures?sql=select+%27simple_primary_key%27+as+table_name%2C+*+from+pragma_table_info%28%27simple_primary_key%27%29+union+select+%27primary_key_multiple_columns%27+as+table_name%2C+*+from+pragma_table_info%28%27primary_key_multiple_columns%27%29+union+select+%27primary_key_multiple_columns_explicit_label%27+as+table_name%2C+*+from+pragma_table_info%28%27primary_key_multiple_columns_explicit_label%27%29+union+select+%27compound_primary_key%27+as+table_name%2C+*+from+pragma_table_info%28%27compound_primary_key%27%29+union+select+%27compound_three_primary_keys%27+as+table_name%2C+*+from+pragma_table_info%28%27compound_three_primary_keys%27%29+union+select+%27foreign_key_references%27+as+table_name%2C+*+from+pragma_table_info%28%27foreign_key_references%27%29+union+select+%27sortable%27+as+table_name%2C+*+from+pragma_table_info%28%27sortable%27%29+union+select+%27no_primary_key%27+as+table_name%2C+*+from+pragma_table_info%28%27no_primary_key%27%29+union+select+%27123_starts_with_digits%27+as+table_name%2C+*+from+pragma_table_info%28%27123_starts_with_digits%27%29+union+select+%27Table+With+Space+In+Name%27+as+table_name%2C+*+from+pragma_table_info%28%27Table+With+Space+In+Name%27%29+union+select+%27table%2Fwith%2Fslashes.csv%27+as+table_name%2C+*+from+pragma_table_info%28%27table%2Fwith%2Fslashes.csv%27%29+union+select+%27complex_foreign_keys%27+as+table_name%2C+*+from+pragma_table_info%28%27complex_foreign_keys%27%29+union+select+%27custom_foreign_key_label%27+as+table_name%2C+*+from+pragma_table_info%28%27custom_foreign_key_label%27%29+union+select+%27units%27+as+table_name%2C+*+from+pragma_table_info%28%27units%27%29+union+select+%27tags%27+as+table_name%2C+*+from+pragma_table_info%28%27tags%27%29+union+select+%27searchable%27+as+table_name%2C+*+from+pragma_table_info%28%27searchable%27%29+union+select+%27searchable_tags%27+as+table_name%2C+*+from+pragma_table_info%28%27searchable_tags%27%29+union+select+%27searchable_fts%27+as+table_name%2C+*+from+pragma_table_info%28%27searchable_fts%27%29+union+select+%27searchable_fts_content%27+as+table_name%2C+*+from+pragma_table_info%28%27searchable_fts_content%27%29+union+select+%27searchable_fts_segments%27+as+table_name%2C+*+from+pragma_table_info%28%27searchable_fts_segments%27%29+union+select+%27searchable_fts_segdir%27+as+table_name%2C+*+from+pragma_table_info%28%27searchable_fts_segdir%27%29+union+select+%27select%27+as+table_name%2C+*+from+pragma_table_info%28%27select%27%29+union+select+%27infinity%27+as+table_name%2C+*+from+pragma_table_info%28%27infinity%27%29+union+select+%27facet_cities%27+as+table_name%2C+*+from+pragma_table_info%28%27facet_cities%27%29+union+select+%27facetable%27+as+table_name%2C+*+from+pragma_table_info%28%27facetable%27%29+union+select+%27binary_data%27+as+table_name%2C+*+from+pragma_table_info%28%27binary_data%27%29+union+select+%27roadside_attractions%27+as+table_name%2C+*+from+pragma_table_info%28%27roadside_attractions%27%29+union+select+%27attraction_characteristic%27+as+table_name%2C+*+from+pragma_table_info%28%27attraction_characteristic%27%29+union+select+%27roadside_attraction_characteristics%27+as+table_name%2C+*+from+pragma_table_info%28%27roadside_attraction_characteristics%27%29+order+by+table_name%2C+cid)):
14  ```sql
15  select 'simple_primary_key' as table_name, * from pragma_table_info('simple_primary_key') union
156   sqlite_master.name
157 ```
158 [Demo](https://latest.datasette.io/fixtures?sql=select%0D%0A++sqlite_master.name+as+table_name%2C%0D%0A++table_info.*%0D%0Afrom%0D%0A++sqlite_master%0D%0A++join+pragma_table_info%28sqlite_master.name%29+as+table_info%0D%0Aorder+by%0D%0A++sqlite_master.name%2C%0D%0A++table_info.cid).
159 
160 This works with the `pragma_table_info` and `pragma_index_list` and `pragma_foreign_key_list` functions too.
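
Here's that join-based version as a runnable sketch against a throwaway database (this needs a SQLite new enough to support the pragma table-valued functions):

```python
import sqlite3

# List every column in every table by joining sqlite_master against
# the pragma_table_info() table-valued function.
conn = sqlite3.connect(":memory:")
conn.execute("create table dogs (id integer primary key, name text)")
conn.execute("create table owners (id integer primary key, dog_id integer)")
sql = """
select
  sqlite_master.name as table_name,
  table_info.*
from
  sqlite_master
  join pragma_table_info(sqlite_master.name) as table_info
order by
  sqlite_master.name,
  table_info.cid
"""
for row in conn.execute(sql):
    print(row)
```
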
178   columnName
179 ```
180 [Demo](https://latest.datasette.io/fixtures?sql=SELECT+m.name+as+tableName%2C+p.name+as+columnName%0D%0A++++++++FROM+sqlite_master+m%0D%0A++++++++LEFT+OUTER+JOIN+pragma_table_info%28%28m.name%29%29+p+ON+m.name+%3C%3E+p.name%0D%0A++++++++WHERE+m.type+IN+%28%27table%27%2C%27view%27%29+AND+m.name+NOT+LIKE+%27sqlite_%25%27%0D%0A++++++++ORDER+BY+tableName%2C+columnName).

til/sqlite/ld-preload.md

44      cd SQLite-cf538e27
45  
46  Now we can build the extension. The `CPPFLAGS` are optional but I found I needed them to get the full Datasette test suite to pass later on:
47  
48      CPPFLAGS="-DSQLITE_ENABLE_FTS3 -DSQLITE_ENABLE_FTS3_PARENTHESIS -DSQLITE_ENABLE_RTREE=1" ./configure
142 ```
143 
144 ## Running the Datasette tests
145 
146 To run Datasette's test suite I needed to install a few extra dependencies:
147 
148 ```
154 source venv/bin/activate
155 pip install wheel # So bdist_wheel works in next step
156 pip install -e '.[test]'
157 LD_PRELOAD=/tmp/SQLite-cf538e27/.libs/libsqlite3.so python3 -c 
158 ```

til/sqlite/json-extract-path.md

32  }
33  ```
34  - `$.creatures` returns the JSON array ([demo](https://latest.datasette.io/_memory?sql=select+json_extract%28%27%7B%0D%0A++++%22creatures%22%3A+%5B%0D%0A++++++++%7B%0D%0A++++++++++++%22name%22%3A+%22Cleo%22%2C%0D%0A++++++++++++%22species%22%3A+%22dog%22%0D%0A++++++++%7D%2C%0D%0A++++++++%7B%0D%0A++++++++++++%22name%22%3A+%22Azi%22%2C%0D%0A++++++++++++%22species%22%3A+%22chicken%22%2C%0D%0A++++++++++++%22weight.lb%22%3A+1.6%0D%0A++++++++%7D%0D%0A++++%5D%0D%0A%7D%27%2C+%3Apath%29&path=%24.creatures))
35  - `$.creatures[0].name` returns `Cleo` ([demo](https://latest.datasette.io/_memory?sql=select+json_extract%28%27%7B%0D%0A++++%22creatures%22%3A+%5B%0D%0A++++++++%7B%0D%0A++++++++++++%22name%22%3A+%22Cleo%22%2C%0D%0A++++++++++++%22species%22%3A+%22dog%22%0D%0A++++++++%7D%2C%0D%0A++++++++%7B%0D%0A++++++++++++%22name%22%3A+%22Azi%22%2C%0D%0A++++++++++++%22species%22%3A+%22chicken%22%2C%0D%0A++++++++++++%22weight.lb%22%3A+1.6%0D%0A++++++++%7D%0D%0A++++%5D%0D%0A%7D%27%2C+%3Apath%29&path=%24.creatures%5B0%5D.name))
36  - `$.creatures[1]."weight.lb"` returns `1.6` ([demo](https://latest.datasette.io/_memory?sql=select+json_extract%28%27%7B%0D%0A++++%22creatures%22%3A+%5B%0D%0A++++++++%7B%0D%0A++++++++++++%22name%22%3A+%22Cleo%22%2C%0D%0A++++++++++++%22species%22%3A+%22dog%22%0D%0A++++++++%7D%2C%0D%0A++++++++%7B%0D%0A++++++++++++%22name%22%3A+%22Azi%22%2C%0D%0A++++++++++++%22species%22%3A+%22chicken%22%2C%0D%0A++++++++++++%22weight.lb%22%3A+1.6%0D%0A++++++++%7D%0D%0A++++%5D%0D%0A%7D%27%2C+%3Apath%29&path=%24.creatures%5B1%5D.%22weight.lb%22))
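
Those three paths can be tried from Python's `sqlite3` module, assuming a SQLite build with the JSON functions available - a quick sketch:

```python
import sqlite3

doc = """{
    "creatures": [
        {"name": "Cleo", "species": "dog"},
        {"name": "Azi", "species": "chicken", "weight.lb": 1.6}
    ]
}"""
conn = sqlite3.connect(":memory:")
for path in ("$.creatures", "$.creatures[0].name", '$.creatures[1]."weight.lb"'):
    # json_extract() accepts the path as a bound parameter
    print(path, conn.execute("select json_extract(?, ?)", (doc, path)).fetchone()[0])
```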
37  
38  ## \#-1 to access arrays by index from the end
44  But... you can apply a single integer subtraction operation to that `#` - so you can return the name of the last creature in the array using this:
45  
46  - `$.creatures[#-1].name` returns `Azi` ([demo](https://latest.datasette.io/_memory?sql=select+json_extract%28%27%7B%0D%0A++++%22creatures%22%3A+%5B%0D%0A++++++++%7B%0D%0A++++++++++++%22name%22%3A+%22Cleo%22%2C%0D%0A++++++++++++%22species%22%3A+%22dog%22%0D%0A++++++++%7D%2C%0D%0A++++++++%7B%0D%0A++++++++++++%22name%22%3A+%22Azi%22%2C%0D%0A++++++++++++%22species%22%3A+%22chicken%22%2C%0D%0A++++++++++++%22weight.lb%22%3A+1.6%0D%0A++++++++%7D%0D%0A++++%5D%0D%0A%7D%27%2C+%3Apath%29&path=%24.creatures%5B%23-1%5D.name))
47  
48  Here's [the commit](https://sqlite.org/src/info/35ed68a651f) that added that custom SQLite extension in 2019.
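
A quick Python check of that syntax (again assuming your SQLite build is recent enough to include it):

```python
import sqlite3

doc = '{"creatures": [{"name": "Cleo"}, {"name": "Azi"}]}'
# [#-1] counts back from the end of the array
print(sqlite3.connect(":memory:").execute(
    "select json_extract(?, '$.creatures[#-1].name')", (doc,)
).fetchone()[0])  # Azi
```
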
54  `$.has\" quotes in it`
55  
56  For example ([demo](https://latest.datasette.io/_memory?sql=select+json_extract(%27%7B%0D%0A++++%22has%5C%22+quotes+in+it%22:+%22hello%22%0D%0A%7D%27,+%27$.has%5C%22+quotes+in+it%27)&path=$.has%5C%22+quotes+in+it)):
57  
58  ```sql
65  ## Source code
66  
67  The latest source code for the JSON module can be found in [ext/misc/json.c](https://www3.sqlite.org/src/file?name=ext/misc/json.c) - in particular the `static JsonNode *jsonLookup(...)` function.
69  
70  The unit tests are really useful - those are spread across these six files:
71  
72  - [test/json1.test](https://www3.sqlite.org/src/file?name=test/json1.test)
73  - [test/json101.test](https://www3.sqlite.org/src/file?name=test/json101.test)
74  - [test/json102.test](https://www3.sqlite.org/src/file?name=test/json102.test)
75  - [test/json103.test](https://www3.sqlite.org/src/file?name=test/json103.test)
76  - [test/json104.test](https://www3.sqlite.org/src/file?name=test/json104.test)
77  - [test/json105.test](https://www3.sqlite.org/src/file?name=test/json105.test) - this one has the tests for `[#]` syntax.

til/sqlite/build-specific-sqlite-pysqlite-macos.md

1   # Building a specific version of SQLite with pysqlite on macOS/Linux
2   
3   I wanted the ability to test my Python software against a specific version of SQLite on macOS. I found a way to do that using [pysqlite3](https://github.com/coleifer/pysqlite3).
4   
5   First, clone the GitHub mirror of SQLite (so I don't have to learn how to use Fossil):
31      python3 setup.py build_static build
32  
33  The end result sits in a `pysqlite3` folder - on my machine, in `/tmp/pysqlite3/build/lib.macosx-10.15-x86_64-3.9` - test it like this:
34  
35      cd /tmp/pysqlite3/build/lib.macosx-10.15-x86_64-3.9
54      pip install /tmp/pysqlite3/dist/pysqlite3-0.4.6-cp39-cp39-macosx_10_15_x86_64.whl
55  
56  This exact same process works on Linux too (tested inside a default GitHub Actions Linux worker).
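
One way to confirm the installed wheel is linked against the SQLite version you expect - a minimal sketch, remembering that the module is imported as `pysqlite3` rather than `sqlite3`:

```python
import pysqlite3

# Print the SQLite library version this pysqlite3 build is linked against.
print(pysqlite3.connect(":memory:").execute("select sqlite_version()").fetchone()[0])
```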

til/sqlite/blob-literals.md

7   ```
8   
9   This was while writing a unit test for `datasette-media` - for [issue #19](https://github.com/simonw/datasette-media/issues/19). I used it in the test [here](https://github.com/simonw/datasette-media/blob/2cf64d949ccb8cd5f34b24aeb41b2a91de14cdd2/tests/test_media.py#L292-L295).
10  
11  The SQLite documentation for [Literal values](https://www.sqlite.org/lang_expr.html#literal_values_constants_) explains how to do this:

til/sqlite/column-combinations.md

32    num_rows desc
33  ```
34  [Try that here](https://latest.datasette.io/fixtures?sql=select%0D%0A++++case+when+%5Bpk%5D+is+not+null+then+%27pk%2C+%27+else+%27%27+end+%7C%7C%0D%0A++++case+when+%5Bcreated%5D+is+not+null+then+%27created%2C+%27+else+%27%27+end+%7C%7C%0D%0A++++case+when+%5Bplanet_int%5D+is+not+null+then+%27planet_int%2C+%27+else+%27%27+end+%7C%7C%0D%0A++++case+when+%5Bon_earth%5D+is+not+null+then+%27on_earth%2C+%27+else+%27%27+end+%7C%7C%0D%0A++++case+when+%5Bstate%5D+is+not+null+then+%27state%2C+%27+else+%27%27+end+%7C%7C%0D%0A++++case+when+%5B_city_id%5D+is+not+null+then+%27_city_id%2C+%27+else+%27%27+end+%7C%7C%0D%0A++++case+when+%5B_neighborhood%5D+is+not+null+then+%27_neighborhood%2C+%27+else+%27%27+end+%7C%7C%0D%0A++++case+when+%5Btags%5D+is+not+null+then+%27tags%2C+%27+else+%27%27+end+%7C%7C%0D%0A++++case+when+%5Bcomplex_array%5D+is+not+null+then+%27complex_array%2C+%27+else+%27%27+end+%7C%7C%0D%0A++++case+when+%5Bdistinct_some_null%5D+is+not+null+then+%27distinct_some_null%2C+%27+else+%27%27+end%0D%0A++as+columns%2C%0D%0A++count%28*%29+as+num_rows%0D%0Afrom%0D%0A++%5Bfacetable%5D%0D%0Agroup+by%0D%0A++columns%0D%0Aorder+by%0D%0A++num_rows+desc).
35  
36  This has the desired effect: it gives me back all of the combinations of not-null columns in the table, with a count for each one.
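
Here's the same idea shrunk down to a two-column toy table, as a runnable sketch:

```python
import sqlite3

# Build a "which columns are not null" string per row, then group by it
# to count how often each combination occurs.
conn = sqlite3.connect(":memory:")
conn.execute("create table t (a, b)")
conn.executemany("insert into t values (?, ?)", [(1, 1), (1, None), (None, 1), (2, 2)])
sql = """
select
    case when [a] is not null then 'a, ' else '' end ||
    case when [b] is not null then 'b, ' else '' end
  as columns,
  count(*) as num_rows
from t
group by columns
order by num_rows desc
"""
for row in conn.execute(sql):
    print(row)
# ('a, b, ', 2) followed by the two single-column combinations
```
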
53    num_rows desc' as query from pragma_table_info(:table)
54  ```
55  [Try that out](https://latest-with-plugins.datasette.io/fixtures?sql=select+%27select%0D%0A%27+%7C%7C+group_concat%28%27++++case+when+%5B%27+%7C%7C+name+%7C%7C+%27%5D+is+not+null+then+%27+%7C%7C+quote%28name+%7C%7C+%27%2C+%27%29+%7C%7C+%27+else+%27%27%27%27+end%27%2C+%27+%7C%7C%0D%0A%27%29+%7C%7C+%27%0D%0A++as+columns%2C%0D%0A++count%28*%29+as+num_rows%0D%0Afrom%0D%0A++%5B%27+%7C%7C+%3Atable+%7C%7C+%27%5D%0D%0Agroup+by%0D%0A++columns%0D%0Aorder+by%0D%0A++num_rows+desc%27+as+query+from+pragma_table_info%28%3Atable%29&table=facetable) in a demo that includes the [datasette-query-links](https://datasette.io/plugins/datasette-query-links) plugin.
56  This takes `:table` as an input and generates SQL which can be used to generate column-combination counts.

til/sphinx/sphinx-autodoc.md

1   # Adding Sphinx autodoc to a project, and configuring Read The Docs to build it
2   
3   I built a [new API reference page](https://sqlite-utils.datasette.io/en/latest/reference.html) today for `sqlite-utils`, using the Sphinx [autodoc extension](https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html) to extract docstrings from the code and use them to build a full class reference.
4   
5   I've avoided this kind of documentation in the past because I think narrative prose is a *much* better way of providing documentation - but ``sqlite-utils`` already has [detailed narrative prose](https://sqlite-utils.datasette.io/en/stable/python-api.html), so I felt that adding reference documentation powered by docstrings could enhance that project - while also providing better inline documentation for tools such as Visual Studio Code and Jupyter.
143 ## The end result
144 
145 The new page of documentation is now live at [en/latest/reference.html](https://sqlite-utils.datasette.io/en/latest/reference.html). The pull request in which I figured this all out is [sqlite-utils/pull/312](https://github.com/simonw/sqlite-utils/pull/312).

til/sphinx/blacken-docs.md

23  
24  ```python
25  @pytest.fixture
26  def datasette(tmp_path_factory):
27      # This fixture will be executed repeatedly for every test
28  ```
29  This is because of the missing function body. It turns out adding `...` (which looks prettier than `pass`) fixes this issue:
30  ```python
31  @pytest.fixture
32  def datasette(tmp_path_factory):
33      # This fixture will be executed repeatedly for every test
34      ...
35  ```

til/spatialite/minimal-spatialite-database-in-python.md

1   # Creating a minimal SpatiaLite database with Python
2   
3   When writing a test for [datasette-leaflet-freedraw](https://github.com/simonw/datasette-leaflet-freedraw) I realized I didn't have a simple tiny recipe for creating an in-memory SpatiaLite database in Python. I came up with this:
4   
5   ```python
26  /usr/local/lib/mod_spatialite.dylib
27  ```
28  I also remembered I have this script: [build_small_spatialite_db.py](https://github.com/simonw/datasette/blob/main/tests/build_small_spatialite_db.py)
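
The code block above is truncated in this excerpt - here's a rough sketch of the general recipe, assuming `mod_spatialite` lives at the path shown above and that your Python build allows loading extensions:

```python
import sqlite3

SPATIALITE = "/usr/local/lib/mod_spatialite.dylib"  # adjust for your system

conn = sqlite3.connect(":memory:")
conn.enable_load_extension(True)
conn.load_extension(SPATIALITE)
# InitSpatialMetadata(1) creates the spatial_ref_sys and geometry_columns tables
conn.execute("select InitSpatialMetadata(1)")
print(conn.execute("select spatialite_version()").fetchone())
```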

til/spatialite/knn.md

1   # KNN queries with SpatiaLite
2   
3   The latest version of SpatiaLite adds KNN support, which makes it easy to efficiently answer the question "what are the X closest records to this point".
4   
5   The USGS earthquakes GeoJSON is a great dataset for experimenting with these features.
9   `https://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/all_month.geojson` currently contains 10,642 features.
10  
11  To turn that into a SpatiaLite database using the latest version of [geojson-to-sqlite](https://github.com/simonw/geojson-to-sqlite):
12  ```bash
13  curl 'https://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/all_month.geojson' | \

til/selenium/selenium-python-macos.md

84  print(firefox.find_element_by_css_selector('body').text)
85  ```
86  I used `wget` for the download (rather than clicking the link in my browser) thanks to the warning here: https://firefox-source-docs.mozilla.org/testing/geckodriver/Notarization.html
87  
88  An easier option: install it with Homebrew:

til/readthedocs/link-from-latest-to-stable.md

1   # Linking from /latest/ to /stable/ on Read The Docs
2   
3   [Read The Docs](https://readthedocs.org/) has a handy feature where documentation for older versions will automatically link to the latest release, for example [on this page](https://docs.datasette.io/en/0.56/spatialite.html):
4   
5   <img width="978" alt="A documentation page with a note that says: You are not reading the most recent version of this documentation. 0.60 is the latest version available." src="https://user-images.githubusercontent.com/9599/150437341-14554fe7-1c47-4462-a1d9-9b8d822aaea8.png">
6   
7   That feature is enabled by a "Show version warning" check box in their Advanced Settings preference pane.
9   It's implemented by [this JavaScript](https://github.com/readthedocs/readthedocs.org/blob/0852d7c10d725d954d3e9a93513171baa1116d9f/readthedocs/core/static-src/core/js/doc-embed/version-compare.js#L13-L21) in their default theme, called [from here](https://github.com/readthedocs/readthedocs.org/blob/bc3e147770e5740314a8e8c33fec5d111c850498/readthedocs/core/static-src/core/js/doc-embed/footer.js#L66-L86).
10  
11  I had an extra requirement: I wanted pages on my `/en/latest/` documentation (which shows documentation for the in-development `main` branch on GitHub) to link back to the `/en/stable/` equivalent - but only if that page also existed in the stable documentation.
12  
13  I ended up [adding this snippet](https://github.com/simonw/datasette/commit/ffca55dfd7cc9b53522c2e5a2fa1ff67c9beadf2) of jQuery JavaScript to my custom `docs/_templates/layout.html` template:
18  <script>
19  jQuery(function ($) {
20    // Show banner linking to /stable/ if this is a /latest/ page
21    if (!/\/latest\//.test(location.pathname)) {
22      return;
23    }
24    var stableUrl = location.pathname.replace("/latest/", "/stable/");
25    // Check it's not a 404
26    fetch(stableUrl, { method: "HEAD" }).then((response) => {
47  {% endblock %}
48  ```
49  The neatest piece of this solution is the way it uses an HTTP `HEAD` request via `fetch()` to confirm that the equivalent stable page exists before adding a link to it:
50  ```javascript
51    var stableUrl = location.pathname.replace("/latest/", "/stable/");
52    // Check it's not a 404
53    fetch(stableUrl, { method: "HEAD" }).then((response) => {
56  ```
57  
58  Here's what my fix looks like, running on https://docs.datasette.io/en/latest/csv_export.html
59  
60  <img width="978" alt="This page has a banner that says:  This documentation covers the development version of Datasette. See this page for the current stable release." src="https://user-images.githubusercontent.com/9599/150438021-0ab3db8f-7f65-4846-b2d4-880e10dce79d.png">
62  ## Alternative solution: sphinx-version-warning
63  
64  Just minutes after I committed my fix I was informed of the existence of [sphinx-version-warning](https://sphinx-version-warning.readthedocs.io/en/latest/), a Sphinx plugin that can solve this problem too. There's an example of using that to add a message to the `/latest/` page in [its own documentation configuration here](https://github.com/humitos/sphinx-version-warning/blob/a82156c2ea08e5feab406514d0ccd9d48a345f48/docs/conf.py#L32-L38).
65  
66  ```python
67  # -- Version Warning Banner configuration ------------------------------------
68  versionwarning_messages = {
69      'latest': 'This is a custom message only for version "latest" of this documentation.',
70  }
71  versionwarning_admonition_type = 'tip'
75  I decided to stick with my version, mainly because I like the `fetch()` solution I used.
76  
77  GitHub issue: [Documentation should clarify /stable/ vs /latest/ #1608](https://github.com/simonw/datasette/issues/1608)

til/readthedocs/documentation-seo-canonical.md

3   I was thinking about documentation SEO today. Like many projects, Datasette offers multiple versions of the documentation:
4   
5   - https://docs.datasette.io/en/latest/ is the latest `main` branch on GitHub
6   - https://docs.datasette.io/en/stable/ is the most recent stable (non alpha or beta) release - currently 0.60
7   - https://docs.datasette.io/en/0.59.4/ is the documentation for that specific version - I have more than 70 of those now
23  <link rel="canonical" href="https://docs.datasette.io/en/stable/introspection.html" />
24  ```
25  Here's [their documentation](https://docs.readthedocs.io/en/latest/custom_domains.html#canonical-urls) covering this feature. I think you need to have configured a "default version" (though they may set a sensible default for that already) - for my project the page for doing that is the Advanced settings page at https://readthedocs.org/dashboard/datasette/advanced/
26  
27  This TIL started life as [a Twitter thread](https://twitter.com/simonw/status/1484287724773203971).

til/pytest/treat-warnings-as-errors.md

1   # Treating warnings as errors in pytest
2   
3   I was seeing this warning in a Django project when I thought I was correctly using timezone-aware dates everywhere:
5   > RuntimeWarning: DateTimeField Shift.shift_start received a naive datetime (2022-04-01 00:00:00) while time zone support is active
6   
7   Running `pytest -Werror` turns those warnings into errors that fail the tests.
8   
9   Which means you can investigate them in the Python debugger by running:
10  
11      pytest -Werror --pdb -x
12  
13  The `--pdb` starts the debugger at the warning (now error) and the `-x` stops the tests after the first failure.
14  
15  ## In pytest.ini
16  
17  You can also set this in `pytest.ini` - useful if you want ALL warnings to be failures in both development and CI.
18  
19  Add the following to the `pytest.ini` file:
20  
21  ```ini
22  [pytest]
23  # ...
24  filterwarnings =
31  
32  ```ini
33  [pytest]
34  # ...
35  filterwarnings =

til/python/packaging-pyinstaller.md

67  I solved this by adding each `ModuleNotFoundError` module to `--hidden-import` until it worked.
68  
69  I've tested this script (and the generated executables) on both macOS and Ubuntu Linux so far, and it's worked perfectly in both cases. See [issue 93](https://github.com/simonw/datasette/issues/93) for more details.

til/python/introspect-function-parameters.md

18  ```
19  
20  And here's an illustrative unit test:
21  
22  ```python
23  def test_call_with_supported_arguments():
24      def foo(a, b):
25          return "{}+{}".format(a, b)
28      assert "1+2" == utils.call_with_supported_arguments(foo, a=1, b=2, c=3)
29  
30      with pytest.raises(TypeError):
31          utils.call_with_supported_arguments(foo, a=1)
32  ```
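
The implementation itself isn't shown in this excerpt; a minimal sketch of the idea - using `inspect.signature()` to drop any keyword arguments the target function doesn't accept (not necessarily identical to the version in `datasette.utils`) - looks like this:

```python
import inspect


def call_with_supported_arguments(fn, **kwargs):
    # Only pass through the keyword arguments that fn actually declares
    parameters = inspect.signature(fn).parameters
    supported = {name: value for name, value in kwargs.items() if name in parameters}
    return fn(**supported)
```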

til/python/installing-upgrading-plugins-with-pipx.md

56      ]
57  
58  I added all of this to the Datasette docs here: https://docs.datasette.io/en/latest/installation.html#using-pipx
59  (see https://github.com/simonw/datasette/issues/756).

til/python/generate-nested-json-summary.md

1   # Generating a summary of nested JSON data
2   
3   I was trying to figure out the shape of the JSON object from https://github.com/simonw/coronavirus-data-gov-archive/blob/master/data_latest.json?raw=true - which is 3.2MB and heavily nested, so it's difficult to get a good feel for the shape.
4   
5   I solved this with a Python `summarize()` function which recursively truncates the nested lists and dictionaries.
28  import json, requests
29  data = requests.get(
30      "https://github.com/simonw/coronavirus-data-gov-archive/blob/master/data_latest.json?raw=true"
31  ).json()
32  print(json.dumps(summarize(data, list_limit=2, key_limit=7), indent=4))
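
The `summarize()` implementation is elided in this excerpt; here's a rough sketch of a function matching that call signature - a reimplementation of the idea, not the original code:

```python
def summarize(data, list_limit=5, key_limit=5):
    # Recursively truncate lists and dictionaries so the overall shape stays visible
    if isinstance(data, list):
        return [summarize(item, list_limit, key_limit) for item in data[:list_limit]]
    if isinstance(data, dict):
        return {
            key: summarize(value, list_limit, key_limit)
            for key, value in list(data.items())[:key_limit]
        }
    return data
```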

til/python/cog-to-update-help-in-readme.md

11  import cog
12  from csvs_to_sqlite import cli
13  from click.testing import CliRunner
14  runner = CliRunner()
15  result = runner.invoke(cli.cli, ["--help"])
37  ## Testing with cog --check
38  
39  A version of Cog released after I first wrote this TIL added a new `--check` option, so you can run a test in CI to check if the file needs to be updated using:
40  
41      cog --check README.md
42  
43  ## Writing a test (before cog --check)
44  
45  Any time I generate content like this in a repo I like to include a test that will fail if I forget to update the content.
46  
47  `cog` clearly isn't designed to be used as an independent library, but I came up with the following `pytest` test pattern which works well - it lives in my `tests/test_csvs_to_sqlite.py` module:
48  
49  ```python
54  
55  
56  def test_if_cog_needs_to_be_run():
57      _stdout = sys.stdout
58      sys.stdout = StringIO()
77  Cog then writes the generated output to `stdout` - which I capture with that `sys.stdout` trick.
78  
79  Finally, I compare the generated output to the current file content and fail the test with a reminder to run `cog -r` if they do not match.
80  
81  ## Cog for reStructuredText

til/python/codespell.md

51  jobs:
52    spellcheck:
53      runs-on: ubuntu-latest
54      steps:
55      - uses: actions/checkout@v2

til/python/call-pip-programatically.md

3   I needed this for the `datasette install` and `datasette uninstall` commands, see [issue #925](https://github.com/simonw/datasette/issues/925).
4   
5   My initial attempt at this resulted in weird testing errors ([#928](https://github.com/simonw/datasette/issues/928)) - while investigating them I stumbled across [this comment](https://github.com/pypa/pip/blob/e060970d51c5946beac8447eb95585d83019582d/src/pip/_internal/cli/main.py#L23-L47) in the `pip` source code:
6   
7   ```
41       run_module("pip", run_name="__main__") 
42  ```
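
The lines leading up to that `run_module()` call are elided above; the general pattern (a sketch - the package name is just a placeholder) is to point `sys.argv` at the arguments you want pip to see and then run the module as `__main__`:

```python
import sys
from runpy import run_module

# Make pip see "pip install <package>" as its command line, then run it
# in-process as if it had been invoked as `python -m pip`.
sys.argv = ["pip", "install", "datasette-vega"]
run_module("pip", run_name="__main__")
```
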
43  And here's how I wrote [a unit test](https://github.com/simonw/datasette/blob/afdeda8216d4d3027f87583ccdbef17ad85022ef/tests/test_cli.py#L114-L124) for it:
44  ```python
45  @mock.patch("datasette.cli.run_module")
46  def test_install(run_module):
47      runner = CliRunner()
48      runner.invoke(cli, ["install", "datasette-mock-plugin", "datasette-mock-plugin2"])

til/pytest/test-click-app-with-streaming-input.md

1   # Testing a Click app with streaming input
2   
3   For [sqlite-utils#364](https://github.com/simonw/sqlite-utils/issues/364) I needed to write a test for a [Click](https://click.palletsprojects.com/) app which dealt with input streamed to standard input. I needed to run some assertions during that process, which ruled out the usual [CliRunner.invoke()](https://click.palletsprojects.com/en/8.0.x/testing/) testing tool since that works by running the command until completion.
4   
5   I decided to use `subprocess` to run the application. Here's the pattern I came up with for the test:
6   ```python
7   def test_insert_streaming_batch_size_1(db_path):
8       # https://github.com/simonw/sqlite-utils/issues/364
9       # Streaming with --batch-size 1 should commit on each record
46  I realized I needed to call `proc.stdin.flush()` after each write to ensure the write was pushed to the process in a predictable manner.
47  
48  At the end of the test, running `proc.stdin.close()` is equivalent to sending an end-of-file, then `proc.wait()` ensures the process has finished and terminated.
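
Most of the test body is elided in this excerpt; here's a rough sketch of the shape of the pattern - the command being run is a stand-in, not the invocation from the original test:

```python
import subprocess
import sys


def test_streaming_stdin_sketch():
    # Run the tool under test as a real subprocess so stdin can be fed
    # incrementally and assertions made while it is still running.
    proc = subprocess.Popen(
        [sys.executable, "-u", "-c", "import sys; [print(len(line)) for line in sys.stdin]"],
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
    )
    for line in (b"alpha\n", b"beta\n"):
        proc.stdin.write(line)
        # flush() pushes the bytes to the child process in a predictable manner
        proc.stdin.flush()
        # ...assertions about intermediate state go here...
    # Closing stdin is equivalent to sending end-of-file; wait() lets it finish
    proc.stdin.close()
    proc.wait()
    assert proc.returncode == 0
```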

til/pytest/subprocess-server.md

1   # Start a server in a subprocess during a pytest session
2   
3   I wanted to start an actual server process, run it for the duration of my pytest session and shut it down at the end.
4   
5   Here's the recipe I came up with. This fixture lives in `conftest.py`:
6   
7   ```python
8   import pytest
9   import sqlite_utils
10  import subprocess
11  
12  @pytest.fixture(scope="session")
13  def ds_server(tmp_path_factory):
14      db_directory = tmp_path_factory.mktemp("dbs")
15      db_path = db_directory / "test.db"
16      db = sqlite_utils.Database(db_path)
17      insert_test_data(db)
18      ds_proc = subprocess.Popen(
19          [
31      assert not ds_proc.poll(), ds_proc.stdout.read().decode("utf-8")
32      yield ds_proc
33      # Shut it down at the end of the pytest session
34      ds_proc.terminate()
35  ```
36  A test looks like this:
37  ```python
38  import httpx
39  
40  def test_server_starts(ds_server):
41      response = httpx.get("http://127.0.0.1:8041/")
42      assert response.status_code == 200

til/pytest/session-scoped-tmp.md

1   # Session-scoped temporary directories in pytest
2   
3   I habitually use the `tmpdir` fixture in pytest to get a temporary directory that will be cleaned up after each test, but that doesn't work with `scope="session"` - which can be used to ensure an expensive fixture is run only once per test session and the generated content is used for multiple tests.
4   
5   To get a temporary directory that works with `scope="session"`, use the `tmp_path_factory` built-in pytest fixture like this:
6   
7   ```python
8   import pytest
9   
10  
11  @pytest.fixture(scope="session")
12  def template_dir(tmp_path_factory):
13      template_dir = tmp_path_factory.mktemp("page-templates")
19  
20  
21  def test_about(template_dir):
22      assert "ABOUT!" == (template_dir / "pages" / "about.html").read_text()
23  
24  
25  def test_request(template_dir):
26      assert "request" == (template_dir / "pages" / "request.html").read_text()
27  ```
28  
29  Example: https://github.com/simonw/datasette/blob/1b7b66c465e44025ec73421bd69752e42f108321/tests/test_custom_pages.py#L16-L45

til/pytest/registering-plugins-in-tests.md

1   # Registering temporary pluggy plugins inside tests
2   
3   While implementing more finely-grained permissions for `datasette-insert-api` ([issue 8](https://github.com/simonw/datasette-insert-api/issues/8)) I decided I wanted to register a Datasette pluggy plugin for the duration of a single test.
4   
5   Here's the pattern I figured out for doing that:
8   from datasette import hookimpl
9   from datasette.plugins import pm
10  import pytest
11  
12  
13  def test_using_test_plugin():
14      class TestPlugin:
15          __name__ = "TestPlugin"
22      pm.register(TestPlugin(), name="undo")
23      try:
24          # Rest of test goes here
25      finally:
26          pm.unregister(name="undo")
27  ```
28  
29  Here's [an example](https://github.com/simonw/datasette-insert/blob/7f4c2b3954190d547619d043bbe714481b10ac1e/tests/test_insert_api.py) of a test that uses a pytest fixture to register (and de-register) a plugin:
30  
31  ```python
33  from datasette.app import Datasette
34  from datasette.plugins import pm
35  import pytest
36  
37  
38  @pytest.fixture
39  def unsafe():
40      class UnsafeInsertAll:
51  
52  
53  @pytest.mark.asyncio
54  async def test_insert_alter(ds, unsafe):
55      async with httpx.AsyncClient(app=ds.app()) as client:
56          response = await client.post(

til/pytest/pytest-recording-vcr.md

1   # Using VCR and pytest with pytest-recording
2   
3   [pytest-recording](https://github.com/kiwicom/pytest-recording) is a neat pytest plugin that makes it easy to use the [VCR library](https://vcrpy.readthedocs.io/), which helps write tests against HTTP resources by automatically capturing responses and baking them into a YAML file to be replayed during the tests.
4   
5   It even works with [boto3](https://aws.amazon.com/sdk-for-python/)!
6   
7   To use it, first install it with `pip install pytest-recording` and then add the `@pytest.mark.vcr` decorator to a test that makes HTTP calls:
8   
9   ```python
10  @pytest.mark.vcr
11  def test_create():
12      runner = CliRunner()
13      with runner.isolated_filesystem():
14          result = runner.invoke(cli, ["create", "pytest-bucket-simonw-1", "-c"])
15          assert result.exit_code == 0
16  ```
17  
18  The first time you run the tests, use the `--record-mode=once` option:
19  
20      pytest -k test_create --record-mode=once
21  
22  This defaults to creating a YAML file in `tests/cassettes/test_s3_credentials/test_create.yaml`.
23  
24  Subsequent runs of `pytest -k test_create` will reuse those recorded HTTP requests and will not make any network requests - I confirmed this by turning off my laptop's WiFi.

til/pytest/pytest-mock-calls.md

1   # Quick and dirty mock testing with mock_calls
2   
3   I needed to write a test that checked for a really complex sequence of mock calls for [s3-credentials#3](https://github.com/simonw/s3-credentials/issues/3).
4   
5   I ended up using the following trick:
6   
7   ```python
8   def test_create(mocker):
9       boto3 = mocker.patch("boto3.client")
10      runner = CliRunner()
11      with runner.isolated_filesystem():
12          result = runner.invoke(cli, ["create", "pytest-bucket-simonw-1", "-c"])
13          assert [str(c) for c in boto3.mock_calls] == [
14              "call('s3')",
15              "call('iam')",
16              "call().head_bucket(Bucket='pytest-bucket-simonw-1')",
17              "call().get_user(UserName='s3.read-write.pytest-bucket-simonw-1')",
18              'call().put_user_policy(PolicyDocument=\'{"Version": "2012-10-17", "Statement": [{"Sid": "ListObjectsInBucket", "Effect": "Allow", "Action": ["s3:ListBucket"], "Resource": ["arn:aws:s3:::pytest-bucket-simonw-1"]}, {"Sid": "AllObjectActions", "Effect": "Allow", "Action": "s3:*Object", "Resource": ["arn:aws:s3:::pytest-bucket-simonw-1/*"]}]}\', PolicyName=\'s3.read-write.pytest-bucket-simonw-1\', UserName=\'s3.read-write.pytest-bucket-simonw-1\')',
19              "call().create_access_key(UserName='s3.read-write.pytest-bucket-simonw-1')",
20              "call().create_access_key().__getitem__('AccessKey')",
21              "call().create_access_key().__getitem__().__str__()",
22          ]
23  ```
24  I used the trick I describe in [How to cheat at unit tests with pytest and Black](https://simonwillison.net/2020/Feb/11/cheating-at-unit-tests-pytest-black/) where I run that comparison against an empty `[]` list, then use `pytest --pdb` to drop into a debugger and copy and paste the output of `[str(c) for c in boto3.mock_calls]` into my test code.
25  
26  Initially I used a comparison directly against `boto3.mock_calls` - but this threw a surprising error. The calls sequence I baked into my tests looked like this:
27  
28  ```python
29  from unittest.mock import call
30  
31  # ...
34              call("s3"),
35              call("iam"),
36              call().head_bucket(Bucket="pytest-bucket-simonw-1"),
37              call().get_user(UserName="s3.read-write.pytest-bucket-simonw-1"),
38              call().put_user_policy(
39                  PolicyDocument='{"Version": "2012-10-17", "Statement": [{"Sid": "ListObjectsInBucket", "Effect": "Allow", "Action": ["s3:ListBucket"], "Resource": ["arn:aws:s3:::pytest-bucket-simonw-1"]}, {"Sid": "AllObjectActions", "Effect": "Allow", "Action": "s3:*Object", "Resource": ["arn:aws:s3:::pytest-bucket-simonw-1/*"]}]}',
40                  PolicyName="s3.read-write.pytest-bucket-simonw-1",
41                  UserName="s3.read-write.pytest-bucket-simonw-1",
42              ),
43              call().create_access_key(UserName="s3.read-write.pytest-bucket-simonw-1"),
44              call().create_access_key().__getitem__("AccessKey"),
45              call().create_access_key().__getitem__().__str__(),
46          ]
47  ```
48  But when I ran `pytest` that last one failed:
49  ```
50  E             -  'call().create_access_key().__getitem__()',
53  E             ?                                          ^^^^^^^^^^
54  ```
55  It turns out `__str__()` calls do not play well with the `call()` constructor - see [this StackOverflow question](https://stackoverflow.com/questions/61926147/how-to-represent-unittest-mock-call-str).
56  
57  My solution was to cast them all to `str()` using a list comprehension, which ended up fixing that problem.
59  ## Gotcha: parameter ordering
60  
61  There's one major flaw to the `str()` trick I'm using here: the order in which parameters are displayed in the string representation of `call()` may differ between Python versions. I had to undo this trick in one place I was using it ([see here](https://github.com/simonw/s3-credentials/issues/8)) due to the following test failure:
62  
63  ```

til/pytest/pytest-argparse.md

1   # Writing pytest tests against tools written with argparse
2   
3   I usually build command-line tools using [Click](https://click.palletsprojects.com/) (and my [click-app](https://github.com/simonw/click-app) cookiecutter template), which includes a really nice [set of tools](https://click.palletsprojects.com/en/8.0.x/testing/) for writing tests.
4   
5   Today I decided to try building a tool called [stream-delay](https://github.com/simonw/stream-delay) using [argparse](https://docs.python.org/3/library/argparse.html) from the Python standard library, since it didn't need any other dependencies.
6   
7   The one challenge I had was how to write the tests. I used [pytest](https://pytest.org/) as a test-only dependency.
8   
9   Here's the pattern I came up with, using the [capsys pytest fixture](https://docs.pytest.org/en/6.2.x/capture.html) to capture standard output from my tool.
10  
11  ```python
12  from stream_delay import main
13  import pytest
14  
15  @pytest.mark.parametrize("option", ("-h", "--help"))
16  def test_help(capsys, option):
17      try:
18          main([option])
38      # ...
39  ```
40  As you can see, `main()` takes an optional list of arguments. The default for that is `None` which will cause `argparse` to read `sys.argv` - but I can inject arguments to the function from my tests if I need to.
41  
42  I'm catching the `SystemExit` exception because this will be raised by default if you use `-h` or `--help` - but I still want to finish my test execution so I can inspect the captured output.
43  
44  Complete code:
45  
46  - [stream_delay.py](https://github.com/simonw/stream-delay/blob/0.1/stream_delay.py)
47  - [tests/test_stream_delay.py](https://github.com/simonw/stream-delay/blob/0.1/tests/test_stream_delay.py)

til/pytest/only-run-integration.md

1   # Opt-in integration tests with pytest --integration
2   
3   For both [s3-credentials](https://github.com/simonw/s3-credentials) and [datasette-publish-fly](https://github.com/simonw/datasette-publish-fly) I have a need for real-world integration tests that actually interact with the underlying APIs (AWS or Fly) to create and destroy resources on those platforms.
4   
5   Most of the time I want my tests to run without doing these. I want the option to run `pytest --integration` to opt-in to running those extra integration tests.
6   
7   Here's the pattern I'm using. First, in `tests/conftest.py`:
8   
9   ```python
10  import pytest
11  
12  
13  def pytest_addoption(parser):
14      parser.addoption(
15          "--integration",
16          action="store_true",
17          default=False,
18          help="run integration tests",
19      )
20  
21  
22  def pytest_configure(config):
23      config.addinivalue_line(
24          "markers",
25          "integration: mark test as integration test, only run with --integration",
26      )
27  
28  
29  def pytest_collection_modifyitems(config, items):
30      if config.getoption("--integration"):
31          # Also run integration tests
32          return
33      skip_integration = pytest.mark.skip(reason="use --integration option to run")
34      for item in items:
35          if "integration" in item.keywords:
36              item.add_marker(skip_integration)
37  ```
38  This implements a `@pytest.mark.integration` marker which I can use to mark any test that should be considered part of the integration test suite. These will be skipped by default... but will not be skipped if the `--integration` option is passed to `pytest`.
39  
40  Then in the tests I can either do this:
41  
42  ```python
43  @pytest.mark.integration
44  def test_integration_s3():
45      # ...
46  ```
47  Or if I have a module that only contains integration tests - `tests/test_integration.py` - I can use the following line to apply that decorator to every test in the module:
48  ```python
49  import pytest
50  
51  pytestmark = pytest.mark.integration
52  
53  def test_integration_s3():
54      # ...
55  ```

til/pytest/mock-httpx.md

1   # How to mock httpx using pytest-mock
2   
3   I wrote this test to exercise some [httpx](https://pypi.org/project/httpx/) code today, using [pytest-mock](https://pypi.org/project/pytest-mock/).
4   
5   The key was to use `mocker.patch.object(cli, "httpx")` which patches the `httpx` module that was imported by the `cli` module.
6   
7   Here the `mocker` function argument is a fixture that is provided by `pytest-mock`.
8   
9   ```python
10  from conditional_get import cli
11  from click.testing import CliRunner
12  
13  
14  def test_performs_conditional_get(mocker):
15      m = mocker.patch.object(cli, "httpx")
16      m.get.return_value = mocker.Mock()
39          )
40  ```
41  https://github.com/simonw/conditional-get/blob/485fab46f01edd99818b829e99765ed9ce0978b5/tests/test_cli.py
42  
43  ## Mocking a JSON response
45  Here's a mock for a GraphQL POST request that returns JSON:
46  ```python
47  @pytest.fixture
48  def mock_graphql_region(mocker):
49      m = mocker.patch("datasette_publish_fly.httpx")
52      m.post.return_value.json.return_value = {"data": {"nearestRegion": {"code": "sjc"}}}
53  ```
54  https://github.com/simonw/datasette-publish-fly/blob/5253220bded001e94561e215d553f352838e7a1c/tests/test_publish_fly.py#L16-L21
55  
56  ## Mocking httpx.stream
67  https://stackoverflow.com/a/6112456 helped me figure out the following:
68  ```python
69  def test_performs_conditional_get(mocker):
70      m = mocker.patch.object(cli, "httpx")
71      m.stream.return_value.__enter__.return_value = mocker.Mock()
75      ]
76  ```
77  https://github.com/simonw/conditional-get/blob/80454f972d39e2b418572d7938146830fab98fa6/tests/test_cli.py
78  
79  ## Mocking an HTTP error triggered by response.raise_for_status()
81  Calling `response.raise_for_status()` raises an exception if an HTTP error (e.g. a 404 or 500) occurred.
82  
83  Here's how I [mocked that to return an error](https://github.com/simonw/airtable-to-yaml/blob/ebd94b2e29d6f2ec3dc64d161495a759330027e8/tests/test_airtable_to_yaml.py#L43-L56):
84  
85  ```python
86  def test_airtable_to_yaml_error(mocker):
87      m = mocker.patch.object(cli, "httpx")
88      m.get.return_value = mocker.Mock()

til/pytest/coverage-with-context.md

1   # pytest coverage with context
2   
3   [This tweet](https://twitter.com/mariatta/status/1499863816489734146) from \@Mariatta tipped me off to the ability to measure "contexts" when [running coverage](https://coverage.readthedocs.io/en/6.3.2/contexts.html#context-reporting) - as a way to tell which tests exercise which specific lines of code.
4   
5   My [sqlite-utils](https://github.com/simonw/sqlite-utils) project uses `pytest` for the test suite. I decided to figure out how to get this working with [pytest-cov](https://pypi.org/project/pytest-cov/).
6   
7   After some experimentation, this is the recipe that worked for me:
8   
9   ```
10  # In the virtual environment, make sure pytest-cov is installed:
11  % pip install pytest-cov
12  # First, run pytest to calculate coverage of the `sqlite_utils` package, with context
13  % pytest --cov=sqlite_utils --cov-context=test
14  # The .coverage file is actually a SQLite database:
15  % ls -lah .coverage
23  Here's what one of the pages looks like, displaying the context for some lines of code:
24  
25  ![The code has an expandable section which reveals which tests executed each individual line.](https://user-images.githubusercontent.com/9599/156860441-66e35994-653a-4ab7-b690-4d901fc57750.png)
26  
27  ## The .coverage schema

til/pytest/async-fixtures.md

1   # Async fixtures with pytest-asyncio
2   
3   I wanted to use a fixture with `pytest-asyncio` that was itself an `async def` function, so that it could execute `await` statements.
4   
5   Since I'm using a `pytest.ini` file containing `asyncio_mode = strict` I had to use the `@pytest_asyncio.fixture` fixture to get this to work. Without that fixture I got this error:
6   
7   ```
9   E   AssertionError: assert False
10  E    +  where False = _has_explicit_asyncio_mark(<function ds_with_route at 0x11332d2d0>)
11  E    +    where <function ds_with_route at 0x11332d2d0> = <FixtureDef argname='ds_with_route' scope='function' baseid='tests/test_routes.py'>.func
12  ```
13  
14  Swapping `@pytest.fixture` for `@pytest_asyncio.fixture` fixed this problem:
15  
16  ```python
17  import pytest_asyncio
18  
19  @pytest_asyncio.fixture
20  async def ds_with_route():
21      ds = Datasette()

til/pytest/assert-dictionary-subset.md

1   # Asserting a dictionary is a subset of another dictionary
2   
3   My [lazy approach to writing unit tests](https://simonwillison.net/2020/Feb/11/cheating-at-unit-tests-pytest-black/) means that sometimes I want to run an assertion against most (but not all) of a dictionary.
4   
5   Take for example an API endpoint that returns something like this:
12  }
13  ```
14  I want to efficiently assert against the second two keys, but I don't want to hard-code the SQLite version into my test in case it changes in the future.
15  
16  Solution:
25  The trick here is using `expected.items() <= actual.items()` to assert that one dictionary is a subset of another.
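
The elided solution boils down to something like this sketch - the endpoint and key names here are illustrative, the point is the `items()` comparison:

```python
def test_version_info_sketch():
    actual = {
        "sqlite_version": "3.39.4",  # don't want to hard-code this one
        "datasette": "0.64",
        "python": "3.11",
    }
    expected = {"datasette": "0.64", "python": "3.11"}
    # Passes as long as every expected key/value pair appears in actual
    assert expected.items() <= actual.items()
```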
26  
27  Here's a recent example test that uses this trick: https://github.com/simonw/datasette/blob/40885ef24e32d91502b6b8bbad1c7376f50f2830/tests/test_plugins.py#L414-L446

til/pytest/pytest-code-coverage.md

1   # Code coverage using pytest and codecov.io
2   
3   I got my [asgi-csrf](https://github.com/simonw/asgi-csrf) Python package up to 100% code coverage. Here's [the pull request](https://github.com/simonw/asgi-csrf/issues/13).
4   
5   I started by installing and using the [pytest-cov](https://pypi.org/project/pytest-cov/) pytest plugin.
6   
7   ```
8   pip install pytest-cov
9   pytest --cov=asgi_csrf
10  ```
11  This shows the current code coverage percentage for the `asgi_csrf` module in the terminal output:
13  collected 18 items                                                                                                                                                   
14  
15  test_asgi_csrf.py ..................                                                                                                                           [100%]
16  
17  ---------- coverage: platform darwin, python 3.7.3-final-0 -----------
23  ========= 18 passed in 0.37s =========
24  ```
25  To generate an HTML report showing which lines are not covered by tests:
26  ```
27  pytest --cov=asgi_csrf --cov-report=html
28  open htmlcov/index.html
29  ```
30  Here's a hosted copy of that report: https://asgi-csrf-htmlcov-ewca4t9se.vercel.app/asgi_csrf_py.html
31  
32  ## Failing the tests if coverage is below a certain threshold
33  
34  The `--cov-fail-under=100` option does this:
35  
36  ```
37  pytest --cov-fail-under=100 --cov asgi_csrf 
38  ======= test session starts =======
39  platform darwin -- Python 3.7.3, pytest-6.0.1, py-1.9.0, pluggy-0.13.1
40  rootdir: /Users/simon/Dropbox/Development/asgi-csrf
41  plugins: cov-2.10.1, asyncio-0.14.0
42  collected 18 items                                                                                                                                                   
43  
44  test_asgi_csrf.py ..................                                                                                                                           [100%]
45  
46  ---------- coverage: platform darwin, python 3.7.3-final-0 -----------
49  asgi_csrf.py     169     13    92%
50  
51  FAIL Required test coverage of 100% not reached. Total coverage: 92.31%
52  ```
53  I added this to my [GitHub test action](https://github.com/simonw/asgi-csrf/blob/83d2b4f6bb034b746fd3f20f57ebdbaeae007a73/.github/workflows/test.yml#L27-L29):
54  ```yaml
55      - name: Run tests
56        run: |
57          pytest --cov-fail-under=100 --cov asgi_csrf
58  ```
59  ## Pushing results to codecov.io
61  https://codecov.io/ offers free coverage reporting for open source projects. I authorized it against my GitHub account, then enabled it for the `asgi-csrf` project by navigating to https://codecov.io/gh/simonw/asgi-csrf (hacking the URL saves you from having to paginate through all of your repos looking for the right one).
62  
63  codecov.io gives you a token - set that as a GitHub repository secret called `CODECOV_TOKEN` - then add the following to the test action configuration:
64  ```yaml
65      - name: Upload coverage to codecov.io
69          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
70  ```
71  This will upload your coverage report (whether the previous test step failed or succeeded). codecov.io then reports back to pull requests and maintains a dashboard for your project.
72  
73  codecov.io doesn't detect if you use a `main` or `master` branch so I had to switch the default branch in the settings at https://codecov.io/gh/simonw/asgi-csrf/settings

til/pluggy/multiple-hooks-same-file.md

26  Which allows you to write more than one plugin implementation function in the same Python module file.
27  
28  Note that the `specname` feature requires [Pluggy 1.0.0](https://github.com/pytest-dev/pluggy/blob/main/CHANGELOG.rst#pluggy-100-2021-08-25) or higher.
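Here's a minimal sketch of what that looks like, using Datasette's `prepare_connection` hook as an assumed example:

```python
from datasette import hookimpl


@hookimpl(specname="prepare_connection")
def prepare_connection_add_double(conn):
    conn.create_function("double", 1, lambda x: x * 2)


# A second implementation of the same hook in the same module - pluggy
# matches it to the hook spec via specname=, not the function name
@hookimpl(specname="prepare_connection")
def prepare_connection_add_shout(conn):
    conn.create_function("shout", 1, lambda s: s.upper())
```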

til/npm/publish-web-component.md

31    "module": "datasette-table.js",
32    "scripts": {
33      "test": "echo \"Error: no test specified\" && exit 1",
34    },
35    "dependencies": {
60  ```
61    "scripts": {
62      "test": "echo \"Error: no test specified\" && exit 1",
63      "dev": "vite",
64      "build": "vite build",

til/nginx/proxy-domain-sockets.md

27  (The `$PWD` seems necessary to avoid `nginx` looking in its default directory.)
28  
29  Start something listening on the `/tmp/datasette.sock` path - with the latest Datasette you can do this:
30  
31      datasette --uds /tmp/datasette.sock

til/mediawiki/mediawiki-sqlite-macos.md

30  ## Downloading MediaWiki
31  
32  I downloaded the latest version of MediaWiki from [their downloads page](https://www.mediawiki.org/wiki/Download), unzipped it, ran `php -S localhost:8000` and got this error message:
33  
34  <img width="664" alt="MediaWiki_1_35" src="https://user-images.githubusercontent.com/9599/110229420-1d932280-7ebe-11eb-98f3-eb13fa7f07c7.png">
35  
36  Turns out the latest MediaWiki requires PHP 7.3.19, but the version bundled with my laptop was 7.3.11.
37  
38  I didn't want to mess around with upgrading PHP, so I used the [compatibility page](https://www.mediawiki.org/wiki/Compatibility#PHP) to figure out the most recent MediaWiki version that would work with PHP 7.3.11. I decided to try MediaWiki 1.31, which can be downloaded from <https://releases.wikimedia.org/mediawiki/1.31/?C=S;O=D>

til/macos/zsh-pip-install.md

1   # Running pip install -e .[test] in zsh on macOS Catalina
2   
3   macOS Catalina uses `zsh` rather than `bash` as the default shell (apparently because Apple don't like GPL 3).
9        . /Users/simon/.local/share/virtualenvs/datasette-AWNrQs95/bin/activate                                                                         
10      datasette %  . /Users/simon/.local/share/virtualenvs/datasette-AWNrQs95/bin/activate
11      (datasette) simon@Simons-MacBook-Pro datasette % pip install -e .[test]
12      zsh: no matches found: .[test]
13  
14  In `zsh` the `[` character has special meaning.
16  Two solutions. The first is to use quotes:
17  
18      datasette % pip install -e '.[test]'
19      Obtaining file:///Users/simon/Dropbox/Development/datasette
20      ...
22  The second is to prefix it with `noglob`:
23  
24      datasette % noglob pip install -e .[test]

til/linux/iconv.md

1   # Using iconv to convert the text encoding of a file
2   
3   In [sqlite-utils issue 439](https://github.com/simonw/sqlite-utils/issues/439) I was testing against a CSV file that used UTF16 little endian encoding, also known as `utf-16-le`.
4   
5   I converted it to UTF-8 using `iconv` like this:

til/javascript/jest-without-package-json.md

1   # Using Jest without a package.json
2   
3   I wanted to try out [Jest](https://jestjs.io/) for writing JavaScript unit tests, in a project that wasn't set up with `package.json` and other NPM related things.
4   
5   Jest looks for `*.spec.js` tests in a `__tests__` directory. It expects to find configuration in a `package.json` file but it can be passed configuration using the `-c` option - which can be a path to a JSON configuration file or can be a JSON literal.
6   
7   I created a file I wanted to test in `plugins.js` which looked like this. The `module.exports` at the bottom was required so Jest could later import the code:
8   
9   ```javascript
36  ```
37  
38  Then I created `__tests__/plugins.spec.js` with this:
39  
40  ```javascript
42  
43  describe("Datasette Plugins", () => {
44    test("it should have datasette.plugins", () => {
45      expect(!!datasette.plugins).toEqual(true);
46    });
47    test("registering a plugin should work", () => {
48      datasette.plugins.register("numbers", (a, b) => a + b, ["a", "b"]);
49      var result = datasette.plugins.call("numbers", { a: 1, b: 2 });
58  ```
59  % npx jest -c '{}'
60   PASS  __tests__/plugins.spec.js
61    Datasette Plugins
62      ✓ it should have datasette.plugins (3 ms)
67  Snapshots:   0 total
68  Time:        1.163 s
69  Ran all test suites.
70  ```

til/javascript/dropdown-menu-with-details-summary.md

22  </details>
23  ```
24  See the top right corner of https://latest-with-plugins.datasette.io/ for a demo.
25  
26  This displays an SVG icon which, when clicked, expands to show the menu. The SVG icon uses `aria-labelledby="nav-menu-svg-title" role="img"` and a `<title id="nav-menu-svg-title">` element for accessibility.

til/homebrew/upgrading-python-homebrew-packages.md

1   # Upgrading Python Homebrew packages using pip
2   
3   [VisiData 2.0](https://www.visidata.org/) came out today. I previously installed VisiData using Homebrew, but the VisiData tap has not yet been updated with the latest version.
4   
5   Homebrew Python packages (including the packages for [Datasette](https://formulae.brew.sh/formula/datasette) and [sqlite-utils](https://formulae.brew.sh/formula/sqlite-utils)) work by setting up their own package-specific virtual environments. This means you can upgrade them without waiting for the tap.

til/homebrew/packaging-python-cli-for-homebrew.md

54    end
55  
56    test do
57      system bin/"datasette", "--help"
58    end
97  `poet -f datasette` generates the full formula.
98  
99  You need to fill in the description and the `test` block, but other than that it looks like it should work straight away.
100 
101 ## Implementing the test block
102 
103 https://docs.brew.sh/Formula-Cookbook#add-a-test-to-the-formula says:
104 
105 > We want tests that don't require any user input and test the basic functionality of the application. For example `foo build-foo input.foo` is a good test and (despite their widespread use) `foo --version` and `foo --help` are bad tests. However, a bad test is better than no test at all.
106 
107 Here's the test block I ended up using for Datasette:
108 
109 ```ruby
110   test do
111     assert_match "15", shell_output("#{bin}/datasette --get '/:memory:.csv?sql=select+3*5'")
112     assert_match "<title>Datasette:", shell_output("#{bin}/datasette --get '/'")
114 ```
115 
116 And here's my test for `sqlite-utils`:
117 
118 ```ruby
119   test do
120     assert_match "15", shell_output("#{bin}/sqlite-utils :memory: 'select 3 * 5'")
121   end

til/homebrew/latest-sqlite.md

1   # Running the latest SQLite in Datasette using Homebrew
2   
3   I made a pleasant discovery today: Homebrew are very quick to update to the latest SQLite release (here's [their formula](https://github.com/Homebrew/homebrew-core/blob/master/Formula/sqlite.rb)), and since [Datasette](https://datasette.io/), when installed via Homebrew, uses that version, this means you can run `brew upgrade sqlite` to ensure you are running the most recent SQLite version within Datasette.
4   
5   If you've installed Datasette using Homebrew:

til/graphql/graphql-with-curl.md

1   # Using curl to run GraphQL queries from the command line
2   
3   I wanted to run a query against the GitHub GraphQL API using `curl` on the command line, while keeping the query itself as readable as possible. Here's the recipe I came up with (tested in both `bash` and `zsh`), with TOKEN replaced by my GitHub API personal access token:
4   ```
5   curl -s https://api.github.com/graphql -X POST \

til/heroku/pg-pull.md

27      heroku pg:pull HEROKU_POSTGRESQL_JADE_URL simonwillisonblog -a simonwillisonblog
28  
29  This created a local PostgreSQL database called `simonwillisonblog` and imported my latest backup.
30  
31  When I ran it a second time I had to use `dropdb simonwillisonblog` first to drop the existing local database.

til/github-actions/service-containers-docker.md

3   I have a Django application which uses PostgreSQL. I build the Django application into its own Docker container, push that built container to the GitHub package registry and then deploy that container to production.
4   
5   I wanted to run the tests inside the container as part of the deployment process, to make sure the container that I build is ready to be deployed (via continuous deployment).
6   
7   In production I'm using Digital Ocean PostgreSQL rather than running PostgreSQL in a container. For running the tests I decided to use GitHub's [PostgreSQL service containers](https://docs.github.com/en/actions/guides/creating-postgresql-service-containers).
8   
9   But how do you set it up so tests running inside a Docker container can talk to the PostgreSQL service container provided by the GitHub Actions environment?
10  
11  This took a while to figure out. The key insight was that Docker containers (at least on Linux) have a magic IP address, `172.17.0.1`, which can be used to access their host environment - and GitHub's PostgreSQL container is available to that host environment on localhost port 5432.
14  
15  ```yaml
16  name: Build, test and deploy
17  
18  on:
20  
21  jobs:
22    build_test_deploy:
23      runs-on: ubuntu-latest
24      services:
25        postgres:
42        run: |-
43          docker build -t my-tag .
44      - name: Run tests
45        run: |-
46          docker run \
47            -e DATABASE_URL="postgres://postgres:postgres@172.17.0.1:5432/postgres" \
48            --entrypoint=/app/github-actions-runtests.sh \
49            my-tag
50  ```
51  My `github-actions-runtests.sh` file uses [django-pytest](https://pytest-django.readthedocs.io/) and looks like this:
52  ```bash
53  #!/bin/bash
54  cd /app
55  pytest --ds=config.test_settings
56  ```

til/github/graphql-pagination-python.md

3   (See also [Building a self-updating profile README for GitHub](https://simonwillison.net/2020/Jul/10/self-updating-profile-readme/) on my blog)
4   
5   For my [auto-updating personal README](https://twitter.com/simonw/status/1281435464474324993) I needed to fetch the latest release for every repository I have on GitHub. Since I have 316 public repos I wanted the most efficient way possible to do this. I decided to use the [GitHub GraphQL API](https://developer.github.com/v4/).
6   
7   Their API allows you to fetch up to 100 repositories at once, and each one can return up to 100 releases. Since I only wanted the most recent release my query ended up looking like this:
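As a hedged illustration of the overall pattern (a placeholder query, not the actual releases query), cursor-based pagination against that API with `requests` looks something like this:

```python
import requests

# Placeholder query: page through the viewer's repositories 100 at a time
QUERY = """
query ($cursor: String) {
  viewer {
    repositories(first: 100, after: $cursor) {
      pageInfo { hasNextPage endCursor }
      nodes { name }
    }
  }
}
"""

cursor = None
repos = []
while True:
    response = requests.post(
        "https://api.github.com/graphql",
        json={"query": QUERY, "variables": {"cursor": cursor}},
        headers={"Authorization": "bearer TOKEN"},  # TOKEN is a placeholder
    )
    page = response.json()["data"]["viewer"]["repositories"]
    repos.extend(page["nodes"])
    if not page["pageInfo"]["hasNextPage"]:
        break
    cursor = page["pageInfo"]["endCursor"]
```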

til/github/graphql-search-topics.md

9   An oddity of GitHub search is that sort order can be defined using tokens that form part of the search query!
10  
11  Here's a GraphQL query [tested here](https://developer.github.com/v4/explorer/) that returns the most recent 100 `git-scraping` tagged repos, sorted by most recently updated.
12  
13  ```graphql

til/github/dependencies-graphql-api.md

24  I added `https://api.github.com/graphql` as the endpoint.
25  
26  I tested it by running these queries:
27  ```graphql
28  {

til/github-actions/python-3-11.md

1   # Testing against Python 3.11 preview using GitHub Actions
2   
3   I decided to run my CI tests against the Python 3.11 preview, to avoid the problem I had when Python 3.10 came out with [a bug that affected Datasette](https://simonwillison.net/2021/Oct/9/finding-and-reporting-a-bug/).
4   
5   I used the new [GitHub Code Search](https://cs.github.com/) to figure out how to do this. I searched for:
11      3.11-dev
12  
13  I added that to my test matrix like so:
14  
15  ```yaml
16  jobs:
17    test:
18      runs-on: ubuntu-latest
19      strategy:
20        matrix:
28      # ...
29  ```
30  Here's the [full workflow](https://github.com/simonw/datasette/blob/a9d8824617268c4d214dd3be2174ac452044f737/.github/workflows/test.yml).
31  

til/github-actions/prettier-github-actions.md

23  jobs:
24    prettier:
25      runs-on: ubuntu-latest
26      steps:
27      - name: Check out repo
39  ```
40  
41  The `npx prettier --check 'datasette/static/*[!.min].js'` line ensures that prettier is run in "check" mode (which fails the tests if a matching file does not conform to the formatting rules) - it checks any `.js` file in the `datasette/static` folder but excludes any `.min.js` minified files.
42  
43  I'm using `npx` to run Prettier which installs it if it is missing - as far as I can tell `npx` respects the `.npm` cache so I'm using that to avoid downloading a new copy of Prettier every time. **UPDATE:** Apparently it doesn't, see [#1169](https://github.com/simonw/datasette/issues/1169)

til/github-actions/postgresq-service-container.md

1   # Running tests against PostgreSQL in a service container
2   
3   I wanted to run some Django tests - using `pytest-django` and with Django configured to pick up the `DATABASE_URL` environment variable via [dj-database-url](https://github.com/jacobian/dj-database-url) - against a PostgreSQL server running in GitHub Actions.
4   
5   It took a while to figure out the right pattern. The trick was to define a `postgres:` service and then set the `DATABASE_URL` environment variable to the following:
7       postgres://postgres:postgres@127.0.0.1:${{ job.services.postgres.ports['5432'] }}/dbname
8   
9   Here's my full `.github/workflows/test.yml`:
10  
11  ```yaml
12  name: Run tests
13  
14  on: [push]
15  
16  jobs:
17    test:
18      runs-on: ubuntu-latest
19      services:
20        postgres:
44        run: |
45          pip install -r requirements.txt
46      - name: Run tests
47        env:
48          DATABASE_URL: postgres://postgres:postgres@127.0.0.1:${{ job.services.postgres.ports['5432'] }}/dbname
49        run: |
50          cd myproject
51          pytest
52  ```
53  
54  ## And against MySQL
55  
56  I had to figure this out against MySQL as well for `db-to-sqlite` - here's [the workflow test.yml file](https://github.com/simonw/db-to-sqlite/blob/1.4/.github/workflows/test.yml) I ended up with. Key extract here:
57  
58  ```yaml
62          env:
63            MYSQL_ALLOW_EMPTY_PASSWORD: yes
64            MYSQL_DATABASE: test_db_to_sqlite
65          options: >-
66            --health-cmd="mysqladmin ping" --health-interval=10s --health-timeout=5s --health-retries=3
68            - 3306:3306
69      # ...
70      - name: Run tests
71        env:
72          MYSQL_TEST_DB_CONNECTION: mysql://root@127.0.0.1:${{ job.services.mysql.ports['3306'] }}/test_db_to_sqlite
73        run: pytest -vv
74  ```

til/github-actions/oxipng.md

56  ## Testing this in a branch first
57  
58  I tested this all in a branch first so that I could see if it was working correctly.
59  
60  Since my workflow usually pushes any changed files back to the same GitHub repository, I added a check to that step which caused it to only run on pushes to the `main` branch:

til/github-actions/markdown-table-of-contents.md

36  jobs:
37    build:
38      runs-on: ubuntu-latest
39      steps:
40      - name: Check out repo

til/github-actions/grep-tests.md

1   # Using grep to write tests in CI
2   
3   GitHub Actions workflows fail if any of the steps executes something that returns a non-zero exit code.
5   Today I learned that `grep` returns a non-zero exit code if it fails to find any matches.
6   
7   This means that piping to grep is a really quick way to write a test as part of an Actions workflow.
8   
9   I wrote a quick soundness check today using the new `datasette --get /path` option, which runs a fake HTTP request for that path through Datasette and returns the response to standard out. Here's an example:
12      - name: Build database
13        run: scripts/build.sh
14      - name: Run tests
15        run: |
16          datasette . --get /us/pillar-point | grep 'Rocky Beaches'

til/github-actions/different-steps-on-a-schedule.md

20  jobs:
21    build_and_deploy:
22      runs-on: ubuntu-latest
23      steps:
24      # ...

til/github-actions/different-postgresql-versions.md

1   # Installing different PostgreSQL server versions in GitHub Actions
2   
3   The GitHub Actions `ubuntu-latest` default runner currently includes an installation of PostgreSQL 13. The server is not running by default but you can interact with it like this:
4   ```
5   $ /usr/lib/postgresql/13/bin/postgres --version
15  This works with `postgresql-10` and `postgresql-11` as well as `postgresql-12`.
16  
17  I wanted to use a GitHub Actions matrix to run my tests against all four versions of PostgreSQL. Here's [my complete workflow](https://github.com/simonw/django-sql-dashboard/blob/1.0.1/.github/workflows/test.yml) - the relevant sections are below:
18  ```yaml
19  name: Test
22  
23  jobs:
24    test:
25      runs-on: ubuntu-latest
26      strategy:
27        matrix:
36          sudo apt-get update
37          sudo apt-get -y install "postgresql-$POSTGRESQL_VERSION"
38      - name: Run tests
39        env:
40          POSTGRESQL_VERSION: ${{ matrix.postgresql-version }}
42          export POSTGRESQL_PATH="/usr/lib/postgresql/$POSTGRESQL_VERSION/bin/postgres"
43          export INITDB_PATH="/usr/lib/postgresql/$POSTGRESQL_VERSION/bin/initdb"
44          pytest
45  ```
46  I modified my tests to call the `postgres` and `initdb` binaries specified by the `POSTGRESQL_PATH` and `INITDB_PATH` environment variables.
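A minimal sketch of one way to pick those up in a fixture (hypothetical, not the project's actual conftest):

```python
import os

import pytest


@pytest.fixture(scope="session")
def postgresql_binaries():
    # Fall back to whatever is on PATH when the variables are not set
    return {
        "postgres": os.environ.get("POSTGRESQL_PATH", "postgres"),
        "initdb": os.environ.get("INITDB_PATH", "initdb"),
    }
```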

til/github-actions/deploy-live-demo-when-tests-pass.md

1   # Deploying a live Datasette demo when the tests pass
2   
3   I've implemented this pattern a bunch of times now - here's the version I've settled on for my [datasette-auth0 plugin](https://github.com/simonw/datasette-auth0) repository.
7   See below for publishing to Vercel.
8   
9   In `.github/workflows/test.yml`:
10  
11  ```yaml
15  
16  jobs:
17    test:
18      runs-on: ubuntu-latest
19      strategy:
20        matrix:
35      - name: Install dependencies
36        run: |
37          pip install -e '.[test]'
38      - name: Run tests
39        run: |
40          pytest
41    deploy_demo:
42      runs-on: ubuntu-latest
43      needs: [test]
44      if: github.ref == 'refs/heads/main'
45      steps:
65          gcloud config set run/region us-central1
66          gcloud config set project datasette-222320
67          wget https://latest.datasette.io/fixtures.db
68          datasette publish cloudrun fixtures.db \
69          --install https://github.com/simonw/datasette-auth0/archive/$GITHUB_SHA.zip \
75          --service datasette-auth0-demo
76  ```
77  The first job called `test` runs the Python tests in the repo. The second `deploy_demo` block is where things get interesting.
78  
79  ```yaml
80    deploy_demo:
81      runs-on: ubuntu-latest
82      needs: [test]
83      if: github.ref == 'refs/heads/main'
84  ```
85  The `needs: [test]` bit ensures this only runs if the tests pass first.
86  
87  `if: github.ref == 'refs/heads/main'` causes the deploy to only run on pushes to the `main` branch.
97  ## Deploying to Vercel
98  
99  [This example](https://github.com/simonw/datasette-hashed-urls/blob/659614c23cbc544915079c44b09b09b090400ff8/.github/workflows/test.yml) deploys to Vercel instead. The key difference is this:
100 
101 ```yaml
106         VERCEL_TOKEN: ${{ secrets.VERCEL_TOKEN }}
107       run: |-
108         wget https://latest.datasette.io/fixtures.db
109         datasette publish vercel fixtures.db \
110           --project datasette-hashed-urls \

til/github-actions/debug-tmate.md

15  jobs:
16    build:
17      runs-on: ubuntu-latest
18      steps:
19      - uses: actions/checkout@v2
29  I ran `ssh JA69KaB2avRPRZSkRb8JPa9Gd@nyc1.tmate.io` and got a direct connection to the Action, with my project files all available thanks to the `- uses: actions/checkout@v2` step.
30  
31  Once I'd finished testing things out in that environment, I typed `touch continue` and the session ended itself.
32  
33  ## Starting a shell just for test failures on manual runs
34  
35  I had a tricky test failure that I wanted to debug interactively. Here's a recipe for starting a tmate shell ONLY if the previous step failed, and only if the run was triggered manually (using `workflow_dispatch`) - because I don't want an accidental test opening up a shell and burning up my GitHub Actions minutes allowance.
36  
37  ```yaml
38      steps:
39      - name: Run tests
40        run: pytest
41      - name: tmate session if tests fail
42        if: failure() && github.event_name == 'workflow_dispatch'
43        uses: mxschmitt/action-tmate@v3

til/github-actions/commit-if-file-changed.md

41  
42  ```yaml
43  name: Fetch latest data
44  
45  on:
51  jobs:
52    scheduled:
53      runs-on: ubuntu-latest
54      steps:
55      - name: Check out this repo
56        uses: actions/checkout@v2
57      - name: Fetch latest data
58        run: |-
59          curl https://c19downloads.azureedge.net/downloads/data/data_latest.json | jq . > data_latest.json
60          curl https://c19pub.azureedge.net/utlas.geojson | gunzip | jq . > utlas.geojson
61          curl https://c19pub.azureedge.net/countries.geojson | gunzip | jq . > countries.geojson
67          git add -A
68          timestamp=$(date -u)
69          git commit -m "Latest data: ${timestamp}" || exit 0
70          git push
71  ```

til/electron/sign-notarize-electron-macos.md

150         "./dist/mac/Datasette.app/Contents/Resources/python/bin/python3.9",
151         "./dist/mac/Datasette.app/Contents/Resources/python/lib/python3.9/lib-dynload/xxlimited.cpython-39-darwin.so",
152         "./dist/mac/Datasette.app/Contents/Resources/python/lib/python3.9/lib-dynload/_testcapi.cpython-39-darwin.so"
153       ]
154     },
193 ## Automating it all with GitHub Actions
194 
195 I decided to build and notarize on _every push_ to my repository, so I could save the resulting build as an artifact and install any in-progress work on a computer to test it.
196 
197 Apple [limit you to 75 notarizations a day](https://developer.apple.com/documentation/security/notarizing_macos_software_before_distribution/customizing_the_notarization_workflow#3561440) so I think this is OK for my projects.
198 
199 My full [test.yml](https://github.com/simonw/datasette-app/blob/0.1.0/.github/workflows/test.yml) looks like this:
200 
201 ```yaml
205 
206 jobs:
207   test:
208     runs-on: macos-latest
209     steps:
210       - uses: actions/checkout@v2
232         run: |
233           ./download-python.sh
234       - name: Run tests
235         run: npm test
236         timeout-minutes: 5
237       - name: Build distribution

til/docker/docker-for-mac-container-to-postgresql-on-host.md

5   When I deploy applications to Fly.io I build them as Docker containers and inject the Fly PostgreSQL database details as a `DATABASE_URL` environment variable.
6   
7   In order to test those containers on my laptop, I needed to figure out a way to set a `DATABASE_URL` that would point to the PostgreSQL I have running on my own laptop - so that I didn't need to spin up another PostgreSQL Docker container just for testing purposes.
8   
9   ## host.docker.internal

til/docker/debian-unstable-packages.md

3   For [Datasette #1249](https://github.com/simonw/datasette/issues/1249) I wanted to build a Docker image from the `python:3.9.2-slim-buster` base image ("buster" is the current stable release of Debian) but include a single package from "sid", the unstable Debian distribution.
4   
5   I needed to do this because the latest version of SpatiaLite, version 5, was available in `sid` but not in `buster` (which only has 4.3.0a):
6   
7   https://packages.debian.org/search?keywords=spatialite
8   
9   <img width="923" alt="Package libsqlite3-mod-spatialite&#13;&#13;stretch (oldstable) (libs): Geospatial extension for SQLite - loadable module&#13;    4.3.0a-5+b1: amd64 arm64 armel armhf i386 mips mips64el mipsel ppc64el s390x&#13;    buster (stable) (libs): Geospatial extension for SQLite - loadable module&#13;    4.3.0a-5+b2: amd64 arm64 armel armhf i386 mips mips64el mipsel ppc64el s390x&#13;    bullseye (testing) (libs): Geospatial extension for SQLite - loadable module&#13;    5.0.1-2: amd64 arm64 armel armhf i386 mips64el mipsel ppc64el s390x&#13;    sid (unstable) (libs): Geospatial extension for SQLite - loadable module&#13;    5.0.1-2: alpha amd64 arm64 armel armhf hppa i386 m68k mips64el mipsel ppc64 ppc64el riscv64 s390x sh4 sparc64 x32&#13;    experimental (libs): Geospatial extension for SQLite - loadable module&#13;    5.0.0~beta0-1~exp2 [debports]: powerpcspe" src="https://user-images.githubusercontent.com/9599/112061886-5cf77b00-8b1c-11eb-8f4c-91dce388dc33.png">
10  
11  The recipe that ended up working for me was to install `software-properties-common` to get the `add-apt-repository` command, then use that to install a package from `sid`:

til/digitalocean/datasette-on-digitalocean-app-platform.md

35  
36  ```
37  wget https://latest.datasette.io/fixtures.db
38  ```
39  And this resulted in the `fixtures.db` file being served at `/fixtures` under my app's subdomain.

til/django/testing-django-admin-with-pytest.md

1   # Writing tests for the Django admin with pytest-django
2   
3   I'm using [pytest-django](https://pytest-django.readthedocs.io/) on a project and I wanted to write a test for a Django admin create form submission. Here's the pattern I came up with:
4   
5   ```python
6   from .models import Location
7   import pytest
8   
9   
10  def test_admin_create_location_sets_public_id(client, admin_user):
11      client.force_login(admin_user)
12      assert Location.objects.count() == 0
28      assert location.public_id == "lc"
29  ```
30  The trick here is to use the `client` and `admin_user` pytest-django fixtures ([documented here](https://pytest-django.readthedocs.io/en/latest/helpers.html#fixtures)) to get a configured test client and admin user object, then use `client.force_login(admin_user)` to obtain a session where that user is signed in to the admin. Then write tests as normal.
31  
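A condensed sketch of that pattern (the admin URL and form fields here are illustrative, not the project's actual ones):

```python
from .models import Location


def test_admin_create_location(client, admin_user):
    client.force_login(admin_user)
    # Submit the admin "add" form for the (illustrative) Location model
    response = client.post(
        "/admin/core/location/add/",
        {"name": "Test location"},
        follow=True,
    )
    assert response.status_code == 200
    assert Location.objects.count() == 1
```
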
32  ## Using the admin_client fixture
33  
34  Even better: use the `admin_client` fixture provided by `pytest-django`, which is already signed into the admin:
36  
37  ```python
38  def test_admin_create_location_sets_public_id(admin_client):
39      response = admin_client.post(
40          "/admin/core/location/add/",
45  
46  ```python
47  import pytest
48  
49  
50  @pytest.fixture()
51  def admin_client(client, admin_user):
52      client.force_login(admin_user)
53      return client
54  
55  # Then write tests like this:
56  def test_admin_create_location_sets_public_id(admin_client):
57      response = admin_client.post(
58          "/admin/core/location/add/",

til/django/just-with-django.md

31  # I used *options to allow this to accept options, which means I can run:
32  #
33  #    just test -k auth --pdb
34  #
35  # To pass the "-k auth --pdb" options to pytest
36  
37  @test *options:
38    pipenv run pytest {{options}}
39  
40  # This starts the Django development server with an extra environment variable
56  export DATABASE_URL := "postgresql://localhost/myproject"
57  
58  @test *options:
59    pipenv run pytest {{options}}
60  
61  @server:
75      manage *options
76      server
77      test *options
78  ```
79  
80  To run all of my tests:
81  
82      just
83  
84  To run specific tests:
85  
86      just test -k name
87  
88  To run tests, stopping at the first error and opening a debugger:
89  
90      just test -x --pdb
91  
92  To start my development server running:

til/django/efficient-bulk-deletions-in-django.md

25  This didn't quite work either, because I have another model `Location` with foreign key references to those reports. So I added this:
26  ```python
27  Location.objects.filter(latest_report__public_id__in=report_ids).update(
28      latest_report=None
29  )
30  ```
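With those references cleared, the reports themselves can then be deleted; here's a hedged sketch of doing that in batches (the `Report` model and `report_ids` list are assumptions, not taken from the code above):

```python
# Delete the now-unreferenced reports in fixed-size batches so each
# DELETE statement stays small (Report and report_ids are assumed names)
BATCH_SIZE = 1000

for i in range(0, len(report_ids), BATCH_SIZE):
    batch = report_ids[i : i + BATCH_SIZE]
    Report.objects.filter(public_id__in=batch).delete()
```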

til/datasette/search-all-columns-trick.md

78  I tried this against the FiveThirtyEight database and the query it produced was way beyond the URL length limit for Cloud Run.
79  
80  Here's the result if [run against latest.datasette.io/fixtures](https://latest.datasette.io/fixtures?sql=with+tables+as+%28%0D%0A++select%0D%0A++++name+as+table_name%0D%0A++from%0D%0A++++sqlite_master%0D%0A++where%0D%0A++++type+%3D+%27table%27%0D%0A%29%2C%0D%0Aqueries+as+%28%0D%0A++select%0D%0A++++%27select+%27%27%27+%7C%7C+tables.table_name+%7C%7C+%27%27%27+as+_table%2C+rowid+from+%22%27+%7C%7C+tables.table_name+%7C%7C+%27%22+where+%27+%7C%7C+group_concat%28%0D%0A++++++%27%22%27+%7C%7C+name+%7C%7C+%27%22+like+%27%27%25%27%27+%7C%7C+%3Asearch+%7C%7C+%27%27%25%27%27%27%2C%0D%0A++++++%27+or+%27%0D%0A++++%29+as+query%0D%0A++from%0D%0A++++pragma_table_info%28tables.table_name%29%2C%0D%0A++++tables%0D%0A++group+by%0D%0A++++tables.table_name%0D%0A%29%0D%0Aselect%0D%0A++group_concat%28query%2C+%27+union+all+%27%29%0D%0Afrom%0D%0A++queries):
81  
82  ```sql
386   or "frequency" like '%' || :search || '%'
387 ```
388 [It works!](https://latest.datasette.io/fixtures?sql=select%0D%0A++%27123_starts_with_digits%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22123_starts_with_digits%22%0D%0Awhere%0D%0A++%22content%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27Table+With+Space+In+Name%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22Table+With+Space+In+Name%22%0D%0Awhere%0D%0A++%22pk%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22content%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27attraction_characteristic%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22attraction_characteristic%22%0D%0Awhere%0D%0A++%22pk%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22name%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27binary_data%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22binary_data%22%0D%0Awhere%0D%0A++%22data%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27complex_foreign_keys%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22complex_foreign_keys%22%0D%0Awhere%0D%0A++%22pk%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22f1%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22f2%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22f3%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27compound_primary_key%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22compound_primary_key%22%0D%0Awhere%0D%0A++%22pk1%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22pk2%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22content%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27compound_three_primary_keys%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22compound_three_primary_keys%22%0D%0Awhere%0D%0A++%22pk1%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22pk2%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22pk3%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22content%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27custom_foreign_key_label%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22custom_foreign_key_label%22%0D%0Awhere%0D%0A++%22pk%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22foreign_key_with_custom_label%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27facet_cities%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22facet_cities%22%0D%0Awhere%0D%0A++%22id%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22name%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27facetable%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22facetable%22%0D%0Awhere%0D%0A++%22pk%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22created%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22planet_int%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22on_earth%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22state%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22city_id%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22neighborhood%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22tags%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22complex_array%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%2
7%25%27%0D%0A++or+%22distinct_some_null%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27foreign_key_references%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22foreign_key_references%22%0D%0Awhere%0D%0A++%22pk%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22foreign_key_with_label%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22foreign_key_with_blank_label%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22foreign_key_with_no_label%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22foreign_key_compound_pk1%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22foreign_key_compound_pk2%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27infinity%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22infinity%22%0D%0Awhere%0D%0A++%22value%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27no_primary_key%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22no_primary_key%22%0D%0Awhere%0D%0A++%22content%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22a%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22b%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22c%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27primary_key_multiple_columns%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22primary_key_multiple_columns%22%0D%0Awhere%0D%0A++%22id%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22content%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22content2%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27primary_key_multiple_columns_explicit_label%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22primary_key_multiple_columns_explicit_label%22%0D%0Awhere%0D%0A++%22id%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22content%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22content2%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27roadside_attraction_characteristics%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22roadside_attraction_characteristics%22%0D%0Awhere%0D%0A++%22attraction_id%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22characteristic_id%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27roadside_attractions%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22roadside_attractions%22%0D%0Awhere%0D%0A++%22pk%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22name%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22address%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22latitude%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22longitude%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27searchable%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22searchable%22%0D%0Awhere%0D%0A++%22pk%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22text1%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22text2%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22name+with+.+and+spaces%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27searchable_fts%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22searchable_fts%22%0D%0Awhere%0D%0A++%22text1%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%
27%0D%0A++or+%22text2%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22name+with+.+and+spaces%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27searchable_fts_docsize%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22searchable_fts_docsize%22%0D%0Awhere%0D%0A++%22docid%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22size%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27searchable_fts_segdir%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22searchable_fts_segdir%22%0D%0Awhere%0D%0A++%22level%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22idx%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22start_block%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22leaves_end_block%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22end_block%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22root%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27searchable_fts_segments%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22searchable_fts_segments%22%0D%0Awhere%0D%0A++%22blockid%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22block%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27searchable_fts_stat%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22searchable_fts_stat%22%0D%0Awhere%0D%0A++%22id%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22value%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27searchable_tags%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22searchable_tags%22%0D%0Awhere%0D%0A++%22searchable_id%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22tag%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27select%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22select%22%0D%0Awhere%0D%0A++%22group%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22having%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22and%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22json%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27simple_primary_key%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22simple_primary_key%22%0D%0Awhere%0D%0A++%22id%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22content%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27sortable%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22sortable%22%0D%0Awhere%0D%0A++%22pk1%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22pk2%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22content%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22sortable%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22sortable_with_nulls%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22sortable_with_nulls_2%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22text%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27table%2Fwith%2Fslashes.csv%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22table%2Fwith%2Fslashes.csv%22%0D%0Awhere%0D%0A++%22pk%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22content%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27tags%27+as+_table%2C%0D%0A++rowid%0D%0Afro
m%0D%0A++%22tags%22%0D%0Awhere%0D%0A++%22tag%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0Aunion+all%0D%0Aselect%0D%0A++%27units%27+as+_table%2C%0D%0A++rowid%0D%0Afrom%0D%0A++%22units%22%0D%0Awhere%0D%0A++%22pk%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22distance%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27%0D%0A++or+%22frequency%22+like+%27%25%27+%7C%7C+%3Asearch+%7C%7C+%27%25%27&search=museum&_hide_sql=1)

til/cookiecutter/pytest-for-cookiecutter.md

1   # Testing cookiecutter templates with pytest
2   
3   I added some unit tests to my [datasette-plugin](https://github.com/simonw/datasette-plugin) cookiecutter template today, since the latest features involved adding a `hooks/post_gen_project.py` script.
4   
5   Here's [the full test script](https://github.com/simonw/datasette-plugin/blob/503e6fef8e1000ab70103a61571d47ce966064ba/tests/test_cookiecutter_template.py) I wrote. It lives in `tests/test_cookiecutter_template.py` in the root of the repository.
6   
7   To run the tests I have to use `pytest tests` because running just `pytest` gets confused when it tries to run the templated tests that form part of the cookiecutter template.
8   
9   The pattern I'm using looks like this:
16  
17  
18  def test_static_and_templates(tmpdir):
19      cookiecutter(
20          template=TEMPLATE_DIRECTORY,
33          "datasette-foo/.github/workflows",
34          "datasette-foo/.github/workflows/publish.yml",
35          "datasette-foo/.github/workflows/test.yml",
36          "datasette-foo/.gitignore",
37          "datasette-foo/datasette_foo",
41          "datasette-foo/README.md",
42          "datasette-foo/setup.py",
43          "datasette-foo/tests",
44          "datasette-foo/tests/test_foo.py",
45      }
46      setup_py = (tmpdir / "datasette-foo" / "setup.py").read_text("utf-8")

til/bash/finding-bom-csv-files-with-ripgrep.md

1   # Finding CSV files that start with a BOM using ripgrep
2   
3   For [sqlite-utils issue 250](https://github.com/simonw/sqlite-utils/issues/250) I needed to locate some test CSV files that start with a UTF-8 BOM.
4   
5   Here's how I did that using [ripgrep](https://github.com/BurntSushi/ripgrep):

til/aws/s3-cors.md

5   This configuration happens at the bucket level - it's not something that can be applied to individual items.
6   
7   [Here's their documentation](https://docs.aws.amazon.com/AmazonS3/latest/userguide/enabling-cors-examples.html). As with so many AWS things it involves hand-crafting a JSON document: the documentation for that format, with useful examples, [is here](https://docs.aws.amazon.com/AmazonS3/latest/userguide/ManageCorsUsing.html).
8   
9   I opted to use the S3 web console option - find the bucket in the console interface, click the "Security" tab and you can paste in a JSON configuration.

til/asgi/lifespan-test-httpx.md

1   # Writing tests for the ASGI lifespan protocol with HTTPX
2   
3   Uvicorn silently ignores exceptions that occur during startup against the ASGI lifespan protocol - see [starlette/issues/486](https://github.com/encode/starlette/issues/486).
7   This exposed a bug in `datasette-debug-asgi`: it wasn't handling lifespan events correctly. [datasette-debug-asgi/issues/1](https://github.com/simonw/datasette-debug-asgi/issues/1)
8   
9   The unit tests weren't catching this because using HTTPX to make test requests [doesn't trigger lifespan events](https://github.com/encode/httpx/issues/350).
10  
11  Florimond Manca had run into this problem too, and built [asgi-lifespan](https://github.com/florimondmanca/asgi-lifespan) to address it.
13  You can wrap an ASGI app in `async with LifespanManager(app):` and the correct lifespan events will be fired by that with block.
14  
15  Here's how to use it to [trigger lifespan events in a test](https://github.com/simonw/datasette-debug-asgi/blob/72d568d32a3159c763ce908c0b269736935c6987/test_datasette_debug_asgi.py):
16  
17  ```python
18  from asgi_lifespan import LifespanManager
19  
20  @pytest.mark.asyncio
21  async def test_datasette_debug_asgi():
22      ds = Datasette([], memory=True)
23      app = ds.app()
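    # A hedged sketch of how the rest of such a test can go: wrap the app
    # so lifespan events fire (as described above), then make a request
    # with httpx (assumes httpx is imported)
    async with LifespanManager(app):
        async with httpx.AsyncClient(app=app) as client:
            response = await client.get("http://localhost/-/versions.json")
            assert response.status_code == 200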

til/README.md

16  * [Running different steps on a schedule](https://github.com/simonw/til/blob/main/github-actions/different-steps-on-a-schedule.md) - 2020-04-20
17  * [Updating a Markdown table of contents with a GitHub Action](https://github.com/simonw/til/blob/main/github-actions/markdown-table-of-contents.md) - 2020-07-22
18  * [Using grep to write tests in CI](https://github.com/simonw/til/blob/main/github-actions/grep-tests.md) - 2020-08-19
19  * [Skipping a GitHub Actions step without failing](https://github.com/simonw/til/blob/main/github-actions/continue-on-error.md) - 2020-08-22
20  * [Open a debugging shell in GitHub Actions with tmate](https://github.com/simonw/til/blob/main/github-actions/debug-tmate.md) - 2020-09-14
21  * [Talking to a PostgreSQL service container from inside a Docker container](https://github.com/simonw/til/blob/main/github-actions/service-containers-docker.md) - 2020-09-18
22  * [Using Prettier to check JavaScript code style in GitHub Actions](https://github.com/simonw/til/blob/main/github-actions/prettier-github-actions.md) - 2020-12-31
23  * [Running tests against PostgreSQL in a service container](https://github.com/simonw/til/blob/main/github-actions/postgresq-service-container.md) - 2021-02-23
24  * [Installing different PostgreSQL server versions in GitHub Actions](https://github.com/simonw/til/blob/main/github-actions/different-postgresql-versions.md) - 2021-07-05
25  * [Attaching a generated file to a GitHub release using Actions](https://github.com/simonw/til/blob/main/github-actions/attach-generated-file-to-release.md) - 2021-09-07
27  * [Testing against Python 3.11 preview using GitHub Actions](https://github.com/simonw/til/blob/main/github-actions/python-3-11.md) - 2022-02-02
28  * [Using the GitHub Actions cache with npx and no package.json](https://github.com/simonw/til/blob/main/github-actions/npm-cache-with-npx-no-package.md) - 2022-03-22
29  * [Deploying a live Datasette demo when the tests pass](https://github.com/simonw/til/blob/main/github-actions/deploy-live-demo-when-tests-pass.md) - 2022-03-27
30  * [GitHub Actions job summaries](https://github.com/simonw/til/blob/main/github-actions/job-summaries.md) - 2022-05-17
31  * [Optimizing PNGs in GitHub Actions using Oxipng](https://github.com/simonw/til/blob/main/github-actions/oxipng.md) - 2022-05-18
108 ## macos
109 
110 * [Running pip install -e .[test] in zsh on macOS Catalina](https://github.com/simonw/til/blob/main/macos/zsh-pip-install.md) - 2020-04-21
111 * [Get Skitch working on Catalina](https://github.com/simonw/til/blob/main/macos/skitch-catalina.md) - 2020-04-21
112 * [Close terminal window on Ctrl+D for macOS](https://github.com/simonw/til/blob/main/macos/close-terminal-on-ctrl-d.md) - 2020-04-21
136 * [Restricting SSH connections to devices within a Tailscale network](https://github.com/simonw/til/blob/main/tailscale/lock-down-sshd.md) - 2020-04-23
137 
138 ## pytest
139 
140 * [Session-scoped temporary directories in pytest](https://github.com/simonw/til/blob/main/pytest/session-scoped-tmp.md) - 2020-04-26
141 * [How to mock httpx using pytest-mock](https://github.com/simonw/til/blob/main/pytest/mock-httpx.md) - 2020-04-29
142 * [Asserting a dictionary is a subset of another dictionary](https://github.com/simonw/til/blob/main/pytest/assert-dictionary-subset.md) - 2020-05-28
143 * [Registering temporary pluggy plugins inside tests](https://github.com/simonw/til/blob/main/pytest/registering-plugins-in-tests.md) - 2020-07-21
144 * [Code coverage using pytest and codecov.io](https://github.com/simonw/til/blob/main/pytest/pytest-code-coverage.md) - 2020-08-15
145 * [Start a server in a subprocess during a pytest session](https://github.com/simonw/til/blob/main/pytest/subprocess-server.md) - 2020-08-31
146 * [Using VCR and pytest with pytest-recording](https://github.com/simonw/til/blob/main/pytest/pytest-recording-vcr.md) - 2021-11-02
147 * [Quick and dirty mock testing with mock_calls](https://github.com/simonw/til/blob/main/pytest/pytest-mock-calls.md) - 2021-11-02
148 * [Writing pytest tests against tools written with argparse](https://github.com/simonw/til/blob/main/pytest/pytest-argparse.md) - 2022-01-08
149 * [Testing a Click app with streaming input](https://github.com/simonw/til/blob/main/pytest/test-click-app-with-streaming-input.md) - 2022-01-09
150 * [Opt-in integration tests with pytest --integration](https://github.com/simonw/til/blob/main/pytest/only-run-integration.md) - 2022-01-26
151 * [pytest coverage with context](https://github.com/simonw/til/blob/main/pytest/coverage-with-context.md) - 2022-03-04
152 * [Async fixtures with pytest-asyncio](https://github.com/simonw/til/blob/main/pytest/async-fixtures.md) - 2022-03-19
153 * [Treating warnings as errors in pytest](https://github.com/simonw/til/blob/main/pytest/treat-warnings-as-errors.md) - 2022-04-01
154 
155 ## github
184 ## asgi
185 
186 * [Writing tests for the ASGI lifespan protocol with HTTPX](https://github.com/simonw/til/blob/main/asgi/lifespan-test-httpx.md) - 2020-06-29
187 
188 ## heroku
212 * [PostgreSQL full-text search in the Django Admin](https://github.com/simonw/til/blob/main/django/postgresql-full-text-search-admin.md) - 2020-07-25
213 * [Adding extra read-only information to a Django admin change page](https://github.com/simonw/til/blob/main/django/extra-read-only-admin-information.md) - 2021-02-25
214 * [Writing tests for the Django admin with pytest-django](https://github.com/simonw/til/blob/main/django/testing-django-admin-with-pytest.md) - 2021-03-02
215 * [Show the timezone for datetimes in the Django admin](https://github.com/simonw/til/blob/main/django/show-timezone-in-django-admin.md) - 2021-03-02
216 * [Pretty-printing all read-only JSON in the Django admin](https://github.com/simonw/til/blob/main/django/pretty-print-json-admin.md) - 2021-03-07
240 * [Upgrading Python Homebrew packages using pip](https://github.com/simonw/til/blob/main/homebrew/upgrading-python-homebrew-packages.md) - 2020-10-14
241 * [Running a MySQL server using Homebrew](https://github.com/simonw/til/blob/main/homebrew/mysql-homebrew.md) - 2021-06-11
242 * [Running the latest SQLite in Datasette using Homebrew](https://github.com/simonw/til/blob/main/homebrew/latest-sqlite.md) - 2022-02-28
243 
244 ## zsh
252 * [Using custom Sphinx templates on Read the Docs](https://github.com/simonw/til/blob/main/readthedocs/custom-sphinx-templates.md) - 2020-12-07
253 * [Promoting the stable version of the documentation using rel=canonical](https://github.com/simonw/til/blob/main/readthedocs/documentation-seo-canonical.md) - 2022-01-20
254 * [Linking from /latest/ to /stable/ on Read The Docs](https://github.com/simonw/til/blob/main/readthedocs/link-from-latest-to-stable.md) - 2022-01-20
255 
256 ## ics
330 ## cookiecutter
331 
332 * [Testing cookiecutter templates with pytest](https://github.com/simonw/til/blob/main/cookiecutter/pytest-for-cookiecutter.md) - 2021-01-27
333 * [Conditionally creating directories in cookiecutter](https://github.com/simonw/til/blob/main/cookiecutter/conditionally-creating-directories.md) - 2021-01-27
334 

tableau-to-sqlite/tests/test_tableau_to_sqlite.py

1   from click.testing import CliRunner
2   from tableau_to_sqlite.cli import cli
3   import pathlib
9   
10  @vcr.use_cassette(str(fixtures / "cassette.yml"))
11  def test_run(tmpdir):
12      runner = CliRunner()
13      db_path = str(tmpdir / "tableau.db")

swarm-to-sqlite/tests/test_save_checkin.py

1   from swarm_to_sqlite import utils
2   import pytest
3   import json
4   import sqlite_utils
12  
13  
14  @pytest.fixture(scope="session")
15  def converted():
16      db = sqlite_utils.Database(":memory:")
21  
22  
23  def test_tables(converted):
24      assert {
25          "venues",
40  
41  
42  def test_venue(converted):
43      venue = list(converted["venues"].rows)[0]
44      assert {
75  
76  
77  def test_event(converted):
78      event = list(converted["events"].rows)[0]
79      assert {"id": "5bf8e4fb646e38002c472397", "name": "A movie"} == event
96  
97  
98  def test_sticker(converted):
99      sticker = list(converted["stickers"].rows)[0]
100     assert {
112 
113 
114 def test_likes(converted):
115     likes = list(converted["likes"].rows)
116     assert [
121 
122 
123 def test_with_(converted):
124     with_ = list(converted["with"].rows)
125     assert [{"users_id": "900", "checkins_id": "592b2cfe09e28339ac543fde"}] == with_
126 
127 
128 def test_users(converted):
129     users = list(converted["users"].rows)
130     assert [
177 
178 
179 def test_photos(converted):
180     assert [
181         ForeignKey(
227 
228 
229 def test_posts(converted):
230     assert [
231         ForeignKey(
254 
255 
256 def test_checkin_with_no_event():
257     checkin = load_checkin()
258     # If no event in checkin, event column should be None
265 
266 
267 def test_view(converted):
268     assert {"checkin_details", "venue_details"} == set(converted.view_names())
269     assert [

swarm-to-sqlite/setup.py

33      """,
34      install_requires=["sqlite-utils>=3.3", "click", "requests"],
35      extras_require={"test": ["pytest"]},
36      tests_require=["swarm-to-sqlite[test]"],
37  )

tableau-to-sqlite/setup.py

33      """,
34      install_requires=["click", "TableauScraper==0.1.3"],
35      extras_require={"test": ["pytest", "vcrpy"]},
36      tests_require=["tableau-to-sqlite[test]"],
37      python_requires=">=3.6",
38  )

tableau-to-sqlite/README.md

66      pipenv shell
67  
68  Now install the dependencies and test dependencies:
69  
70      pip install -e '.[test]'
71  
72  To run the tests:
73  
74      pytest

srccon-2020-datasette/build_database.py

7   
8   
9   def parse_times(s, datestring):
10      begin, end = s.split(" ")[0].split("-")
11      if not begin.endswith("m"):
13          begin += end[-2:]
14      begin_dt = parser.parse(
15          begin + " ET " + datestring, tzinfos={"ET": "America/New_York"}
16      )
17      end_dt = parser.parse(end + " ET " + datestring, tzinfos={"ET": "America/New_York"})
18      return begin_dt, end_dt
19  

srccon-2020-datasette/README.md

5   In this repository:
6   
7   * A [build_database.py](https://github.com/simonw/srccon-2020-datasette/blob/main/build_database.py) script which grabs the latest [SRCCON 2020 schedule JSON file](https://github.com/OpenNews/srccon-2020/blob/master/schedule/sessions.json) and uses the [sqlite-utils](https://github.com/simonw/sqlite-utils) Python library to convert it into a SQLite database
8   * A GitHub Actions workflow in [.github/workflows/build.yml](https://github.com/simonw/srccon-2020-datasette/blob/main/.github/workflows/build.yml) which runs that script and then deploys the resulting database to [Vercel](https://vercel.com/) using [datasette-publish-now](https://github.com/simonw/datasette-publish-now)
9   

sqlite-utils/tests/test_wal.py

1   import pytest
2   from sqlite_utils import Database
3   
4   
5   @pytest.fixture
6   def db_path_tmpdir(tmpdir):
7       path = tmpdir / "test.db"
8       db = Database(str(path))
9       return db, path, tmpdir
10  
11  
12  def test_enable_disable_wal(db_path_tmpdir):
13      db, path, tmpdir = db_path_tmpdir
14      assert len(tmpdir.listdir()) == 1
15      assert "delete" == db.journal_mode
16      assert "test.db-wal" not in [f.basename for f in tmpdir.listdir()]
17      db.enable_wal()
18      assert "wal" == db.journal_mode
19      db["test"].insert({"foo": "bar"})
20      assert "test.db-wal" in [f.basename for f in tmpdir.listdir()]
21      db.disable_wal()
22      assert "delete" == db.journal_mode
23      assert "test.db-wal" not in [f.basename for f in tmpdir.listdir()]

sqlite-utils/tests/test_utils.py

2   import csv
3   import io
4   import pytest
5   
6   
7   @pytest.mark.parametrize(
8       "input,expected,should_be_is",
9       [
17      ],
18  )
19  def test_decode_base64_values(input, expected, should_be_is):
20      actual = utils.decode_base64_values(input)
21      if should_be_is:
25  
26  
27  @pytest.mark.parametrize(
28      "size,expected",
29      (
34      ),
35  )
36  def test_chunks(size, expected):
37      input = ["a", "b", "c", "d"]
38      chunks = list(map(list, utils.chunks(input, size)))
40  
41  
42  def test_hash_record():
43      expected = "d383e7c0ba88f5ffcdd09be660de164b3847401a"
44      assert utils.hash_record({"name": "Cleo", "twitter": "CleoPaws"}) == expected
54  
55  
56  def test_maximize_csv_field_size_limit():
57      # Reset to default in case other tests have changed it
58      csv.field_size_limit(utils.ORIGINAL_CSV_FIELD_SIZE_LIMIT)
59      long_value = "a" * 131073
61      fp = io.BytesIO(long_csv.encode("utf-8"))
62      # Using rows_from_file should error
63      with pytest.raises(csv.Error):
64          rows, _ = utils.rows_from_file(fp, utils.Format.CSV)
65          list(rows)