From 9dc7a1824be0a876b8f54e0edd4467d9a357e0cb Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 13 Apr 2019 15:49:07 -0700 Subject: [PATCH 1/6] Slightly more interesting example link --- docs/json_api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/json_api.rst b/docs/json_api.rst index 2606f3a0..cd034568 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -218,7 +218,7 @@ The Datasette table view takes a number of special querystring arguments: Some examples: - * `facetable?_where=state="MI"&_where=city_id=3 `__ + * `facetable?_where=neighborhood like "%c%"&_where=city_id=3 `__ * `facetable?_where=city_id in (select id from facet_cities where name != "Detroit") `__ ``?_group_count=COLUMN`` From 6da567dda953c7ac0e5500f17d8e220467a3499e Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 15 Apr 2019 14:51:20 -0700 Subject: [PATCH 2/6] Extract and refactor filters into filters.py This will help in implementing __in as a filter, refs #433 --- datasette/filters.py | 156 +++++++++++++++++++++++++++++++++++++++ datasette/utils.py | 137 ---------------------------------- datasette/views/table.py | 2 +- tests/test_filters.py | 64 ++++++++++++++++ tests/test_utils.py | 63 +--------------- 5 files changed, 222 insertions(+), 200 deletions(-) create mode 100644 datasette/filters.py create mode 100644 tests/test_filters.py diff --git a/datasette/filters.py b/datasette/filters.py new file mode 100644 index 00000000..5fd722f3 --- /dev/null +++ b/datasette/filters.py @@ -0,0 +1,156 @@ +import numbers +from .utils import detect_json1 + + +class Filter: + key = None + display = None + no_argument = False + + def where_clause(self, table, column, value, param_counter): + raise NotImplementedError + + def human_clause(self, column, value): + raise NotImplementedError + + +class TemplatedFilter(Filter): + def __init__(self, key, display, sql_template, human_template, format='{}', numeric=False, no_argument=False): + self.key = key + self.display = display + self.sql_template = sql_template + self.human_template = human_template + self.format = format + self.numeric = numeric + self.no_argument = no_argument + + def where_clause(self, table, column, value, param_counter): + converted = self.format.format(value) + if self.numeric and converted.isdigit(): + converted = int(converted) + if self.no_argument: + kwargs = { + 'c': column, + } + converted = None + else: + kwargs = { + 'c': column, + 'p': 'p{}'.format(param_counter), + 't': table, + } + return self.sql_template.format(**kwargs), converted + + def human_clause(self, column, value): + if callable(self.human_template): + template = self.human_template(column, value) + else: + template = self.human_template + if self.no_argument: + return template.format(c=column) + else: + return template.format(c=column, v=value) + + +class Filters: + _filters = [ + # key, display, sql_template, human_template, format=, numeric=, no_argument= + TemplatedFilter('exact', '=', '"{c}" = :{p}', lambda c, v: '{c} = {v}' if v.isdigit() else '{c} = "{v}"'), + TemplatedFilter('not', '!=', '"{c}" != :{p}', lambda c, v: '{c} != {v}' if v.isdigit() else '{c} != "{v}"'), + TemplatedFilter('contains', 'contains', '"{c}" like :{p}', '{c} contains "{v}"', format='%{}%'), + TemplatedFilter('endswith', 'ends with', '"{c}" like :{p}', '{c} ends with "{v}"', format='%{}'), + TemplatedFilter('startswith', 'starts with', '"{c}" like :{p}', '{c} starts with "{v}"', format='{}%'), + TemplatedFilter('gt', '>', '"{c}" > :{p}', '{c} > {v}', 
numeric=True), + TemplatedFilter('gte', '\u2265', '"{c}" >= :{p}', '{c} \u2265 {v}', numeric=True), + TemplatedFilter('lt', '<', '"{c}" < :{p}', '{c} < {v}', numeric=True), + TemplatedFilter('lte', '\u2264', '"{c}" <= :{p}', '{c} \u2264 {v}', numeric=True), + TemplatedFilter('glob', 'glob', '"{c}" glob :{p}', '{c} glob "{v}"'), + TemplatedFilter('like', 'like', '"{c}" like :{p}', '{c} like "{v}"'), + ] + ([TemplatedFilter('arraycontains', 'array contains', """rowid in ( + select {t}.rowid from {t}, json_each({t}.{c}) j + where j.value = :{p} + )""", '{c} contains "{v}"') + ] if detect_json1() else []) + [ + TemplatedFilter('isnull', 'is null', '"{c}" is null', '{c} is null', no_argument=True), + TemplatedFilter('notnull', 'is not null', '"{c}" is not null', '{c} is not null', no_argument=True), + TemplatedFilter('isblank', 'is blank', '("{c}" is null or "{c}" = "")', '{c} is blank', no_argument=True), + TemplatedFilter('notblank', 'is not blank', '("{c}" is not null and "{c}" != "")', '{c} is not blank', no_argument=True), + ] + _filters_by_key = { + f.key: f for f in _filters + } + + def __init__(self, pairs, units={}, ureg=None): + self.pairs = pairs + self.units = units + self.ureg = ureg + + def lookups(self): + "Yields (lookup, display, no_argument) pairs" + for filter in self._filters: + yield filter.key, filter.display, filter.no_argument + + def human_description_en(self, extra=None): + bits = [] + if extra: + bits.extend(extra) + for column, lookup, value in self.selections(): + filter = self._filters_by_key.get(lookup, None) + if filter: + bits.append(filter.human_clause(column, value)) + # Comma separated, with an ' and ' at the end + and_bits = [] + commas, tail = bits[:-1], bits[-1:] + if commas: + and_bits.append(', '.join(commas)) + if tail: + and_bits.append(tail[0]) + s = ' and '.join(and_bits) + if not s: + return '' + return 'where {}'.format(s) + + def selections(self): + "Yields (column, lookup, value) tuples" + for key, value in self.pairs: + if '__' in key: + column, lookup = key.rsplit('__', 1) + else: + column = key + lookup = 'exact' + yield column, lookup, value + + def has_selections(self): + return bool(self.pairs) + + def convert_unit(self, column, value): + "If the user has provided a unit in the query, convert it into the column unit, if present." 
+ if column not in self.units: + return value + + # Try to interpret the value as a unit + value = self.ureg(value) + if isinstance(value, numbers.Number): + # It's just a bare number, assume it's the column unit + return value + + column_unit = self.ureg(self.units[column]) + return value.to(column_unit).magnitude + + def build_where_clauses(self, table): + sql_bits = [] + params = {} + i = 0 + for column, lookup, value in self.selections(): + filter = self._filters_by_key.get(lookup, None) + if filter: + sql_bit, param = filter.where_clause(table, column, self.convert_unit(column, value), i) + sql_bits.append(sql_bit) + if param is not None: + if not isinstance(param, list): + param = [param] + for individual_param in param: + param_id = 'p{}'.format(i) + params[param_id] = individual_param + i += 1 + return sql_bits, params diff --git a/datasette/utils.py b/datasette/utils.py index bb5c17d6..0c161ac6 100644 --- a/datasette/utils.py +++ b/datasette/utils.py @@ -584,143 +584,6 @@ def table_columns(conn, table): ] -class Filter: - def __init__(self, key, display, sql_template, human_template, format='{}', numeric=False, no_argument=False): - self.key = key - self.display = display - self.sql_template = sql_template - self.human_template = human_template - self.format = format - self.numeric = numeric - self.no_argument = no_argument - - def where_clause(self, table, column, value, param_counter): - converted = self.format.format(value) - if self.numeric and converted.isdigit(): - converted = int(converted) - if self.no_argument: - kwargs = { - 'c': column, - } - converted = None - else: - kwargs = { - 'c': column, - 'p': 'p{}'.format(param_counter), - 't': table, - } - return self.sql_template.format(**kwargs), converted - - def human_clause(self, column, value): - if callable(self.human_template): - template = self.human_template(column, value) - else: - template = self.human_template - if self.no_argument: - return template.format(c=column) - else: - return template.format(c=column, v=value) - - -class Filters: - _filters = [ - # key, display, sql_template, human_template, format=, numeric=, no_argument= - Filter('exact', '=', '"{c}" = :{p}', lambda c, v: '{c} = {v}' if v.isdigit() else '{c} = "{v}"'), - Filter('not', '!=', '"{c}" != :{p}', lambda c, v: '{c} != {v}' if v.isdigit() else '{c} != "{v}"'), - Filter('contains', 'contains', '"{c}" like :{p}', '{c} contains "{v}"', format='%{}%'), - Filter('endswith', 'ends with', '"{c}" like :{p}', '{c} ends with "{v}"', format='%{}'), - Filter('startswith', 'starts with', '"{c}" like :{p}', '{c} starts with "{v}"', format='{}%'), - Filter('gt', '>', '"{c}" > :{p}', '{c} > {v}', numeric=True), - Filter('gte', '\u2265', '"{c}" >= :{p}', '{c} \u2265 {v}', numeric=True), - Filter('lt', '<', '"{c}" < :{p}', '{c} < {v}', numeric=True), - Filter('lte', '\u2264', '"{c}" <= :{p}', '{c} \u2264 {v}', numeric=True), - Filter('glob', 'glob', '"{c}" glob :{p}', '{c} glob "{v}"'), - Filter('like', 'like', '"{c}" like :{p}', '{c} like "{v}"'), - ] + ([Filter('arraycontains', 'array contains', """rowid in ( - select {t}.rowid from {t}, json_each({t}.{c}) j - where j.value = :{p} - )""", '{c} contains "{v}"') - ] if detect_json1() else []) + [ - Filter('isnull', 'is null', '"{c}" is null', '{c} is null', no_argument=True), - Filter('notnull', 'is not null', '"{c}" is not null', '{c} is not null', no_argument=True), - Filter('isblank', 'is blank', '("{c}" is null or "{c}" = "")', '{c} is blank', no_argument=True), - Filter('notblank', 'is not blank', '("{c}" is 
not null and "{c}" != "")', '{c} is not blank', no_argument=True), - ] - _filters_by_key = { - f.key: f for f in _filters - } - - def __init__(self, pairs, units={}, ureg=None): - self.pairs = pairs - self.units = units - self.ureg = ureg - - def lookups(self): - "Yields (lookup, display, no_argument) pairs" - for filter in self._filters: - yield filter.key, filter.display, filter.no_argument - - def human_description_en(self, extra=None): - bits = [] - if extra: - bits.extend(extra) - for column, lookup, value in self.selections(): - filter = self._filters_by_key.get(lookup, None) - if filter: - bits.append(filter.human_clause(column, value)) - # Comma separated, with an ' and ' at the end - and_bits = [] - commas, tail = bits[:-1], bits[-1:] - if commas: - and_bits.append(', '.join(commas)) - if tail: - and_bits.append(tail[0]) - s = ' and '.join(and_bits) - if not s: - return '' - return 'where {}'.format(s) - - def selections(self): - "Yields (column, lookup, value) tuples" - for key, value in self.pairs: - if '__' in key: - column, lookup = key.rsplit('__', 1) - else: - column = key - lookup = 'exact' - yield column, lookup, value - - def has_selections(self): - return bool(self.pairs) - - def convert_unit(self, column, value): - "If the user has provided a unit in the query, convert it into the column unit, if present." - if column not in self.units: - return value - - # Try to interpret the value as a unit - value = self.ureg(value) - if isinstance(value, numbers.Number): - # It's just a bare number, assume it's the column unit - return value - - column_unit = self.ureg(self.units[column]) - return value.to(column_unit).magnitude - - def build_where_clauses(self, table): - sql_bits = [] - params = {} - for i, (column, lookup, value) in enumerate(self.selections()): - filter = self._filters_by_key.get(lookup, None) - if filter: - sql_bit, param = filter.where_clause(table, column, self.convert_unit(column, value), i) - sql_bits.append(sql_bit) - if param is not None: - param_id = 'p{}'.format(i) - params[param_id] = param - return sql_bits, params - - filter_column_re = re.compile(r'^_filter_column_\d+$') diff --git a/datasette/views/table.py b/datasette/views/table.py index 5923ac92..2c356bda 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -7,7 +7,6 @@ from sanic.request import RequestParameters from datasette.plugins import pm from datasette.utils import ( CustomRow, - Filters, InterruptedError, append_querystring, compound_keys_after_sql, @@ -27,6 +26,7 @@ from datasette.utils import ( urlsafe_components, value_as_boolean, ) +from datasette.filters import Filters from .base import BaseView, DatasetteError, ureg LINK_WITH_LABEL = '{label} {id}' diff --git a/tests/test_filters.py b/tests/test_filters.py new file mode 100644 index 00000000..b0cb3f34 --- /dev/null +++ b/tests/test_filters.py @@ -0,0 +1,64 @@ +from datasette.filters import Filters +import pytest + + +@pytest.mark.parametrize('args,expected_where,expected_params', [ + ( + { + 'name_english__contains': 'foo', + }, + ['"name_english" like :p0'], + ['%foo%'] + ), + ( + { + 'foo': 'bar', + 'bar__contains': 'baz', + }, + ['"bar" like :p0', '"foo" = :p1'], + ['%baz%', 'bar'] + ), + ( + { + 'foo__startswith': 'bar', + 'bar__endswith': 'baz', + }, + ['"bar" like :p0', '"foo" like :p1'], + ['%baz', 'bar%'] + ), + ( + { + 'foo__lt': '1', + 'bar__gt': '2', + 'baz__gte': '3', + 'bax__lte': '4', + }, + ['"bar" > :p0', '"bax" <= :p1', '"baz" >= :p2', '"foo" < :p3'], + [2, 4, 3, 1] + ), + ( + { + 'foo__like': 
'2%2', + 'zax__glob': '3*', + }, + ['"foo" like :p0', '"zax" glob :p1'], + ['2%2', '3*'] + ), + ( + { + 'foo__isnull': '1', + 'baz__isnull': '1', + 'bar__gt': '10' + }, + ['"bar" > :p0', '"baz" is null', '"foo" is null'], + [10] + ), +]) +def test_build_where(args, expected_where, expected_params): + f = Filters(sorted(args.items())) + sql_bits, actual_params = f.build_where_clauses("table") + assert expected_where == sql_bits + assert { + 'p{}'.format(i): param + for i, param in enumerate(expected_params) + } == actual_params diff --git a/tests/test_utils.py b/tests/test_utils.py index 07074e72..1ca202f4 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -3,6 +3,7 @@ Tests for various datasette helper functions. """ from datasette import utils +from datasette.filters import Filters import json import os import pytest @@ -133,68 +134,6 @@ def test_custom_json_encoder(obj, expected): assert expected == actual -@pytest.mark.parametrize('args,expected_where,expected_params', [ - ( - { - 'name_english__contains': 'foo', - }, - ['"name_english" like :p0'], - ['%foo%'] - ), - ( - { - 'foo': 'bar', - 'bar__contains': 'baz', - }, - ['"bar" like :p0', '"foo" = :p1'], - ['%baz%', 'bar'] - ), - ( - { - 'foo__startswith': 'bar', - 'bar__endswith': 'baz', - }, - ['"bar" like :p0', '"foo" like :p1'], - ['%baz', 'bar%'] - ), - ( - { - 'foo__lt': '1', - 'bar__gt': '2', - 'baz__gte': '3', - 'bax__lte': '4', - }, - ['"bar" > :p0', '"bax" <= :p1', '"baz" >= :p2', '"foo" < :p3'], - [2, 4, 3, 1] - ), - ( - { - 'foo__like': '2%2', - 'zax__glob': '3*', - }, - ['"foo" like :p0', '"zax" glob :p1'], - ['2%2', '3*'] - ), - ( - { - 'foo__isnull': '1', - 'baz__isnull': '1', - 'bar__gt': '10' - }, - ['"bar" > :p0', '"baz" is null', '"foo" is null'], - [10] - ), -]) -def test_build_where(args, expected_where, expected_params): - f = utils.Filters(sorted(args.items())) - sql_bits, actual_params = f.build_where_clauses("table") - assert expected_where == sql_bits - assert { - 'p{}'.format(i): param - for i, param in enumerate(expected_params) - } == actual_params - - @pytest.mark.parametrize('bad_sql', [ 'update blah;', 'PRAGMA case_sensitive_like = true' From 2c19a27d15a913e5f3dd443f04067169a6f24634 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 15 Apr 2019 15:41:11 -0700 Subject: [PATCH 3/6] Documentation for filters, plus new documentation unit test https://simonwillison.net/2018/Jul/28/documentation-unit-tests/ --- docs/json_api.rst | 64 +++++++++++++++++++++++++++++++++++++++++++--- tests/test_docs.py | 18 +++++++++++++ 2 files changed, 79 insertions(+), 3 deletions(-) diff --git a/docs/json_api.rst b/docs/json_api.rst index cd034568..1ea35672 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -176,10 +176,68 @@ querystring arguments: .. _table_arguments: -Special table arguments ------------------------ +Table arguments +--------------- -The Datasette table view takes a number of special querystring arguments: +The Datasette table view takes a number of special querystring arguments. + +Column filter arguments +~~~~~~~~~~~~~~~~~~~~~~~ + +You can filter the data returned by the table based on column values using a querystring argument. + +``?column__exact=value`` or ``?_column=value`` + Returns rows where the specified column exactly matches the value. + +``?column__not=value`` + Returns rows where the column does not match the value. + +``?column__contains=value`` + Rows where the string column contains the specified value (``column like "%value%"`` in SQL). 
+ +``?column__endswith=value`` + Rows where the string column ends with the specified value (``column like "%value"`` in SQL). + +``?column__startswith=value`` + Rows where the string column starts with the specified value (``column like "value%"`` in SQL). + +``?column__gt=value`` + Rows which are greater than the specified value. + +``?column__gte=value`` + Rows which are greater than or equal to the specified value. + +``?column__lt=value`` + Rows which are less than the specified value. + +``?column__lte=value`` + Rows which are less than or equal to the specified value. + +``?column__like=value`` + Match rows with a LIKE clause, case insensitive and with ``%`` as the wildcard character. + +``?column__glob=value`` + Similar to LIKE but uses Unix wildcard syntax and is case sensitive. + +``?column__arraycontains=value`` + Works against columns that contain JSON arrays - matches if any of the values in that array match. + + This is only available if the ``json1`` SQLite extension is enabled. + +``?column__isnull=1`` + Matches rows where the column is null. + +``?column__notnull=1`` + Matches rows where the column is not null. + +``?column__isblank=1`` + Matches rows where the column is blank, meaning null or the empty string. + +``?column__notblank=1`` + Matches rows where the column is not blank. + +Special table arguments +~~~~~~~~~~~~~~~~~~~~~~~ ``?_labels=on/off`` Expand foreign key references for every possible column. See below. diff --git a/tests/test_docs.py b/tests/test_docs.py index 6f84832d..caf1cff3 100644 --- a/tests/test_docs.py +++ b/tests/test_docs.py @@ -4,6 +4,7 @@ Tests to ensure certain things are documented. from click.testing import CliRunner from datasette import app from datasette.cli import cli +from datasette.filters import Filters from pathlib import Path import pytest import re @@ -71,3 +72,20 @@ def documented_views(): @pytest.mark.parametrize("view_class", [v for v in dir(app) if v.endswith("View")]) def test_view_classes_are_documented(documented_views, view_class): assert view_class in documented_views + + +@pytest.fixture(scope="session") +def documented_table_filters(): + json_api_rst = (docs_path / "json_api.rst").read_text() + section = json_api_rst.split(".. 
_table_arguments:")[-1] + # Lines starting with ``?column__exact= are docs for filters + return set( + line.split("__")[1].split("=")[0] + for line in section.split("\n") + if line.startswith("``?column__") + ) + + +@pytest.mark.parametrize("filter", [f.key for f in Filters._filters]) +def test_table_filters_are_documented(documented_table_filters, filter): + assert filter in documented_table_filters From 1c6649b19b09ea018d91a86db9c019a940fe36fc Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 15 Apr 2019 15:42:28 -0700 Subject: [PATCH 4/6] New colname__in=x,y,z filter, closes #433 --- datasette/filters.py | 33 +++++++++++++++++++++++++++++++-- docs/json_api.rst | 9 +++++++++ tests/test_filters.py | 22 ++++++++++++++++++++++ 3 files changed, 62 insertions(+), 2 deletions(-) diff --git a/datasette/filters.py b/datasette/filters.py index 5fd722f3..abaafc5b 100644 --- a/datasette/filters.py +++ b/datasette/filters.py @@ -1,5 +1,10 @@ +import json import numbers -from .utils import detect_json1 + +from .utils import ( + detect_json1, + escape_sqlite, +) class Filter: @@ -52,6 +57,29 @@ class TemplatedFilter(Filter): return template.format(c=column, v=value) +class InFilter(Filter): + key = 'in' + display = 'in' + + def __init__(self): + pass + + def split_value(self, value): + if value.startswith("["): + return json.loads(value) + else: + return [v.strip() for v in value.split(",")] + + def where_clause(self, table, column, value, param_counter): + values = self.split_value(value) + params = [":p{}".format(param_counter + i) for i in range(len(values))] + sql = "{} in ({})".format(escape_sqlite(column), ", ".join(params)) + return sql, values + + def human_clause(self, column, value): + return "{} in {}".format(column, json.dumps(self.split_value(value))) + + class Filters: _filters = [ # key, display, sql_template, human_template, format=, numeric=, no_argument= @@ -64,8 +92,9 @@ class Filters: TemplatedFilter('gte', '\u2265', '"{c}" >= :{p}', '{c} \u2265 {v}', numeric=True), TemplatedFilter('lt', '<', '"{c}" < :{p}', '{c} < {v}', numeric=True), TemplatedFilter('lte', '\u2264', '"{c}" <= :{p}', '{c} \u2264 {v}', numeric=True), - TemplatedFilter('glob', 'glob', '"{c}" glob :{p}', '{c} glob "{v}"'), TemplatedFilter('like', 'like', '"{c}" like :{p}', '{c} like "{v}"'), + TemplatedFilter('glob', 'glob', '"{c}" glob :{p}', '{c} glob "{v}"'), + InFilter(), ] + ([TemplatedFilter('arraycontains', 'array contains', """rowid in ( select {t}.rowid from {t}, json_each({t}.{c}) j where j.value = :{p} diff --git a/docs/json_api.rst b/docs/json_api.rst index 1ea35672..67700224 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -219,6 +219,15 @@ You can filter the data returned by the table based on column values using a que ``?column__glob=value`` Similar to LIKE but uses Unix wildcard syntax and is case sensitive. +``?column__in=value1,value2,value3`` + Rows where column matches any of the provided values. + + You can use a comma separated string, or you can use a JSON array. + + The JSON array option is useful if one of your matching values itself contains a comma: + + ``?column__in=["value","value,with,commas"]`` + ``?column__arraycontains=value`` Works against columns that contain JSON arrays - matches if any of the values in that array match. 
diff --git a/tests/test_filters.py b/tests/test_filters.py index b0cb3f34..a5d6e3d0 100644 --- a/tests/test_filters.py +++ b/tests/test_filters.py @@ -53,6 +53,28 @@ import pytest ['"bar" > :p0', '"baz" is null', '"foo" is null'], [10] ), + ( + { + 'foo__in': '1,2,3', + }, + ['foo in (:p0, :p1, :p2)'], + ["1", "2", "3"] + ), + # JSON array variants of __in (useful for unexpected characters) + ( + { + 'foo__in': '[1,2,3]', + }, + ['foo in (:p0, :p1, :p2)'], + [1, 2, 3] + ), + ( + { + 'foo__in': '["dog,cat", "cat[dog]"]', + }, + ['foo in (:p0, :p1)'], + ["dog,cat", "cat[dog]"] + ), ]) def test_build_where(args, expected_where, expected_params): f = Filters(sorted(args.items())) From 583b22aa28e26c318de0189312350ab2688c90b1 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 15 Apr 2019 15:54:54 -0700 Subject: [PATCH 5/6] New ?column__date=yyyy-mm-dd filter --- datasette/filters.py | 1 + docs/json_api.rst | 3 +++ tests/test_filters.py | 8 ++++++++ 3 files changed, 12 insertions(+) diff --git a/datasette/filters.py b/datasette/filters.py index abaafc5b..483f031f 100644 --- a/datasette/filters.py +++ b/datasette/filters.py @@ -100,6 +100,7 @@ class Filters: where j.value = :{p} )""", '{c} contains "{v}"') ] if detect_json1() else []) + [ + TemplatedFilter('date', 'date', 'date({c}) = :{p}', '"{c}" is on date {v}'), TemplatedFilter('isnull', 'is null', '"{c}" is null', '{c} is null', no_argument=True), TemplatedFilter('notnull', 'is not null', '"{c}" is not null', '{c} is not null', no_argument=True), TemplatedFilter('isblank', 'is blank', '("{c}" is null or "{c}" = "")', '{c} is blank', no_argument=True), diff --git a/docs/json_api.rst b/docs/json_api.rst index 67700224..ef1b4548 100644 --- a/docs/json_api.rst +++ b/docs/json_api.rst @@ -233,6 +233,9 @@ You can filter the data returned by the table based on column values using a que This is only available if the ``json1`` SQLite extension is enabled. +``?column__date=value`` + Column is a datestamp occurring on the specified YYYY-MM-DD date, e.g. ``2018-01-02``. + ``?column__isnull=1`` Matches rows where the column is null. 
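
One behaviour of the new ``date`` filter worth spelling out (again, an illustration alongside the patch, not part of it): because its SQL template is ``date({c}) = :{p}``, a column holding full timestamps still matches a plain ``YYYY-MM-DD`` value. A minimal sketch, assuming a made-up in-memory ``events`` table::

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("create table events (id integer primary key, created text)")
    conn.executemany(
        "insert into events (created) values (?)",
        [("1988-01-01 10:30:00",), ("1988-01-02 09:00:00",)],
    )

    # ?created__date=1988-01-01 expands to the clause used below, because
    # the filter's sql_template is 'date({c}) = :{p}'.
    rows = conn.execute(
        "select id, created from events where date(created) = :p0",
        {"p0": "1988-01-01"},
    ).fetchall()
    print(rows)  # [(1, '1988-01-01 10:30:00')]
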
diff --git a/tests/test_filters.py b/tests/test_filters.py index a5d6e3d0..7b19c4e9 100644 --- a/tests/test_filters.py +++ b/tests/test_filters.py @@ -60,6 +60,14 @@ import pytest ['foo in (:p0, :p1, :p2)'], ["1", "2", "3"] ), + # date + ( + { + "foo__date": "1988-01-01", + }, + ["date(foo) = :p0"], + ["1988-01-01"] + ), # JSON array variants of __in (useful for unexpected characters) ( { From 9c77e6e355ec718d76178a7607721d10a66b6aef Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Mon, 15 Apr 2019 16:44:17 -0700 Subject: [PATCH 6/6] Support multiple filters of the same type Closes #288 --- datasette/views/table.py | 9 +++-- tests/test_api.py | 10 +++++ tests/test_filters.py | 87 ++++++++++++++++++++++------------------ 3 files changed, 63 insertions(+), 43 deletions(-) diff --git a/datasette/views/table.py b/datasette/views/table.py index 2c356bda..bc5e775e 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -219,13 +219,14 @@ class TableView(RowTableShared): # it can still be queried using ?_col__exact=blah special_args = {} special_args_lists = {} - other_args = {} + other_args = [] for key, value in args.items(): if key.startswith("_") and "__" not in key: special_args[key] = value[0] special_args_lists[key] = value else: - other_args[key] = value[0] + for v in value: + other_args.append((key, v)) # Handle ?_filter_column and redirect, if present redirect_params = filters_should_redirect(special_args) @@ -253,7 +254,7 @@ class TableView(RowTableShared): table_metadata = self.ds.table_metadata(database, table) units = table_metadata.get("units", {}) - filters = Filters(sorted(other_args.items()), units, ureg) + filters = Filters(sorted(other_args), units, ureg) where_clauses, params = filters.build_where_clauses(table) extra_wheres_for_ui = [] @@ -521,7 +522,7 @@ class TableView(RowTableShared): database, table, column, values )) for row in facet_rows: - selected = str(other_args.get(column)) == str(row["value"]) + selected = (column, str(row["value"])) in other_args if selected: toggle_path = path_with_removed_args( request, {column: str(row["value"])} diff --git a/tests/test_api.py b/tests/test_api.py index d6f612c8..53bf1d6e 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -903,6 +903,16 @@ def test_table_filter_queries(app_client, path, expected_rows): assert expected_rows == response.json['rows'] +def test_table_filter_queries_multiple_of_same_type(app_client): + response = app_client.get( + "/fixtures/simple_primary_key.json?content__not=world&content__not=hello" + ) + assert [ + ['3', ''], + ['4', 'RENDER_CELL_DEMO'] + ] == response.json['rows'] + + @pytest.mark.skipif( not detect_json1(), reason="Requires the SQLite json1 module" diff --git a/tests/test_filters.py b/tests/test_filters.py index 7b19c4e9..a905dd2e 100644 --- a/tests/test_filters.py +++ b/tests/test_filters.py @@ -4,88 +4,97 @@ import pytest @pytest.mark.parametrize('args,expected_where,expected_params', [ ( - { - 'name_english__contains': 'foo', - }, + ( + ('name_english__contains', 'foo'), + ), ['"name_english" like :p0'], ['%foo%'] ), ( - { - 'foo': 'bar', - 'bar__contains': 'baz', - }, + ( + ('foo', 'bar'), + ('bar__contains', 'baz'), + ), ['"bar" like :p0', '"foo" = :p1'], ['%baz%', 'bar'] ), ( - { - 'foo__startswith': 'bar', - 'bar__endswith': 'baz', - }, + ( + ('foo__startswith', 'bar'), + ('bar__endswith', 'baz'), + ), ['"bar" like :p0', '"foo" like :p1'], ['%baz', 'bar%'] ), ( - { - 'foo__lt': '1', - 'bar__gt': '2', - 'baz__gte': '3', - 'bax__lte': '4', - }, + ( + 
('foo__lt', '1'), + ('bar__gt', '2'), + ('baz__gte', '3'), + ('bax__lte', '4'), + ), ['"bar" > :p0', '"bax" <= :p1', '"baz" >= :p2', '"foo" < :p3'], [2, 4, 3, 1] ), ( - { - 'foo__like': '2%2', - 'zax__glob': '3*', - }, + ( + ('foo__like', '2%2'), + ('zax__glob', '3*'), + ), ['"foo" like :p0', '"zax" glob :p1'], ['2%2', '3*'] ), + # Multiple like arguments: ( - { - 'foo__isnull': '1', - 'baz__isnull': '1', - 'bar__gt': '10' - }, + ( + ('foo__like', '2%2'), + ('foo__like', '3%3'), + ), + ['"foo" like :p0', '"foo" like :p1'], + ['2%2', '3%3'] + ), + ( + ( + ('foo__isnull', '1'), + ('baz__isnull', '1'), + ('bar__gt', '10'), + ), ['"bar" > :p0', '"baz" is null', '"foo" is null'], [10] ), ( - { - 'foo__in': '1,2,3', - }, + ( + ('foo__in', '1,2,3'), + ), ['foo in (:p0, :p1, :p2)'], ["1", "2", "3"] ), # date ( - { - "foo__date": "1988-01-01", - }, + ( + ("foo__date", "1988-01-01"), + ), ["date(foo) = :p0"], ["1988-01-01"] ), # JSON array variants of __in (useful for unexpected characters) ( - { - 'foo__in': '[1,2,3]', - }, + ( + ('foo__in', '[1,2,3]'), + ), ['foo in (:p0, :p1, :p2)'], [1, 2, 3] ), ( - { - 'foo__in': '["dog,cat", "cat[dog]"]', - }, + ( + ('foo__in', '["dog,cat", "cat[dog]"]'), + ), ['foo in (:p0, :p1)'], ["dog,cat", "cat[dog]"] ), ]) def test_build_where(args, expected_where, expected_params): - f = Filters(sorted(args.items())) + f = Filters(sorted(args)) sql_bits, actual_params = f.build_where_clauses("table") assert expected_where == sql_bits assert {