Merged
3 changes: 2 additions & 1 deletion .gitignore
@@ -75,8 +75,9 @@ coverage.xml
.noseids
Procfile

# PyCharm
# editor
.idea
.vscode/

# Config
config.toml
12 changes: 10 additions & 2 deletions README.md
@@ -210,7 +210,7 @@ Returns OpenAPI/Swagger documentation specific to this resource.

### Query Operators

The data endpoint can be queried with the following operators as query string (replacing `column_name` with the name of an actual column), if the column type allows it (see the swagger for each column's allowed parameter):
The data endpoint can be queried with the following operators as query string (replacing `column_name` with the name of an actual column), if the column type allows it (see the swagger for each column's allowed parameters):

#### Filtering Operators
```
@@ -220,12 +220,18 @@ column_name__exact=value
# differs
column_name__differs=value

# contains (for strings only)
# contains
column_name__contains=value

# notcontains (value does not contain)
column_name__notcontains=value

# in (value in list)
column_name__in=value1,value2,value3

# notin (value not in list)
column_name__notin=value1,value2,value3

# less
column_name__less=value

@@ -271,6 +277,8 @@ column_name__sum

> **Note**: Passing an aggregation operator (`count`, `avg`, `min`, `max`, `sum`) returns a column that is named `<column_name>__<operator>` (for instance: `?birth__groupby&score__sum` will return a list of dicts with the keys `birth` and `score__sum`).

> ⚠️ **WARNING**: columns that contain **JSON** objects (see the `profile` to know which ones do) **do not support filtering or aggregation** for now.
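
For illustration, here is a minimal Python sketch of a request combining the operators above. The base URL and resource id are placeholders, and the column names (`birth`, `decompte`, `score`) are only borrowed from the examples in this PR, so adapt them to a real resource:

```python
import requests  # assumed available; any HTTP client works

# Placeholder endpoint: replace <resource_id> with a real resource id.
url = "https://tabular.example.org/api/resources/<resource_id>/data/"

# Filtering: rows whose `birth` does not contain "1995" and whose
# `decompte` is not 1, 2 or 3 (values for __notin are comma-separated).
params = {
    "birth__notcontains": "1995",
    "decompte__notin": "1,2,3",
    "page": 1,
    "page_size": 20,
}
response = requests.get(url, params=params)
response.raise_for_status()
rows = response.json()

# Aggregation: as described in the note above, `?birth__groupby&score__sum`
# returns a list of dicts keyed `birth` and `score__sum`.
agg = requests.get(url + "?birth__groupby&score__sum")
agg.raise_for_status()
```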

#### Pagination
```
page=1 # Page number (default: 1)
60 changes: 54 additions & 6 deletions api_tabular/utils.py
@@ -8,12 +8,24 @@
from api_tabular import config

TYPE_POSSIBILITIES = {
"string": ["compare", "contains", "differs", "exact", "in", "sort", "groupby", "count"],
"string": [
"compare",
"contains",
"notcontains",
"differs",
"exact",
"in",
"notin",
"sort",
"groupby",
"count",
],
"float": [
"compare",
"differs",
"exact",
"in",
"notin",
"sort",
"groupby",
"count",
@@ -27,6 +39,7 @@
"differs",
"exact",
"in",
"notin",
"sort",
"groupby",
"count",
@@ -35,10 +48,33 @@
"min",
"sum",
],
"bool": ["differs", "exact", "in", "sort", "groupby", "count"],
"date": ["compare", "contains", "differs", "exact", "in", "sort", "groupby", "count"],
"datetime": ["compare", "contains", "differs", "exact", "in", "sort", "groupby", "count"],
"json": ["contains", "differs", "exact", "in", "groupby", "count"],
"bool": ["differs", "exact", "sort", "groupby", "count"],
"date": [
"compare",
"contains",
"notcontains",
"differs",
"exact",
"in",
"notin",
"sort",
"groupby",
"count",
],
"datetime": [
"compare",
"contains",
"notcontains",
"differs",
"exact",
"in",
"notin",
"sort",
"groupby",
"count",
],
# TODO: JSON needs special treatment for operators to work
"json": [],
}

MAP_TYPES = {
@@ -61,10 +97,18 @@
"name": "{}__contains",
"description": "String contains in column: {} ({}__contains=value)",
},
"notcontains": {
"name": "{}__notcontains",
"description": "String does not contain in column: {} ({}__notcontains=value)",
},
"in": {
"name": "{}__in",
"description": "Value in list in column: {} ({}__in=value1,value2,...)",
},
"notin": {
"name": "{}__notin",
"description": "Value not in list in column: {} ({}__notin=value1,value2,...)",
},
"groupby": {
"name": "{}__groupby",
"description": "Performs `group by values` operation in column: {}",
@@ -114,7 +158,7 @@ def build_sql_query_string(
request_arg: list,
resource_id: str | None = None,
indexes: set | None = None,
page_size: int = None,
page_size: int | None = None,
offset: int = 0,
) -> str:
sql_query = []
@@ -190,8 +234,12 @@ def add_filter(argument: str, value: str, indexes: set | None) -> tuple[str | No
return f"{column}=neq.{value}", False
elif normalized_comparator == "contains":
return f"{column}=ilike.*{value}*", False
elif normalized_comparator == "notcontains":
return f"{column}=not.ilike.*{value}*", False
elif normalized_comparator == "in":
return f"{column}=in.({value})", False
elif normalized_comparator == "notin":
return f"{column}=not.in.({value})", False
elif normalized_comparator == "less":
return f"{column}=lte.{value}", False
elif normalized_comparator == "greater":
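To make the two new comparators concrete, here is a simplified, self-contained sketch of the operator-to-PostgREST mapping visible in `add_filter` above. The helper name and the `__`-splitting are illustrative only; the real function also handles indexes, aggregation flags and more operators than shown here:

```python
def to_postgrest_filter(argument: str, value: str) -> str | None:
    """Map a query-string argument such as `birth__notcontains=1995`
    to the PostgREST-style filter produced by add_filter (sketch only)."""
    # Illustrative parsing: "birth__notcontains" -> ("birth", "notcontains")
    column, _, comparator = argument.rpartition("__")
    mapping = {
        "contains": f"{column}=ilike.*{value}*",
        "notcontains": f"{column}=not.ilike.*{value}*",
        "in": f"{column}=in.({value})",
        "notin": f"{column}=not.in.({value})",
        "less": f"{column}=lte.{value}",
    }
    return mapping.get(comparator)


assert to_postgrest_filter("birth__notcontains", "1995") == "birth=not.ilike.*1995*"
assert to_postgrest_filter("decompte__notin", "1,2,3") == "decompte=not.in.(1,2,3)"
```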
8 changes: 4 additions & 4 deletions db/initdb/0-init.sql
@@ -59,7 +59,7 @@ CREATE TABLE "csvapi".eb7a008177131590c2f1a2ca0 (
decompte integer,
is_true boolean,
birth character varying,
liste character varying
liste JSON
);


@@ -1214,7 +1214,7 @@ CREATE TABLE "csvapi".s34fff81a3a7292c64a77e5cz (
decompte integer,
is_true boolean,
birth character varying,
liste character varying
liste JSON
);


@@ -1299,7 +1299,7 @@ CREATE TABLE "csvapi".aa2zoa2zfb243p45azj33ap1o (
decompte integer,
is_true boolean,
birth character varying,
liste character varying
liste JSON
);


@@ -1384,7 +1384,7 @@ CREATE TABLE "csvapi".p34zej8pnq446k2ejfz2m3dqz (
decompte integer,
is_true boolean,
birth character varying,
liste character varying
liste JSON
);


8 changes: 6 additions & 2 deletions tests/test_api.py
@@ -308,15 +308,19 @@ async def test_api_exception_resource_no_indexes(client, base_url, tables_index_
assert res.status == 200

# checking that the resource can be filtered on all columns
for col in detection["columns"].keys():
for col, results in detection["columns"].items():
if results["python_type"] == "json":
continue
res = await client.get(
f"{base_url}/api/resources/{_resource_id}/data/?{col}__exact=1&page=1&page_size=1"
)
assert res.status == 200

# if aggregation is allowed:
# checking whether aggregation is allowed on all columns or none
for col in detection["columns"].keys():
for col, results in detection["columns"].items():
if results["python_type"] == "json":
continue
res = await client.get(
f"{base_url}/api/resources/{_resource_id}/data/?{col}__groupby&page=1&page_size=1"
)
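For context on the test change: the loops now skip JSON-typed columns because, per the README warning, JSON columns do not support filtering or aggregation yet. Below is a hedged sketch of the shape the test assumes for `detection["columns"]`; the column names and types are illustrative, borrowed from the sample tables in this PR, and any keys other than `python_type` are assumptions:

```python
# Hypothetical inspection result; only the "python_type" key is relied on
# by the test, other metadata is omitted here.
detection = {
    "columns": {
        "decompte": {"python_type": "int"},
        "birth": {"python_type": "date"},
        "liste": {"python_type": "json"},  # skipped by the filtering/aggregation checks
    }
}

filterable = [
    col
    for col, results in detection["columns"].items()
    if results["python_type"] != "json"
]
assert filterable == ["decompte", "birth"]
```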