kopia lustrzana https://github.com/simonw/datasette
track_event() mechanism for analytics and plugins
* Closes #2240 * Documentation for event plugin hooks, refs #2240 * Include example track_event plugin in docs, refs #2240 * Tests for track_event() and register_events() hooks, refs #2240 * Initial documentation for core events, refs #2240 * Internals documentation for datasette.track_event()pull/2245/head
rodzic
890615b3f2
commit
bcc4f6bf1f
|
@ -1,5 +1,6 @@
|
|||
from datasette.permissions import Permission # noqa
|
||||
from datasette.version import __version_info__, __version__ # noqa
|
||||
from datasette.events import Event # noqa
|
||||
from datasette.utils.asgi import Forbidden, NotFound, Request, Response # noqa
|
||||
from datasette.utils import actor_matches_allow # noqa
|
||||
from datasette.views import Context # noqa
|
||||
|
|
|
@ -34,6 +34,7 @@ from jinja2 import (
|
|||
from jinja2.environment import Template
|
||||
from jinja2.exceptions import TemplateNotFound
|
||||
|
||||
from .events import Event
|
||||
from .views import Context
|
||||
from .views.base import ureg
|
||||
from .views.database import database_download, DatabaseView, TableCreateView
|
||||
|
@ -505,6 +506,14 @@ class Datasette:
|
|||
# This must be called for Datasette to be in a usable state
|
||||
if self._startup_invoked:
|
||||
return
|
||||
# Register event classes
|
||||
event_classes = []
|
||||
for hook in pm.hook.register_events(datasette=self):
|
||||
extra_classes = await await_me_maybe(hook)
|
||||
if extra_classes:
|
||||
event_classes.extend(extra_classes)
|
||||
self.event_classes = tuple(event_classes)
|
||||
|
||||
# Register permissions, but watch out for duplicate name/abbr
|
||||
names = {}
|
||||
abbrs = {}
|
||||
|
@ -873,6 +882,13 @@ class Datasette:
|
|||
result = await await_me_maybe(result)
|
||||
return result
|
||||
|
||||
async def track_event(self, event: Event):
    """Dispatch *event* to every plugin implementing the track_event hook.

    The event must be an instance of one of the classes registered via
    the register_events() hook (collected into self.event_classes).
    """
    assert isinstance(event, self.event_classes), "Invalid event type: {}".format(
        type(event)
    )
    # Hooks may be sync or async; await_me_maybe handles both.
    hook_results = pm.hook.track_event(datasette=self, event=event)
    for hook_result in hook_results:
        await await_me_maybe(hook_result)
|
||||
|
||||
async def permission_allowed(
|
||||
self, actor, action, resource=None, default=DEFAULT_NOT_SET
|
||||
):
|
||||
|
|
|
@ -0,0 +1,211 @@
|
|||
from abc import ABC, abstractmethod, abstractproperty
from dataclasses import asdict, dataclass, field
from datetime import datetime, timezone
from typing import Optional

from datasette.hookspecs import hookimpl
|
||||
|
||||
|
||||
@dataclass
class Event(ABC):
    """Abstract base class for events tracked via ``datasette.track_event()``.

    Subclasses must define a ``name`` class attribute (a string such as
    ``"login"``) and may add extra dataclass fields, which become the
    event's properties.
    """

    # abstractproperty is deprecated since Python 3.3; the modern
    # @property + @abstractmethod stacking is the supported equivalent.
    # Subclasses satisfy this with a plain class attribute, e.g. name = "login".
    @property
    @abstractmethod
    def name(self):
        """Unique name identifying this type of event."""

    # Timestamp of when the event object was created, always timezone-aware UTC.
    created: datetime = field(
        init=False, default_factory=lambda: datetime.now(timezone.utc)
    )
    # The actor that triggered the event, or None if no actor was involved.
    actor: Optional[dict]

    def properties(self):
        """Return the event-specific fields as a dict.

        The common ``actor`` and ``created`` fields are excluded.
        """
        properties = asdict(self)
        properties.pop("actor", None)
        properties.pop("created", None)
        return properties
|
||||
|
||||
|
||||
@dataclass
class LoginEvent(Event):
    """
    Event name: ``login``

    Fired when a user (available as ``event.actor``) logs in.
    """

    name = "login"
|
||||
|
||||
|
||||
@dataclass
class LogoutEvent(Event):
    """
    Event name: ``logout``

    Fired when a user (available as ``event.actor``) logs out.
    """

    name = "logout"
|
||||
|
||||
|
||||
@dataclass
class CreateTokenEvent(Event):
    """
    Event name: ``create-token``

    A user created an API token.

    :ivar expires_after: Number of seconds until the token expires.
    :type expires_after: int or None
    :ivar restrict_all: Restricted permissions applied to this token.
    :type restrict_all: list
    :ivar restrict_database: Restricted per-database permissions for this token.
    :type restrict_database: dict
    :ivar restrict_resource: Restricted per-resource permissions for this token.
    :type restrict_resource: dict
    """

    name = "create-token"
    expires_after: Optional[int]
    restrict_all: list
    restrict_database: dict
    restrict_resource: dict
|
||||
|
||||
|
||||
@dataclass
class CreateTableEvent(Event):
    """
    Event name: ``create-table``

    A new table has been created in the database.

    :ivar database: Name of the database the table was created in.
    :type database: str
    :ivar table: Name of the newly created table.
    :type table: str
    :ivar schema: SQL schema definition for the new table.
    :type schema: str
    """

    name = "create-table"
    database: str
    table: str
    schema: str
|
||||
|
||||
|
||||
@dataclass
class DropTableEvent(Event):
    """
    Event name: ``drop-table``

    A table has been dropped from the database.

    :ivar database: Name of the database the table was dropped from.
    :type database: str
    :ivar table: Name of the table that was dropped.
    :type table: str
    """

    name = "drop-table"
    database: str
    table: str
|
||||
|
||||
|
||||
@dataclass
class InsertRowsEvent(Event):
    """
    Event name: ``insert-rows``

    Rows were inserted into a table.

    :ivar database: Name of the database the rows were inserted into.
    :type database: str
    :ivar table: Name of the table the rows were inserted into.
    :type table: str
    :ivar num_rows: Number of rows the insert requested (not necessarily
        the number actually inserted, e.g. when ignore is set).
    :type num_rows: int
    :ivar ignore: True if the insert was run with ignore set.
    :type ignore: bool
    :ivar replace: True if the insert was run with replace set.
    :type replace: bool
    """

    name = "insert-rows"
    database: str
    table: str
    num_rows: int
    ignore: bool
    replace: bool
|
||||
|
||||
|
||||
@dataclass
class UpsertRowsEvent(Event):
    """
    Event name: ``upsert-rows``

    Rows were upserted into a table.

    :ivar database: The name of the database where the rows were upserted.
    :type database: str
    :ivar table: The name of the table where the rows were upserted.
    :type table: str
    :ivar num_rows: The number of rows that were requested to be upserted.
    :type num_rows: int
    """

    name = "upsert-rows"
    database: str
    table: str
    num_rows: int
|
||||
|
||||
|
||||
@dataclass
class UpdateRowEvent(Event):
    """
    Event name: ``update-row``

    A row was updated in a table.

    :ivar database: Name of the database containing the updated row.
    :type database: str
    :ivar table: Name of the table containing the updated row.
    :type table: str
    :ivar pks: Primary key values identifying the updated row.
    """

    name = "update-row"
    database: str
    table: str
    pks: list
|
||||
|
||||
|
||||
@dataclass
class DeleteRowEvent(Event):
    """
    Event name: ``delete-row``

    A row was deleted from a table.

    :ivar database: Name of the database the row was deleted from.
    :type database: str
    :ivar table: Name of the table the row was deleted from.
    :type table: str
    :ivar pks: Primary key values identifying the deleted row.
    """

    name = "delete-row"
    database: str
    table: str
    pks: list
|
||||
|
||||
|
||||
@hookimpl
def register_events():
    """Register Datasette's built-in Event subclasses with the plugin system."""
    core_events = [
        LoginEvent,
        LogoutEvent,
        CreateTableEvent,
        CreateTokenEvent,
        DropTableEvent,
        InsertRowsEvent,
        UpsertRowsEvent,
        UpdateRowEvent,
        DeleteRowEvent,
    ]
    return core_events
|
|
@ -160,6 +160,16 @@ def handle_exception(datasette, request, exception):
|
|||
"""Handle an uncaught exception. Can return a Response or None."""
|
||||
|
||||
|
||||
@hookspec
def track_event(datasette, event):
    """Called with each event tracked via datasette.track_event()"""
|
||||
|
||||
|
||||
@hookspec
def register_events(datasette):
    """Return a list of Event subclasses that can be passed to track_event()"""
|
||||
|
||||
|
||||
@hookspec
def top_homepage(datasette, request):
    """Extra HTML to render at the top of the homepage"""
|
||||
|
|
|
@ -27,6 +27,7 @@ DEFAULT_PLUGINS = (
|
|||
"datasette.default_menu_links",
|
||||
"datasette.handle_exception",
|
||||
"datasette.forbidden",
|
||||
"datasette.events",
|
||||
)
|
||||
|
||||
pm = pluggy.PluginManager("datasette")
|
||||
|
|
|
@ -10,6 +10,7 @@ import re
|
|||
import sqlite_utils
|
||||
import textwrap
|
||||
|
||||
from datasette.events import CreateTableEvent
|
||||
from datasette.database import QueryInterrupted
|
||||
from datasette.utils import (
|
||||
add_cors_headers,
|
||||
|
@ -969,6 +970,11 @@ class TableCreateView(BaseView):
|
|||
}
|
||||
if rows:
|
||||
details["row_count"] = len(rows)
|
||||
await self.ds.track_event(
|
||||
CreateTableEvent(
|
||||
request.actor, database=db.name, table=table_name, schema=schema
|
||||
)
|
||||
)
|
||||
return Response.json(details, status=201)
|
||||
|
||||
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
from datasette.utils.asgi import NotFound, Forbidden, Response
|
||||
from datasette.database import QueryInterrupted
|
||||
from datasette.events import UpdateRowEvent, DeleteRowEvent
|
||||
from .base import DataView, BaseView, _error
|
||||
from datasette.utils import (
|
||||
make_slot_function,
|
||||
|
@ -200,6 +201,15 @@ class RowDeleteView(BaseView):
|
|||
except Exception as e:
|
||||
return _error([str(e)], 500)
|
||||
|
||||
await self.ds.track_event(
|
||||
DeleteRowEvent(
|
||||
actor=request.actor,
|
||||
database=resolved.db.name,
|
||||
table=resolved.table,
|
||||
pks=resolved.pk_values,
|
||||
)
|
||||
)
|
||||
|
||||
return Response.json({"ok": True}, status=200)
|
||||
|
||||
|
||||
|
@ -246,4 +256,14 @@ class RowUpdateView(BaseView):
|
|||
)
|
||||
rows = list(results.rows)
|
||||
result["row"] = dict(rows[0])
|
||||
|
||||
await self.ds.track_event(
|
||||
UpdateRowEvent(
|
||||
actor=request.actor,
|
||||
database=resolved.db.name,
|
||||
table=resolved.table,
|
||||
pks=resolved.pk_values,
|
||||
)
|
||||
)
|
||||
|
||||
return Response.json(result, status=200)
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
import json
|
||||
from datasette.events import LogoutEvent, LoginEvent, CreateTokenEvent
|
||||
from datasette.utils.asgi import Response, Forbidden
|
||||
from datasette.utils import (
|
||||
actor_matches_allow,
|
||||
|
@ -80,9 +81,9 @@ class AuthTokenView(BaseView):
|
|||
if secrets.compare_digest(token, self.ds._root_token):
|
||||
self.ds._root_token = None
|
||||
response = Response.redirect(self.ds.urls.instance())
|
||||
response.set_cookie(
|
||||
"ds_actor", self.ds.sign({"a": {"id": "root"}}, "actor")
|
||||
)
|
||||
root_actor = {"id": "root"}
|
||||
response.set_cookie("ds_actor", self.ds.sign({"a": root_actor}, "actor"))
|
||||
await self.ds.track_event(LoginEvent(actor=root_actor))
|
||||
return response
|
||||
else:
|
||||
raise Forbidden("Invalid token")
|
||||
|
@ -105,6 +106,7 @@ class LogoutView(BaseView):
|
|||
response = Response.redirect(self.ds.urls.instance())
|
||||
response.set_cookie("ds_actor", "", expires=0, max_age=0)
|
||||
self.ds.add_message(request, "You are now logged out", self.ds.WARNING)
|
||||
await self.ds.track_event(LogoutEvent(actor=request.actor))
|
||||
return response
|
||||
|
||||
|
||||
|
@ -349,6 +351,15 @@ class CreateTokenView(BaseView):
|
|||
restrict_resource=restrict_resource,
|
||||
)
|
||||
token_bits = self.ds.unsign(token[len("dstok_") :], namespace="token")
|
||||
await self.ds.track_event(
|
||||
CreateTokenEvent(
|
||||
actor=request.actor,
|
||||
expires_after=expires_after,
|
||||
restrict_all=restrict_all,
|
||||
restrict_database=restrict_database,
|
||||
restrict_resource=restrict_resource,
|
||||
)
|
||||
)
|
||||
context = await self.shared(request)
|
||||
context.update({"errors": errors, "token": token, "token_bits": token_bits})
|
||||
return await self.render(["create_token.html"], request, context)
|
||||
|
|
|
@ -8,6 +8,7 @@ import markupsafe
|
|||
|
||||
from datasette.plugins import pm
|
||||
from datasette.database import QueryInterrupted
|
||||
from datasette.events import DropTableEvent, InsertRowsEvent, UpsertRowsEvent
|
||||
from datasette import tracer
|
||||
from datasette.utils import (
|
||||
add_cors_headers,
|
||||
|
@ -467,6 +468,8 @@ class TableInsertView(BaseView):
|
|||
if errors:
|
||||
return _error(errors, 400)
|
||||
|
||||
num_rows = len(rows)
|
||||
|
||||
# Now that we've passed pks to _validate_data it's safe to
|
||||
# fix the rowids case:
|
||||
if not pks:
|
||||
|
@ -527,6 +530,29 @@ class TableInsertView(BaseView):
|
|||
result["rows"] = [dict(r) for r in fetched_rows.rows]
|
||||
else:
|
||||
result["rows"] = rows
|
||||
# We track the number of rows requested, but do not attempt to show which were actually
|
||||
# inserted or upserted vs. ignored
|
||||
if upsert:
|
||||
await self.ds.track_event(
|
||||
UpsertRowsEvent(
|
||||
actor=request.actor,
|
||||
database=database_name,
|
||||
table=table_name,
|
||||
num_rows=num_rows,
|
||||
)
|
||||
)
|
||||
else:
|
||||
await self.ds.track_event(
|
||||
InsertRowsEvent(
|
||||
actor=request.actor,
|
||||
database=database_name,
|
||||
table=table_name,
|
||||
num_rows=num_rows,
|
||||
ignore=bool(ignore),
|
||||
replace=bool(replace),
|
||||
)
|
||||
)
|
||||
|
||||
return Response.json(result, status=200 if upsert else 201)
|
||||
|
||||
|
||||
|
@ -587,6 +613,11 @@ class TableDropView(BaseView):
|
|||
sqlite_utils.Database(conn)[table_name].drop()
|
||||
|
||||
await db.execute_write_fn(drop_table)
|
||||
await self.ds.track_event(
|
||||
DropTableEvent(
|
||||
actor=request.actor, database=database_name, table=table_name
|
||||
)
|
||||
)
|
||||
return Response.json({"ok": True}, status=200)
|
||||
|
||||
|
||||
|
|
|
@ -40,6 +40,8 @@ extensions = [
|
|||
if not os.environ.get("DISABLE_SPHINX_INLINE_TABS"):
|
||||
extensions += ["sphinx_inline_tabs"]
|
||||
|
||||
autodoc_member_order = "bysource"
|
||||
|
||||
extlinks = {
|
||||
"issue": ("https://github.com/simonw/datasette/issues/%s", "#%s"),
|
||||
}
|
||||
|
|
|
@ -0,0 +1,14 @@
|
|||
.. _events:
|
||||
|
||||
Events
|
||||
======
|
||||
|
||||
Datasette includes a mechanism for tracking events that occur while the software is running. This is primarily intended to be used by plugins, which can both trigger events and listen for events.
|
||||
|
||||
The core Datasette application triggers events when certain things happen. This page describes those events.
|
||||
|
||||
Plugins can listen for events using the :ref:`plugin_hook_track_event` plugin hook, which will be called with instances of the following classes (or additional classes registered by other plugins):
|
||||
|
||||
.. automodule:: datasette.events
|
||||
:members:
|
||||
:exclude-members: Event
|
|
@ -63,5 +63,6 @@ Contents
|
|||
plugin_hooks
|
||||
testing_plugins
|
||||
internals
|
||||
events
|
||||
contributing
|
||||
changelog
|
||||
|
|
|
@ -593,6 +593,26 @@ Using either of these pattern will result in the in-memory database being served
|
|||
|
||||
This removes a database that has been previously added. ``name=`` is the unique name of that database.
|
||||
|
||||
.. _datasette_track_event:
|
||||
|
||||
await .track_event(event)
|
||||
-------------------------
|
||||
|
||||
``event`` - ``Event``
|
||||
An instance of a subclass of ``datasette.events.Event``.
|
||||
|
||||
Plugins can call this to track events, using classes they have previously registered. See :ref:`plugin_event_tracking` for details.
|
||||
|
||||
The event will then be passed to all plugins that have registered to receive events using the :ref:`plugin_hook_track_event` hook.
|
||||
|
||||
Example usage, assuming the plugin has previously registered the ``BanUserEvent`` class:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
await datasette.track_event(
|
||||
BanUserEvent(user={"id": 1, "username": "cleverbot"})
|
||||
)
|
||||
|
||||
.. _datasette_sign:
|
||||
|
||||
.sign(value, namespace="default")
|
||||
|
|
|
@ -1759,3 +1759,103 @@ top_canned_query(datasette, request, database, query_name)
|
|||
The name of the canned query.
|
||||
|
||||
Returns HTML to be displayed at the top of the canned query page.
|
||||
|
||||
.. _plugin_event_tracking:
|
||||
|
||||
Event tracking
|
||||
--------------
|
||||
|
||||
Datasette includes an internal mechanism for tracking analytical events. This can be used for analytics, but can also be used by plugins that want to listen out for when key events occur (such as a table being created) and take action in response.
|
||||
|
||||
Plugins can register to receive events using the ``track_event`` plugin hook.
|
||||
|
||||
They can also define their own events for other plugins to receive using the ``register_events`` plugin hook, combined with calls to the ``datasette.track_event(...)`` internal method.
|
||||
|
||||
.. _plugin_hook_track_event:
|
||||
|
||||
track_event(datasette, event)
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
``datasette`` - :ref:`internals_datasette`
|
||||
You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``.
|
||||
|
||||
``event`` - ``Event``
|
||||
Information about the event, represented as an instance of a subclass of the ``Event`` base class.
|
||||
|
||||
This hook will be called any time an event is tracked by code that calls the :ref:`datasette.track_event(...) <datasette_track_event>` internal method.
|
||||
|
||||
The ``event`` object will always have the following properties:
|
||||
|
||||
- ``name``: a string representing the name of the event, for example ``logout`` or ``create-table``.
|
||||
- ``actor``: a dictionary representing the actor that triggered the event, or ``None`` if the event was not triggered by an actor.
|
||||
- ``created``: a ``datetime.datetime`` object in the ``timezone.utc`` timezone representing the time the event object was created.
|
||||
|
||||
Other properties on the event will be available depending on the type of event. You can also access those as a dictionary using ``event.properties()``.
|
||||
|
||||
The events fired by Datasette core are :ref:`documented here <events>`.
|
||||
|
||||
This example plugin logs details of all events to standard error:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from datasette import hookimpl
|
||||
import json
|
||||
import sys
|
||||
|
||||
|
||||
@hookimpl
|
||||
def track_event(event):
|
||||
name = event.name
|
||||
actor = event.actor
|
||||
properties = event.properties()
|
||||
msg = json.dumps(
|
||||
{
|
||||
"name": name,
|
||||
"actor": actor,
|
||||
"properties": properties,
|
||||
}
|
||||
)
|
||||
print(msg, file=sys.stderr, flush=True)
|
||||
|
||||
|
||||
.. _plugin_hook_register_events:
|
||||
|
||||
register_events(datasette)
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
``datasette`` - :ref:`internals_datasette`
|
||||
You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``.
|
||||
|
||||
This hook should return a list of ``Event`` subclasses that represent custom events that the plugin might send to the ``datasette.track_event()`` method.
|
||||
|
||||
This example registers event subclasses for ``ban-user`` and ``unban-user`` events:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datasette import hookimpl, Event
|
||||
|
||||
|
||||
@dataclass
|
||||
class BanUserEvent(Event):
|
||||
name = "ban-user"
|
||||
user: dict
|
||||
|
||||
|
||||
@dataclass
|
||||
class UnbanUserEvent(Event):
|
||||
name = "unban-user"
|
||||
user: dict
|
||||
|
||||
|
||||
@hookimpl
|
||||
def register_events():
|
||||
return [BanUserEvent, UnbanUserEvent]
|
||||
|
||||
The plugin can then call ``datasette.track_event(...)`` to send a ``ban-user`` event:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
await datasette.track_event(
|
||||
BanUserEvent(user={"id": 1, "username": "cleverbot"})
|
||||
)
|
||||
|
|
|
@ -228,6 +228,15 @@ If you run ``datasette plugins --all`` it will include default plugins that ship
|
|||
"skip_csrf"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "datasette.events",
|
||||
"static": false,
|
||||
"templates": false,
|
||||
"version": null,
|
||||
"hooks": [
|
||||
"register_events"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "datasette.facets",
|
||||
"static": false,
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
import asyncio
|
||||
import httpx
|
||||
import os
|
||||
import pathlib
|
||||
|
@ -8,7 +7,8 @@ import re
|
|||
import subprocess
|
||||
import tempfile
|
||||
import time
|
||||
import trustme
|
||||
from dataclasses import dataclass, field
|
||||
from datasette import Event, hookimpl
|
||||
|
||||
|
||||
try:
|
||||
|
@ -164,6 +164,35 @@ def check_permission_actions_are_documented():
|
|||
)
|
||||
|
||||
|
||||
class TrackEventPlugin:
    """Test plugin that registers a custom event class and records every
    event delivered to the track_event hook on the Datasette instance."""

    __name__ = "TrackEventPlugin"

    @dataclass
    class OneEvent(Event):
        # Minimal custom event with a single extra property
        name = "one"

        extra: str

    @hookimpl
    def register_events(self, datasette):
        # Returns an awaitable so the async hook code path gets exercised
        async def inner():
            return [self.OneEvent]

        return inner

    @hookimpl
    def track_event(self, datasette, event):
        # Accumulate tracked events on the instance for later inspection
        if not hasattr(datasette, "_tracked_events"):
            datasette._tracked_events = []
        datasette._tracked_events.append(event)
|
||||
|
||||
|
||||
@pytest.fixture(scope="session", autouse=True)
def install_event_tracking_plugin():
    """Register TrackEventPlugin with the plugin manager for the whole session."""
    from datasette.plugins import pm

    plugin = TrackEventPlugin()
    pm.register(plugin, name="TrackEventPlugin")
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def ds_localhost_http_server():
|
||||
ds_proc = subprocess.Popen(
|
||||
|
|
|
@ -786,7 +786,12 @@ async def test_threads_json(ds_client):
|
|||
@pytest.mark.asyncio
|
||||
async def test_plugins_json(ds_client):
|
||||
response = await ds_client.get("/-/plugins.json")
|
||||
assert EXPECTED_PLUGINS == sorted(response.json(), key=lambda p: p["name"])
|
||||
# Filter out TrackEventPlugin
|
||||
actual_plugins = sorted(
|
||||
[p for p in response.json() if p["name"] != "TrackEventPlugin"],
|
||||
key=lambda p: p["name"],
|
||||
)
|
||||
assert EXPECTED_PLUGINS == actual_plugins
|
||||
# Try with ?all=1
|
||||
response = await ds_client.get("/-/plugins.json?all=1")
|
||||
names = {p["name"] for p in response.json()}
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
from datasette.app import Datasette
|
||||
from datasette.utils import sqlite3
|
||||
from .utils import last_event
|
||||
import pytest
|
||||
import time
|
||||
|
||||
|
@ -49,6 +50,14 @@ async def test_insert_row(ds_write):
|
|||
assert response.json()["rows"] == [expected_row]
|
||||
rows = (await ds_write.get_database("data").execute("select * from docs")).rows
|
||||
assert dict(rows[0]) == expected_row
|
||||
# Analytics event
|
||||
event = last_event(ds_write)
|
||||
assert event.name == "insert-rows"
|
||||
assert event.num_rows == 1
|
||||
assert event.database == "data"
|
||||
assert event.table == "docs"
|
||||
assert not event.ignore
|
||||
assert not event.replace
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
|
@ -68,6 +77,16 @@ async def test_insert_rows(ds_write, return_rows):
|
|||
headers=_headers(token),
|
||||
)
|
||||
assert response.status_code == 201
|
||||
|
||||
# Analytics event
|
||||
event = last_event(ds_write)
|
||||
assert event.name == "insert-rows"
|
||||
assert event.num_rows == 20
|
||||
assert event.database == "data"
|
||||
assert event.table == "docs"
|
||||
assert not event.ignore
|
||||
assert not event.replace
|
||||
|
||||
actual_rows = [
|
||||
dict(r)
|
||||
for r in (
|
||||
|
@ -353,6 +372,16 @@ async def test_insert_ignore_replace(
|
|||
headers=_headers(token),
|
||||
)
|
||||
assert response.status_code == 201
|
||||
|
||||
# Analytics event
|
||||
event = last_event(ds_write)
|
||||
assert event.name == "insert-rows"
|
||||
assert event.num_rows == 1
|
||||
assert event.database == "data"
|
||||
assert event.table == "docs"
|
||||
assert event.ignore == ignore
|
||||
assert event.replace == replace
|
||||
|
||||
actual_rows = [
|
||||
dict(r)
|
||||
for r in (
|
||||
|
@ -427,6 +456,14 @@ async def test_upsert(ds_write, initial, input, expected_rows, should_return):
|
|||
)
|
||||
assert response.status_code == 200
|
||||
assert response.json()["ok"] is True
|
||||
|
||||
# Analytics event
|
||||
event = last_event(ds_write)
|
||||
assert event.name == "upsert-rows"
|
||||
assert event.num_rows == 1
|
||||
assert event.database == "data"
|
||||
assert event.table == "upsert_test"
|
||||
|
||||
if should_return:
|
||||
# We only expect it to return rows corresponding to those we sent
|
||||
expected_returned_rows = expected_rows[: len(input["rows"])]
|
||||
|
@ -530,6 +567,13 @@ async def test_delete_row(ds_write, table, row_for_create, pks, delete_path):
|
|||
headers=_headers(write_token(ds_write)),
|
||||
)
|
||||
assert delete_response.status_code == 200
|
||||
|
||||
# Analytics event
|
||||
event = last_event(ds_write)
|
||||
assert event.name == "delete-row"
|
||||
assert event.database == "data"
|
||||
assert event.table == table
|
||||
assert event.pks == str(delete_path).split(",")
|
||||
assert (
|
||||
await ds_write.client.get(
|
||||
"/data.json?_shape=arrayfirst&sql=select+count(*)+from+{}".format(table)
|
||||
|
@ -610,6 +654,13 @@ async def test_update_row(ds_write, input, expected_errors, use_return):
|
|||
for k, v in input.items():
|
||||
assert returned_row[k] == v
|
||||
|
||||
# Analytics event
|
||||
event = last_event(ds_write)
|
||||
assert event.actor == {"id": "root", "token": "dstok"}
|
||||
assert event.database == "data"
|
||||
assert event.table == "docs"
|
||||
assert event.pks == [str(pk)]
|
||||
|
||||
# And fetch the row to check it's updated
|
||||
response = await ds_write.client.get(
|
||||
"/data/docs/{}.json?_shape=array".format(pk),
|
||||
|
@ -676,6 +727,13 @@ async def test_drop_table(ds_write, scenario):
|
|||
headers=_headers(token),
|
||||
)
|
||||
assert response2.json() == {"ok": True}
|
||||
# Check event
|
||||
event = last_event(ds_write)
|
||||
assert event.name == "drop-table"
|
||||
assert event.actor == {"id": "root", "token": "dstok"}
|
||||
assert event.table == "docs"
|
||||
assert event.database == "data"
|
||||
# Table should 404
|
||||
assert (await ds_write.client.get("/data/docs")).status_code == 404
|
||||
|
||||
|
||||
|
@ -1096,6 +1154,12 @@ async def test_create_table(ds_write, input, expected_status, expected_response)
|
|||
assert response.status_code == expected_status
|
||||
data = response.json()
|
||||
assert data == expected_response
|
||||
# create-table event
|
||||
if expected_status == 201:
|
||||
event = last_event(ds_write)
|
||||
assert event.name == "create-table"
|
||||
assert event.actor == {"id": "root", "token": "dstok"}
|
||||
assert event.schema.startswith("CREATE TABLE ")
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
from bs4 import BeautifulSoup as Soup
|
||||
from .fixtures import app_client
|
||||
from .utils import cookie_was_deleted
|
||||
from .utils import cookie_was_deleted, last_event
|
||||
from click.testing import CliRunner
|
||||
from datasette.utils import baseconv
|
||||
from datasette.cli import cli
|
||||
|
@ -19,6 +19,10 @@ async def test_auth_token(ds_client):
|
|||
assert {"a": {"id": "root"}} == ds_client.ds.unsign(
|
||||
response.cookies["ds_actor"], "actor"
|
||||
)
|
||||
# Should have recorded a login event
|
||||
event = last_event(ds_client.ds)
|
||||
assert event.name == "login"
|
||||
assert event.actor == {"id": "root"}
|
||||
# Check that a second with same token fails
|
||||
assert ds_client.ds._root_token is None
|
||||
assert (await ds_client.get(path)).status_code == 403
|
||||
|
@ -57,7 +61,7 @@ async def test_actor_cookie_that_expires(ds_client, offset, expected):
|
|||
cookie = ds_client.ds.sign(
|
||||
{"a": {"id": "test"}, "e": baseconv.base62.encode(expires_at)}, "actor"
|
||||
)
|
||||
response = await ds_client.get("/", cookies={"ds_actor": cookie})
|
||||
await ds_client.get("/", cookies={"ds_actor": cookie})
|
||||
assert ds_client.ds._last_request.scope["actor"] == expected
|
||||
|
||||
|
||||
|
@ -86,6 +90,10 @@ def test_logout(app_client):
|
|||
csrftoken_from=True,
|
||||
cookies={"ds_actor": app_client.actor_cookie({"id": "test"})},
|
||||
)
|
||||
# Should have recorded a logout event
|
||||
event = last_event(app_client.ds)
|
||||
assert event.name == "logout"
|
||||
assert event.actor == {"id": "test"}
|
||||
# The ds_actor cookie should have been unset
|
||||
assert cookie_was_deleted(response4, "ds_actor")
|
||||
# Should also have set a message
|
||||
|
@ -185,6 +193,13 @@ def test_auth_create_token(
|
|||
for error in errors:
|
||||
assert '<p class="message-error">{}</p>'.format(error) in response2.text
|
||||
else:
|
||||
# Check create-token event
|
||||
event = last_event(app_client.ds)
|
||||
assert event.name == "create-token"
|
||||
assert event.expires_after == expected_duration
|
||||
assert isinstance(event.restrict_all, list)
|
||||
assert isinstance(event.restrict_database, dict)
|
||||
assert isinstance(event.restrict_resource, dict)
|
||||
# Extract token from page
|
||||
token = response2.text.split('value="dstok_')[1].split('"')[0]
|
||||
details = app_client.ds.unsign(token, "token")
|
||||
|
|
|
@ -100,7 +100,11 @@ def test_spatialite_error_if_cannot_find_load_extension_spatialite():
|
|||
def test_plugins_cli(app_client):
|
||||
runner = CliRunner()
|
||||
result1 = runner.invoke(cli, ["plugins"])
|
||||
assert json.loads(result1.output) == EXPECTED_PLUGINS
|
||||
actual_plugins = sorted(
|
||||
[p for p in json.loads(result1.output) if p["name"] != "TrackEventPlugin"],
|
||||
key=lambda p: p["name"],
|
||||
)
|
||||
assert actual_plugins == EXPECTED_PLUGINS
|
||||
# Try with --all
|
||||
result2 = runner.invoke(cli, ["plugins", "--all"])
|
||||
names = [p["name"] for p in json.loads(result2.output)]
|
||||
|
|
|
@ -9,8 +9,9 @@ from .fixtures import (
|
|||
TestClient as _TestClient,
|
||||
) # noqa
|
||||
from click.testing import CliRunner
|
||||
from dataclasses import dataclass
|
||||
from datasette.app import Datasette
|
||||
from datasette import cli, hookimpl, Permission
|
||||
from datasette import cli, hookimpl, Event, Permission
|
||||
from datasette.filters import FilterArguments
|
||||
from datasette.plugins import get_plugins, DEFAULT_PLUGINS, pm
|
||||
from datasette.utils.sqlite import sqlite3
|
||||
|
@ -18,6 +19,7 @@ from datasette.utils import CustomRow, StartupError
|
|||
from jinja2.environment import Template
|
||||
from jinja2 import ChoiceLoader, FileSystemLoader
|
||||
import base64
|
||||
import datetime
|
||||
import importlib
|
||||
import json
|
||||
import os
|
||||
|
@ -1437,3 +1439,30 @@ async def test_hook_top_canned_query(ds_client):
|
|||
assert "Xtop_query:fixtures:from_hook:xyz" in response.text
|
||||
finally:
|
||||
pm.unregister(name="SlotPlugin")
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_hook_track_event():
    """track_event() should deliver events to plugins using the hook."""
    ds = Datasette(memory=True)
    from .conftest import TrackEventPlugin

    await ds.invoke_startup()
    await ds.track_event(
        TrackEventPlugin.OneEvent(actor=None, extra="extra extra")
    )
    tracked = ds._tracked_events
    assert len(tracked) == 1
    captured = tracked[0]
    assert isinstance(captured, TrackEventPlugin.OneEvent)
    assert captured.name == "one"
    assert captured.properties() == {"extra": "extra extra"}
    # created should be a recent timezone-aware UTC datetime
    assert isinstance(captured.created, datetime.datetime)
    assert captured.created.tzinfo == datetime.timezone.utc
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_hook_register_events():
    """Classes returned by register_events() hooks end up in event_classes."""
    ds = Datasette(memory=True)
    await ds.invoke_startup()
    registered_names = {klass.__name__ for klass in ds.event_classes}
    assert "OneEvent" in registered_names
|
||||
|
|
|
@ -1,6 +1,11 @@
|
|||
from datasette.utils.sqlite import sqlite3
|
||||
|
||||
|
||||
def last_event(datasette):
    """Return the most recently tracked event on *datasette*, or None.

    Events are recorded by the TrackEventPlugin test plugin in
    datasette._tracked_events; an absent or empty list yields None.
    """
    tracked = getattr(datasette, "_tracked_events", None)
    if not tracked:
        return None
    return tracked[-1]
|
||||
|
||||
|
||||
def assert_footer_links(soup):
|
||||
footer_links = soup.find("footer").findAll("a")
|
||||
assert 4 == len(footer_links)
|
||||
|
|
Ładowanie…
Reference in New Issue