pull/2314/merge
Timothy O'Leary 2024-05-01 15:25:12 +00:00 zatwierdzone przez GitHub
commit d2f1191d66
Nie znaleziono w bazie danych klucza dla tego podpisu
ID klucza GPG: B5690EEEBB952194
37 zmienionych plików z 731 dodań i 529 usunięć

8
.idea/.gitignore vendored 100644
Wyświetl plik

@ -0,0 +1,8 @@
# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml

Wyświetl plik

@ -0,0 +1,57 @@
<component name="ProjectCodeStyleConfiguration">
<code_scheme name="Project" version="173">
<HTMLCodeStyleSettings>
<option name="HTML_SPACE_INSIDE_EMPTY_TAG" value="true" />
</HTMLCodeStyleSettings>
<JSCodeStyleSettings version="0">
<option name="FORCE_SEMICOLON_STYLE" value="true" />
<option name="SPACE_BEFORE_FUNCTION_LEFT_PARENTH" value="false" />
<option name="FORCE_QUOTE_STYlE" value="true" />
<option name="ENFORCE_TRAILING_COMMA" value="Remove" />
<option name="SPACES_WITHIN_OBJECT_LITERAL_BRACES" value="true" />
<option name="SPACES_WITHIN_IMPORTS" value="true" />
</JSCodeStyleSettings>
<TypeScriptCodeStyleSettings version="0">
<option name="FORCE_SEMICOLON_STYLE" value="true" />
<option name="SPACE_BEFORE_FUNCTION_LEFT_PARENTH" value="false" />
<option name="FORCE_QUOTE_STYlE" value="true" />
<option name="ENFORCE_TRAILING_COMMA" value="Remove" />
<option name="SPACES_WITHIN_OBJECT_LITERAL_BRACES" value="true" />
<option name="SPACES_WITHIN_IMPORTS" value="true" />
</TypeScriptCodeStyleSettings>
<VueCodeStyleSettings>
<option name="INTERPOLATION_NEW_LINE_AFTER_START_DELIMITER" value="false" />
<option name="INTERPOLATION_NEW_LINE_BEFORE_END_DELIMITER" value="false" />
</VueCodeStyleSettings>
<codeStyleSettings language="HTML">
<option name="SOFT_MARGINS" value="80" />
<indentOptions>
<option name="INDENT_SIZE" value="2" />
<option name="CONTINUATION_INDENT_SIZE" value="2" />
<option name="TAB_SIZE" value="2" />
</indentOptions>
</codeStyleSettings>
<codeStyleSettings language="JavaScript">
<option name="SOFT_MARGINS" value="80" />
<indentOptions>
<option name="INDENT_SIZE" value="2" />
<option name="CONTINUATION_INDENT_SIZE" value="2" />
<option name="TAB_SIZE" value="2" />
</indentOptions>
</codeStyleSettings>
<codeStyleSettings language="TypeScript">
<option name="SOFT_MARGINS" value="80" />
<indentOptions>
<option name="INDENT_SIZE" value="2" />
<option name="CONTINUATION_INDENT_SIZE" value="2" />
<option name="TAB_SIZE" value="2" />
</indentOptions>
</codeStyleSettings>
<codeStyleSettings language="Vue">
<option name="SOFT_MARGINS" value="80" />
<indentOptions>
<option name="CONTINUATION_INDENT_SIZE" value="2" />
</indentOptions>
</codeStyleSettings>
</code_scheme>
</component>

Wyświetl plik

@ -0,0 +1,5 @@
<component name="ProjectCodeStyleConfiguration">
<state>
<option name="USE_PER_PROJECT_SETTINGS" value="true" />
</state>
</component>

Wyświetl plik

@ -0,0 +1,25 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$">
<excludeFolder url="file://$MODULE_DIR$/.venv" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
<component name="PyDocumentationSettings">
<option name="format" value="PLAIN" />
<option name="myDocStringFormat" value="Plain" />
</component>
<component name="TemplatesService">
<option name="TEMPLATE_CONFIGURATION" value="Jinja2" />
<option name="TEMPLATE_FOLDERS">
<list>
<option value="$MODULE_DIR$/datasette/templates" />
</list>
</option>
</component>
<component name="TestRunnerService">
<option name="PROJECT_TEST_RUNNER" value="py.test" />
</component>
</module>

Wyświetl plik

@ -0,0 +1,6 @@
<component name="InspectionProjectProfileManager">
<settings>
<option name="USE_PROJECT_PROFILE" value="false" />
<version value="1.0" />
</settings>
</component>

7
.idea/misc.xml 100644
Wyświetl plik

@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="Black">
<option name="sdkName" value="Python 3.8 (datasette) (2)" />
</component>
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.8 (datasette) (2)" project-jdk-type="Python SDK" />
</project>

Wyświetl plik

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/datasette.iml" filepath="$PROJECT_DIR$/.idea/datasette.iml" />
</modules>
</component>
</project>

6
.idea/vcs.xml 100644
Wyświetl plik

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="" vcs="Git" />
</component>
</project>

Wyświetl plik

@ -1,7 +1,7 @@
from datasette.permissions import Permission # noqa
from datasette.version import __version_info__, __version__ # noqa
from datasette.events import Event # noqa
from datasette.utils.asgi import Forbidden, NotFound, Request, Response # noqa
from datasette.views.error_module import Forbidden,NotFound,Response, Request
from datasette.utils import actor_matches_allow # noqa
from datasette.views import Context # noqa
from .hookspecs import hookimpl # noqa

Wyświetl plik

@ -60,11 +60,10 @@ from .views.row import RowView, RowDeleteView, RowUpdateView
from .renderer import json_renderer
from .url_builder import Urls
from .database import Database, QueryInterrupted
from datasette.views.error_module import StartupError
from .utils import (
PrefixedUrlString,
SPATIALITE_FUNCTIONS,
StartupError,
async_call_with_supported_arguments,
await_me_maybe,
call_with_supported_arguments,
@ -87,19 +86,14 @@ from .utils import (
)
from .utils.asgi import (
AsgiLifespan,
Forbidden,
NotFound,
DatabaseNotFound,
TableNotFound,
RowNotFound,
Request,
Response,
AsgiRunOnFirstRequest,
asgi_static,
asgi_send,
asgi_send_file,
asgi_send_redirect,
)
from .views.error_module import (Forbidden, NotFound, DatabaseNotFound,
TableNotFound, RowNotFound, Request, Response)
from .utils.internal_db import init_internal_db, populate_schema_tables
from .utils.sqlite import (
sqlite3,
@ -240,27 +234,27 @@ class Datasette:
ERROR = 3
def __init__(
self,
files=None,
immutables=None,
cache_headers=True,
cors=False,
inspect_data=None,
config=None,
metadata=None,
sqlite_extensions=None,
template_dir=None,
plugins_dir=None,
static_mounts=None,
memory=False,
settings=None,
secret=None,
version_note=None,
config_dir=None,
pdb=False,
crossdb=False,
nolock=False,
internal=None,
self,
files=None,
immutables=None,
cache_headers=True,
cors=False,
inspect_data=None,
config=None,
metadata=None,
sqlite_extensions=None,
template_dir=None,
plugins_dir=None,
static_mounts=None,
memory=False,
settings=None,
secret=None,
version_note=None,
config_dir=None,
pdb=False,
crossdb=False,
nolock=False,
internal=None,
):
self._startup_invoked = False
assert config_dir is None or isinstance(
@ -278,9 +272,9 @@ class Datasette:
db_files.extend(config_dir.glob("*.{}".format(ext)))
self.files += tuple(str(f) for f in db_files)
if (
config_dir
and (config_dir / "inspect-data.json").exists()
and not inspect_data
config_dir
and (config_dir / "inspect-data.json").exists()
and not inspect_data
):
inspect_data = json.loads((config_dir / "inspect-data.json").read_text())
if not immutables:
@ -447,7 +441,7 @@ class Datasette:
environment = self._jinja_env
if request:
for environment in pm.hook.jinja2_environment_from_request(
datasette=self, request=request, env=environment
datasette=self, request=request, env=environment
):
pass
return environment
@ -539,7 +533,7 @@ class Datasette:
abbrs[p.abbr] = p
self.permissions[p.name] = p
for hook in pm.hook.prepare_jinja2_environment(
env=self._jinja_env, datasette=self
env=self._jinja_env, datasette=self
):
await await_me_maybe(hook)
for hook in pm.hook.startup(datasette=self):
@ -553,13 +547,13 @@ class Datasette:
return URLSafeSerializer(self._secret, namespace).loads(signed)
def create_token(
self,
actor_id: str,
*,
expires_after: Optional[int] = None,
restrict_all: Optional[Iterable[str]] = None,
restrict_database: Optional[Dict[str, Iterable[str]]] = None,
restrict_resource: Optional[Dict[str, Dict[str, Iterable[str]]]] = None,
self,
actor_id: str,
*,
expires_after: Optional[int] = None,
restrict_all: Optional[Iterable[str]] = None,
restrict_database: Optional[Dict[str, Iterable[str]]] = None,
restrict_resource: Optional[Dict[str, Dict[str, Iterable[str]]]] = None,
):
token = {"a": actor_id, "t": int(time.time())}
if expires_after:
@ -652,12 +646,12 @@ class Datasette:
Returns None if metadata value is not found.
"""
assert not (
database is None and table is not None
database is None and table is not None
), "Cannot call metadata() with table= specified but not database="
metadata = {}
for hook_dbs in pm.hook.get_metadata(
datasette=self, key=key, database=database, table=table
datasette=self, key=key, database=database, table=table
):
metadata = self._metadata_recursive_update(metadata, hook_dbs)
@ -726,7 +720,7 @@ class Datasette:
if table:
table_plugin_config = (
((db_config.get("tables") or {}).get(table) or {}).get("plugins") or {}
((db_config.get("tables") or {}).get(table) or {}).get("plugins") or {}
).get(plugin_name)
# fallback to db_config or top-level config, in that order, if needed
@ -745,18 +739,18 @@ class Datasette:
if not hasattr(self, "_app_css_hash"):
with open(os.path.join(str(app_root), "datasette/static/app.css")) as fp:
self._app_css_hash = hashlib.sha1(fp.read().encode("utf8")).hexdigest()[
:6
]
:6
]
return self._app_css_hash
async def get_canned_queries(self, database_name, actor):
queries = (
((self.config or {}).get("databases") or {}).get(database_name) or {}
).get("queries") or {}
((self.config or {}).get("databases") or {}).get(database_name) or {}
).get("queries") or {}
for more_queries in pm.hook.canned_queries(
datasette=self,
database=database_name,
actor=actor,
datasette=self,
database=database_name,
actor=actor,
):
more_queries = await await_me_maybe(more_queries)
queries.update(more_queries or {})
@ -782,7 +776,7 @@ class Datasette:
"source_url": metadata.get("source_url") or self.metadata("source_url"),
"license": metadata.get("license") or self.metadata("license"),
"license_url": metadata.get("license_url")
or self.metadata("license_url"),
or self.metadata("license_url"),
"about": metadata.get("about") or self.metadata("about"),
"about_url": metadata.get("about_url") or self.metadata("about_url"),
}
@ -852,9 +846,9 @@ class Datasette:
# Database link
if database:
if await self.permission_allowed(
actor=actor,
action="view-database",
resource=database,
actor=actor,
action="view-database",
resource=database,
):
crumbs.append(
{
@ -866,9 +860,9 @@ class Datasette:
if table:
assert database, "table= requires database="
if await self.permission_allowed(
actor=actor,
action="view-table",
resource=(database, table),
actor=actor,
action="view-table",
resource=(database, table),
):
crumbs.append(
{
@ -879,7 +873,7 @@ class Datasette:
return crumbs
async def actors_from_ids(
self, actor_ids: Iterable[Union[str, int]]
self, actor_ids: Iterable[Union[str, int]]
) -> Dict[Union[id, str], Dict]:
result = pm.hook.actors_from_ids(datasette=self, actor_ids=actor_ids)
if result is None:
@ -896,7 +890,7 @@ class Datasette:
await await_me_maybe(hook)
async def permission_allowed(
self, actor, action, resource=None, *, default=DEFAULT_NOT_SET
self, actor, action, resource=None, *, default=DEFAULT_NOT_SET
):
"""Check permissions using the permissions_allowed plugin hook"""
result = None
@ -906,10 +900,10 @@ class Datasette:
opinions = []
# Every plugin is consulted for their opinion
for check in pm.hook.permission_allowed(
datasette=self,
actor=actor,
action=action,
resource=resource,
datasette=self,
actor=actor,
action=action,
resource=resource,
):
check = await await_me_maybe(check)
if check is not None:
@ -941,9 +935,9 @@ class Datasette:
return result
async def ensure_permissions(
self,
actor: dict,
permissions: Sequence[Union[Tuple[str, Union[str, Tuple[str, str]]], str]],
self,
actor: dict,
permissions: Sequence[Union[Tuple[str, Union[str, Tuple[str, str]]], str]],
):
"""
permissions is a list of (action, resource) tuples or 'action' strings
@ -976,18 +970,18 @@ class Datasette:
raise Forbidden(action)
async def check_visibility(
self,
actor: dict,
action: Optional[str] = None,
resource: Optional[Union[str, Tuple[str, str]]] = None,
permissions: Optional[
Sequence[Union[Tuple[str, Union[str, Tuple[str, str]]], str]]
] = None,
self,
actor: dict,
action: Optional[str] = None,
resource: Optional[Union[str, Tuple[str, str]]] = None,
permissions: Optional[
Sequence[Union[Tuple[str, Union[str, Tuple[str, str]]], str]]
] = None,
):
"""Returns (visible, private) - visible = can you see it, private = can others see it too"""
if permissions:
assert (
not action and not resource
not action and not resource
), "Can't use action= or resource= with permissions="
else:
permissions = [(action, resource)]
@ -1005,14 +999,14 @@ class Datasette:
return True, False
async def execute(
self,
db_name,
sql,
params=None,
truncate=False,
custom_time_limit=None,
page_size=None,
log_sql_errors=True,
self,
db_name,
sql,
params=None,
truncate=False,
custom_time_limit=None,
page_size=None,
log_sql_errors=True,
):
return await self.databases[db_name].execute(
sql,
@ -1076,7 +1070,7 @@ class Datasette:
def absolute_url(self, request, path):
url = urllib.parse.urljoin(request.url, path)
if url.startswith("http://") and self.setting("force_https_urls"):
url = "https://" + url[len("http://") :]
url = "https://" + url[len("http://"):]
return url
def _register_custom_units(self):
@ -1104,8 +1098,8 @@ class Datasette:
sqlite_version = conn.execute("select sqlite_version()").fetchone()[0]
sqlite_extensions = {}
for extension, testsql, hasversion in (
("json1", "SELECT json('{}')", False),
("spatialite", "SELECT spatialite_version()", True),
("json1", "SELECT json('{}')", False),
("spatialite", "SELECT spatialite_version()", True),
):
try:
result = conn.execute(testsql)
@ -1249,11 +1243,11 @@ class Datasette:
)
async def render_template(
self,
templates: Union[List[str], str, Template],
context: Optional[Union[Dict[str, Any], Context]] = None,
request: Optional[Request] = None,
view_name: Optional[str] = None,
self,
templates: Union[List[str], str, Template],
context: Optional[Union[Dict[str, Any], Context]] = None,
request: Optional[Request] = None,
view_name: Optional[str] = None,
):
if not self._startup_invoked:
raise Exception("render_template() called before await ds.invoke_startup()")
@ -1269,13 +1263,13 @@ class Datasette:
body_scripts = []
# pylint: disable=no-member
for extra_script in pm.hook.extra_body_script(
template=template.name,
database=context.get("database"),
table=context.get("table"),
columns=context.get("columns"),
view_name=view_name,
request=request,
datasette=self,
template=template.name,
database=context.get("database"),
table=context.get("table"),
columns=context.get("columns"),
view_name=view_name,
request=request,
datasette=self,
):
extra_script = await await_me_maybe(extra_script)
if isinstance(extra_script, dict):
@ -1289,13 +1283,13 @@ class Datasette:
extra_template_vars = {}
# pylint: disable=no-member
for extra_vars in pm.hook.extra_template_vars(
template=template.name,
database=context.get("database"),
table=context.get("table"),
columns=context.get("columns"),
view_name=view_name,
request=request,
datasette=self,
template=template.name,
database=context.get("database"),
table=context.get("table"),
columns=context.get("columns"),
view_name=view_name,
request=request,
datasette=self,
):
extra_vars = await await_me_maybe(extra_vars)
assert isinstance(extra_vars, dict), "extra_vars is of type {}".format(
@ -1306,9 +1300,9 @@ class Datasette:
async def menu_links():
links = []
for hook in pm.hook.menu_links(
datasette=self,
actor=request.actor if request else None,
request=request or None,
datasette=self,
actor=request.actor if request else None,
request=request or None,
):
extra_links = await await_me_maybe(hook)
if extra_links:
@ -1325,8 +1319,8 @@ class Datasette:
"menu_links": menu_links,
"display_actor": display_actor,
"show_logout": request is not None
and "ds_actor" in request.cookies
and request.actor,
and "ds_actor" in request.cookies
and request.actor,
"app_css_hash": self.app_css_hash(),
"zip": zip,
"body_scripts": body_scripts,
@ -1356,13 +1350,13 @@ class Datasette:
seen_urls = set()
collected = []
for hook in getattr(pm.hook, key)(
template=template.name,
database=context.get("database"),
table=context.get("table"),
columns=context.get("columns"),
view_name=view_name,
request=request,
datasette=self,
template=template.name,
database=context.get("database"),
table=context.get("table"),
columns=context.get("columns"),
view_name=view_name,
request=request,
datasette=self,
):
hook = await await_me_maybe(hook)
collected.extend(hook)
@ -1553,7 +1547,8 @@ class Datasette:
return self.get_database(route=database_route)
except KeyError:
raise DatabaseNotFound(
"Database not found: {}".format(database_route), database_route
"Invalid Database: The database {} was not found. Return to the previous page below to ensure your own Database was created properly.".format(
database_route), database_route
)
async def resolve_table(self, request):
@ -1566,7 +1561,8 @@ class Datasette:
is_view = await db.view_exists(table_name)
if not (table_exists or is_view):
raise TableNotFound(
"Table not found: {}".format(table_name), db.name, table_name
"Invalid Table: {} was not found. Return to the previous page below, table is not present within the Database file.".format(
table_name), db.name, table_name
)
return ResolvedTable(db, table_name, is_view)
@ -1578,7 +1574,8 @@ class Datasette:
row = results.first()
if row is None:
raise RowNotFound(
"Row not found: {}".format(pk_values), db.name, table_name, pk_values
"Invalid Row: The row id {} is invalid on the table. It may be spelled incorrectly or not present on the table, use the button below to go back.".format(
pk_values), db.name, table_name, pk_values
)
return ResolvedRow(db, table_name, sql, params, pks, pk_values, results.first())
@ -1628,7 +1625,7 @@ class DatasetteRouter:
# Strip off base_url if present before routing
base_url = self.ds.setting("base_url")
if base_url != "/" and path.startswith(base_url):
path = "/" + path[len(base_url) :]
path = "/" + path[len(base_url):]
scope = dict(scope, route_path=path)
request = Request(scope, receive)
# Populate request_messages if ds_messages cookie is present
@ -1642,9 +1639,9 @@ class DatasetteRouter:
scope_modifications = {}
# Apply force_https_urls, if set
if (
self.ds.setting("force_https_urls")
and scope["type"] == "http"
and scope.get("scheme") != "https"
self.ds.setting("force_https_urls")
and scope["type"] == "http"
and scope.get("scheme") != "https"
):
scope_modifications["scheme"] = "https"
# Handle authentication
@ -1675,7 +1672,7 @@ class DatasetteRouter:
except Forbidden as exception:
# Try the forbidden() plugin hook
for custom_response in pm.hook.forbidden(
datasette=self.ds, request=request, message=exception.args[0]
datasette=self.ds, request=request, message=exception.args[0]
):
custom_response = await await_me_maybe(custom_response)
assert (
@ -1721,7 +1718,7 @@ class DatasetteRouter:
if "{" in filepath and filepath.startswith("pages/")
]
page_routes = [
(route_pattern_from_filepath(filepath[len("pages/") :]), filepath)
(route_pattern_from_filepath(filepath[len("pages/"):]), filepath)
for filepath in pattern_templates
]
try:
@ -1793,9 +1790,9 @@ class DatasetteRouter:
async def handle_exception(self, request, send, exception):
responses = []
for hook in pm.hook.handle_exception(
datasette=self.ds,
request=request,
exception=exception,
datasette=self.ds,
request=request,
exception=exception,
):
response = await await_me_maybe(hook)
if response is not None:
@ -1935,8 +1932,8 @@ class DatasetteClient:
async def _request(self, method, path, **kwargs):
async with httpx.AsyncClient(
transport=httpx.ASGITransport(app=self.app),
cookies=kwargs.pop("cookies", None),
transport=httpx.ASGITransport(app=self.app),
cookies=kwargs.pop("cookies", None),
) as client:
return await getattr(client, method)(self._fix(path), **kwargs)
@ -1964,8 +1961,8 @@ class DatasetteClient:
async def request(self, method, path, **kwargs):
avoid_path_rewrites = kwargs.pop("avoid_path_rewrites", None)
async with httpx.AsyncClient(
transport=httpx.ASGITransport(app=self.app),
cookies=kwargs.pop("cookies", None),
transport=httpx.ASGITransport(app=self.app),
cookies=kwargs.pop("cookies", None),
) as client:
return await client.request(
method, self._fix(path, avoid_path_rewrites), **kwargs

Wyświetl plik

@ -1,5 +1,5 @@
from datasette import hookimpl
from datasette.utils.asgi import Response, BadRequest
from datasette.views.error_module import Response, BadRequest
from datasette.utils import to_css_class
import hashlib

Wyświetl plik

@ -21,9 +21,9 @@ from .app import (
SQLITE_LIMIT_ATTACHED,
pm,
)
from datasette.views.error_module import StartupError
from .utils import (
LoadExtension,
StartupError,
check_connection,
find_spatialite,
parse_metadata,
@ -248,20 +248,20 @@ def plugins(all, requirements, plugins_dir):
@click.option("--about", help="About label for metadata")
@click.option("--about_url", help="About URL for metadata")
def package(
files,
tag,
metadata,
extra_options,
branch,
template_dir,
plugins_dir,
static,
install,
spatialite,
version_note,
secret,
port,
**extra_metadata,
files,
tag,
metadata,
extra_options,
branch,
template_dir,
plugins_dir,
static,
install,
spatialite,
version_note,
secret,
port,
**extra_metadata,
):
"""Package SQLite files into a Datasette Docker container"""
if not shutil.which("docker"):
@ -274,20 +274,20 @@ def package(
)
sys.exit(1)
with temporary_docker_directory(
files,
"datasette",
metadata=metadata,
extra_options=extra_options,
branch=branch,
template_dir=template_dir,
plugins_dir=plugins_dir,
static=static,
install=install,
spatialite=spatialite,
version_note=version_note,
secret=secret,
extra_metadata=extra_metadata,
port=port,
files,
"datasette",
metadata=metadata,
extra_options=extra_options,
branch=branch,
template_dir=template_dir,
plugins_dir=plugins_dir,
static=static,
install=install,
spatialite=spatialite,
version_note=version_note,
secret=secret,
extra_metadata=extra_metadata,
port=port,
):
args = ["docker", "build"]
if tag:
@ -352,9 +352,9 @@ def uninstall(packages, yes):
"--host",
default="127.0.0.1",
help=(
"Host for server. Defaults to 127.0.0.1 which means only connections "
"from the local machine will be allowed. Use 0.0.0.0 to listen to "
"all IPs and allow access from other machines."
"Host for server. Defaults to 127.0.0.1 which means only connections "
"from the local machine will be allowed. Use 0.0.0.0 to listen to "
"all IPs and allow access from other machines."
),
)
@click.option(
@ -478,38 +478,38 @@ def uninstall(packages, yes):
help="Path to a persistent Datasette internal SQLite database",
)
def serve(
files,
immutable,
host,
port,
uds,
reload,
cors,
sqlite_extensions,
inspect_file,
metadata,
template_dir,
plugins_dir,
static,
memory,
config,
settings,
secret,
root,
get,
token,
actor,
version_note,
help_settings,
pdb,
open_browser,
create,
crossdb,
nolock,
ssl_keyfile,
ssl_certfile,
internal,
return_instance=False,
files,
immutable,
host,
port,
uds,
reload,
cors,
sqlite_extensions,
inspect_file,
metadata,
template_dir,
plugins_dir,
static,
memory,
config,
settings,
secret,
root,
get,
token,
actor,
version_note,
help_settings,
pdb,
open_browser,
create,
crossdb,
nolock,
ssl_keyfile,
ssl_certfile,
internal,
return_instance=False,
):
"""Serve up specified SQLite database files with a web UI"""
if help_settings:
@ -582,12 +582,16 @@ def serve(
# Verify list of files, create if needed (and --create)
for file in files:
files = list(files)
if files[0] == 'serve':
files.pop(0)
files = tuple(files)
if not pathlib.Path(file).exists():
if create:
sqlite3.connect(file).execute("vacuum")
else:
raise click.ClickException(
"Invalid value for '[FILES]...': Path '{}' does not exist.".format(
"Invalid value for '[FILES]...': Path '{}' does not exist".format(
file
)
)
@ -601,7 +605,6 @@ def serve(
raise click.ClickException("Could not find SpatiaLite extension")
except StartupError as e:
raise click.ClickException(e.args[0])
if return_instance:
# Private utility mechanism for writing unit tests
return ds
@ -708,7 +711,7 @@ def serve(
help="Path to directory containing custom plugins",
)
def create_token(
id, secret, expires_after, alls, databases, resources, debug, plugins_dir
id, secret, expires_after, alls, databases, resources, debug, plugins_dir
):
"""
Create a signed API token for the specified actor ID
@ -777,7 +780,7 @@ def create_token(
)
click.echo(token)
if debug:
encoded = token[len("dstok_") :]
encoded = token[len("dstok_"):]
click.echo("\nDecoded:\n")
click.echo(json.dumps(ds.unsign(encoded, namespace="token"), indent=2))
@ -810,9 +813,9 @@ async def check_databases(ds):
)
# If --crossdb and more than SQLITE_LIMIT_ATTACHED show warning
if (
ds.crossdb
and len([db for db in ds.databases.values() if not db.is_memory])
> SQLITE_LIMIT_ATTACHED
ds.crossdb
and len([db for db in ds.databases.values() if not db.is_memory])
> SQLITE_LIMIT_ATTACHED
):
msg = (
"Warning: --crossdb only works with the first {} attached databases".format(

Wyświetl plik

@ -20,7 +20,7 @@ from .utils import (
table_columns,
table_column_details,
)
from .inspect import inspect_hash
from .my_inspect import inspect_hash
connections = threading.local()

Wyświetl plik

@ -1,6 +1,5 @@
from datasette import hookimpl
from datasette.views.base import DatasetteError
from datasette.utils.asgi import BadRequest
from datasette.views.error_module import DatasetteError, BadRequest
import json
import numbers
from .utils import detect_json1, escape_sqlite, path_with_removed_args

Wyświetl plik

@ -1,5 +1,5 @@
from datasette import hookimpl, Response
from datasette import hookimpl
from datasette.views.error_module import Response
@hookimpl(trylast=True)
def forbidden(datasette, request, message):

Wyświetl plik

@ -1,9 +1,7 @@
from datasette import hookimpl, Response
from datasette import hookimpl
from .utils import add_cors_headers
from .utils.asgi import (
Base400,
)
from .views.base import DatasetteError
from .views.error_module import Response, Base400
from .views.error_module import DatasetteError
from markupsafe import Markup
import pdb
import traceback

Wyświetl plik

@ -1,6 +1,6 @@
import hashlib
from .utils import (
from datasette.utils import (
detect_spatialite,
detect_fts,
detect_primary_keys,

Wyświetl plik

@ -6,7 +6,7 @@ from datasette.utils import (
path_from_row_pks,
sqlite3,
)
from datasette.utils.asgi import Response
from datasette.views.error_module import Response
def convert_specific_columns_to_json(rows, columns, json_cols):

Wyświetl plik

@ -1,11 +1,10 @@
{% extends "base.html" %}
{% block title %}{% if title %}{{ title }}{% else %}Error {{ status }}{% endif %}{% endblock %}
{% block content %}
<h1>{% if title %}{{ title }}{% else %}Error {{ status }}{% endif %}</h1>
<div style="padding: 1em; margin: 1em 0; border: 3px solid red;">{{ error }}</div>
<a href="javascript:window.history.back();">Return to Previous</a>
{% endblock %}

Wyświetl plik

@ -469,6 +469,7 @@ def temporary_docker_directory(
file_names = [os.path.split(f)[-1] for f in files]
if metadata:
metadata_content = parse_metadata(metadata.read())
verify_metadata(metadata_content)
else:
metadata_content = {}
# Merge in the non-null values in extra_metadata
@ -521,6 +522,25 @@ def temporary_docker_directory(
os.chdir(saved_cwd)
def verify_metadata(metadata_content):
    """
    Verify that parsed metadata contains every required top-level key.

    Args:
        metadata_content (dict): The metadata to be verified.

    Returns:
        bool: True if the metadata is valid, False otherwise.  Every
        missing key is reported on stdout (not just the first one found),
        so the user can correct the metadata in a single pass.
    """
    required_keys = (
        "title",
        "license",
        "license_url",
        "source",
        "source_url",
        "about",
        "about_url",
    )
    # Collect all missing keys first, then report each one.
    missing = [key for key in required_keys if key not in metadata_content]
    for key in missing:
        print(f"Error: Missing required key '{key}' in metadata")
    return not missing
def detect_primary_keys(conn, table):
"""Figure out primary keys for a table."""
columns = table_column_details(conn, table)
@ -1127,10 +1147,6 @@ class PrefixedUrlString(str):
return super().__getattribute__(name)
class StartupError(Exception):
pass
_re_named_parameter = re.compile(":([a-zA-Z0-9_]+)")

Wyświetl plik

@ -1,161 +1,12 @@
import hashlib
import json
from datasette.utils import MultiParams, calculate_etag
from datasette.utils import calculate_etag
from mimetypes import guess_type
from urllib.parse import parse_qs, urlunparse, parse_qsl
from pathlib import Path
from http.cookies import SimpleCookie, Morsel
import aiofiles
import aiofiles.os
# Workaround for adding samesite support to pre 3.8 python
Morsel._reserved["samesite"] = "SameSite"
# Thanks, Starlette:
# https://github.com/encode/starlette/blob/519f575/starlette/responses.py#L17
# Base class for HTTP error exceptions raised by the ASGI layer.
# Subclasses override ``status`` with the HTTP status code to return.
class Base400(Exception):
    status = 400
class NotFound(Base400):
    # 404: the requested resource does not exist.
    status = 404
class DatabaseNotFound(NotFound):
    """404 raised when a request references a database that is not attached."""

    def __init__(self, message, database_name):
        super().__init__(message)
        # Name of the database that could not be found.
        self.database_name = database_name
class TableNotFound(NotFound):
    """404 raised when a request references a table missing from its database."""

    def __init__(self, message, database_name, table):
        super().__init__(message)
        # Database that was searched and the table name that was not found.
        self.database_name = database_name
        self.table = table
class RowNotFound(NotFound):
    """404 raised when a primary-key lookup matches no row in the table."""

    def __init__(self, message, database_name, table, pk_values):
        super().__init__(message)
        self.database_name = database_name
        # NOTE(review): stored as ``table_name`` here while the sibling
        # TableNotFound stores ``table`` — callers depend on these exact
        # attribute names, so the inconsistency is documented, not changed.
        self.table_name = table
        # Primary-key values that failed to match a row.
        self.pk_values = pk_values
class Forbidden(Base400):
    # 403: the actor is not permitted to perform the requested action.
    status = 403
class BadRequest(Base400):
    # 400: the request itself is malformed or invalid.
    status = 400
# Accepted values for the ``samesite`` cookie attribute
# (validated by Response.set_cookie below).
SAMESITE_VALUES = ("strict", "lax", "none")
class Request:
    """
    Wrapper around an ASGI HTTP connection scope and its ``receive`` callable.

    Exposes the pieces of the scope (method, URL, headers, cookies, query
    string, body) through convenient properties and async helpers.
    """

    def __init__(self, scope, receive):
        # Raw ASGI scope dict and the awaitable used to pull body messages.
        self.scope = scope
        self.receive = receive

    def __repr__(self):
        return '<asgi.Request method="{}" url="{}">'.format(self.method, self.url)

    @property
    def method(self):
        # HTTP method from the scope, e.g. "GET" or "POST".
        return self.scope["method"]

    @property
    def url(self):
        # Full URL reassembled from scheme, host, path and query string.
        return urlunparse(
            (self.scheme, self.host, self.path, None, self.query_string, None)
        )

    @property
    def url_vars(self):
        # Variables captured by the router, empty dict when none were set.
        return (self.scope.get("url_route") or {}).get("kwargs") or {}

    @property
    def scheme(self):
        # Defaults to "http" when the scope does not specify a scheme.
        return self.scope.get("scheme") or "http"

    @property
    def headers(self):
        # Headers as {lowercase-name: value}; ASGI supplies latin-1 bytes.
        return {
            k.decode("latin-1").lower(): v.decode("latin-1")
            for k, v in self.scope.get("headers") or []
        }

    @property
    def host(self):
        return self.headers.get("host") or "localhost"

    @property
    def cookies(self):
        # Parse the Cookie header into a plain {name: value} dict.
        cookies = SimpleCookie()
        cookies.load(self.headers.get("cookie", ""))
        return {key: value.value for key, value in cookies.items()}

    @property
    def path(self):
        # Prefer raw_path (undecoded bytes) when the server provides it,
        # stripping any query string; otherwise fall back to scope["path"].
        if self.scope.get("raw_path") is not None:
            return self.scope["raw_path"].decode("latin-1").partition("?")[0]
        else:
            path = self.scope["path"]
            if isinstance(path, str):
                return path
            else:
                return path.decode("utf-8")

    @property
    def query_string(self):
        return (self.scope.get("query_string") or b"").decode("latin-1")

    @property
    def full_path(self):
        # Path plus "?query" when a query string is present.
        qs = self.query_string
        return "{}{}".format(self.path, ("?" + qs) if qs else "")

    @property
    def args(self):
        # Query-string arguments, keeping blank values (e.g. "?a=").
        return MultiParams(parse_qs(qs=self.query_string, keep_blank_values=True))

    @property
    def actor(self):
        # Authenticated actor dict placed on the scope, or None.
        return self.scope.get("actor", None)

    async def post_body(self):
        """Read the complete request body from the ASGI receive channel."""
        body = b""
        more_body = True
        while more_body:
            message = await self.receive()
            assert message["type"] == "http.request", message
            body += message.get("body", b"")
            more_body = message.get("more_body", False)
        return body

    async def post_vars(self):
        """Return form-encoded POST variables as a plain dict."""
        body = await self.post_body()
        return dict(parse_qsl(body.decode("utf-8"), keep_blank_values=True))

    @classmethod
    def fake(cls, path_with_query_string, method="GET", scheme="http", url_vars=None):
        """Useful for constructing Request objects for tests"""
        path, _, query_string = path_with_query_string.partition("?")
        # Minimal ASGI scope; receive is None because fake requests
        # are not expected to read a body.
        scope = {
            "http_version": "1.1",
            "method": method,
            "path": path,
            "raw_path": path_with_query_string.encode("latin-1"),
            "query_string": query_string.encode("latin-1"),
            "scheme": scheme,
            "type": "http",
        }
        if url_vars:
            scope["url_route"] = {"kwargs": url_vars}
        return cls(scope, None)
class AsgiLifespan:
def __init__(self, app, on_startup=None, on_shutdown=None):
@ -347,102 +198,6 @@ def asgi_static(root_path, chunk_size=4096, headers=None, content_type=None):
return inner_static
class Response:
    """Lightweight HTTP response that knows how to send itself over ASGI."""

    def __init__(self, body=None, status=200, headers=None, content_type="text/plain"):
        # body may be str, bytes or None (None is sent as an empty body)
        self.body = body
        self.status = status
        self.headers = headers or {}
        self._set_cookie_headers = []
        self.content_type = content_type

    async def asgi_send(self, send):
        """Send this response using the ASGI ``send`` callable.

        Emits one http.response.start message (status + headers, with one
        set-cookie header per set_cookie() call) followed by one
        http.response.body message.
        """
        headers = {}
        headers.update(self.headers)
        headers["content-type"] = self.content_type
        raw_headers = [
            [key.encode("utf-8"), value.encode("utf-8")]
            for key, value in headers.items()
        ]
        for set_cookie in self._set_cookie_headers:
            raw_headers.append([b"set-cookie", set_cookie.encode("utf-8")])
        await send(
            {
                "type": "http.response.start",
                "status": self.status,
                "headers": raw_headers,
            }
        )
        body = self.body
        if body is None:
            # Fix: Response() defaults body to None, which previously
            # crashed here with AttributeError on .encode()
            body = b""
        elif not isinstance(body, bytes):
            body = body.encode("utf-8")
        await send({"type": "http.response.body", "body": body})

    def set_cookie(
        self,
        key,
        value="",
        max_age=None,
        expires=None,
        path="/",
        domain=None,
        secure=False,
        httponly=False,
        samesite="lax",
    ):
        """Queue a Set-Cookie header to be emitted with this response."""
        assert samesite in SAMESITE_VALUES, "samesite should be one of {}".format(
            SAMESITE_VALUES
        )
        cookie = SimpleCookie()
        cookie[key] = value
        for prop_name, prop_value in (
            ("max_age", max_age),
            ("expires", expires),
            ("path", path),
            ("domain", domain),
            ("samesite", samesite),
        ):
            if prop_value is not None:
                # Morsel attribute names use "-" where the kwargs use "_"
                cookie[key][prop_name.replace("_", "-")] = prop_value
        for prop_name, prop_value in (("secure", secure), ("httponly", httponly)):
            if prop_value:
                cookie[key][prop_name] = True
        self._set_cookie_headers.append(cookie.output(header="").strip())

    @classmethod
    def html(cls, body, status=200, headers=None):
        """Construct a text/html response."""
        return cls(
            body,
            status=status,
            headers=headers,
            content_type="text/html; charset=utf-8",
        )

    @classmethod
    def text(cls, body, status=200, headers=None):
        """Construct a text/plain response; body is coerced with str()."""
        return cls(
            str(body),
            status=status,
            headers=headers,
            content_type="text/plain; charset=utf-8",
        )

    @classmethod
    def json(cls, body, status=200, headers=None, default=None):
        """Construct an application/json response; default= is passed to json.dumps."""
        return cls(
            json.dumps(body, default=default),
            status=status,
            headers=headers,
            content_type="application/json; charset=utf-8",
        )

    @classmethod
    def redirect(cls, path, status=302, headers=None):
        """Construct a redirect response with a Location header."""
        headers = headers or {}
        headers["Location"] = path
        return cls("", status=status, headers=headers)
class AsgiFileDownload:
def __init__(
self,
@ -478,4 +233,4 @@ class AsgiRunOnFirstRequest:
self._started = True
for hook in self.on_startup:
await hook()
return await self.asgi(scope, receive, send)
return await self.asgi(scope, receive, send)

Wyświetl plik

@ -6,12 +6,12 @@ import textwrap
import time
import urllib
from markupsafe import escape
from .error_module import DatasetteError, Request, Response, NotFound, BadRequest
import pint
from datasette.database import QueryInterrupted
from datasette.utils.asgi import Request
from datasette.utils import (
add_cors_headers,
await_me_maybe,
@ -26,31 +26,12 @@ from datasette.utils import (
sqlite3,
)
from datasette.utils.asgi import (
AsgiStream,
NotFound,
Response,
BadRequest,
AsgiStream
)
ureg = pint.UnitRegistry()
class DatasetteError(Exception):
    """Application error carrying an HTTP status plus display metadata.

    Args:
        message: human-readable error message (may be HTML when
            message_is_html is true).
        title: optional page title for the error page.
        error_dict: optional dict of extra error details ({} by default).
        status: HTTP status code to respond with (500 by default).
        template: optional template name for rendering the error.
        message_is_html: whether message is already-safe HTML.
    """

    def __init__(
        self,
        message,
        title=None,
        error_dict=None,
        status=500,
        template=None,
        message_is_html=False,
    ):
        # Call Exception.__init__ so str(exc) yields the message instead of ""
        super().__init__(message)
        self.message = message
        self.title = title
        self.error_dict = error_dict or {}
        self.status = status
        # Fix: template was accepted but silently discarded; store it so
        # error handlers can pick a custom template.
        self.template = template
        self.message_is_html = message_is_html
class View:
async def head(self, request, datasette):
@ -410,11 +391,6 @@ class DataView(BaseView):
add_cors_headers(response.headers)
return response
def _error(messages, status=400):
    """Shortcut for a JSON error response: {"ok": false, "errors": [...]}."""
    payload = {"ok": False, "errors": messages}
    return Response.json(payload, status=status)
async def stream_csv(datasette, fetch_data, request, database):
kwargs = {}
stream = request.args.get("_stream")

Wyświetl plik

@ -31,10 +31,11 @@ from datasette.utils import (
truncate_url,
InvalidSql,
)
from datasette.utils.asgi import AsgiFileDownload, NotFound, Response, Forbidden
from datasette.utils.asgi import AsgiFileDownload
from datasette.plugins import pm
from .error_module import DatasetteError, _error, NotFound, Response, Forbidden
from .base import BaseView, DatasetteError, View, _error, stream_csv
from .base import BaseView, View, stream_csv
class DatabaseView(View):
@ -342,7 +343,7 @@ async def database_download(request, datasette):
class QueryView(View):
async def post(self, request, datasette):
from datasette.app import TableNotFound
from datasette.views.error_module import TableNotFound
db = await datasette.resolve_database(request)
@ -431,7 +432,7 @@ class QueryView(View):
return Response.redirect(redirect_url or request.path)
async def get(self, request, datasette):
from datasette.app import TableNotFound
from datasette.views.error_module import TableNotFound
db = await datasette.resolve_database(request)
database = db.name
@ -933,8 +934,25 @@ class TableCreateView(BaseView):
return _error(["columns must be a list of objects"])
if not column.get("name") or not isinstance(column.get("name"), str):
return _error(["Column name is required"])
# Check if type is specified
if not column.get("type"):
column["type"] = "text"
# If type is not specified, check the values in the column
column_values = [value for value in column.get("values", []) if value is not None]
# Check if all values in the column are integers
if all(isinstance(value, int) for value in column_values):
column["type"] = "integer"
# Check if all values in the column are floats
elif all(isinstance(value, float) for value in column_values):
column["type"] = "float"
# Check if all values in the column are booleans
elif all(isinstance(value, bool) for value in column_values):
column["type"] = "boolean"
# If values are not all integers, floats, or booleans, set type as "text"
else:
column["type"] = "text"
if column["type"] not in self._supported_column_types:
return _error(
["Unsupported column type: {}".format(column["type"])]
@ -1140,4 +1158,4 @@ async def display_rows(datasette, database, request, rows, columns):
display_value = display_value[:truncate_cells] + "\u2026"
display_row.append(display_value)
display_rows.append(display_row)
return display_rows
return display_rows

Wyświetl plik

@ -0,0 +1,276 @@
from datasette.utils import MultiParams
from urllib.parse import parse_qsl, urlunparse, parse_qs
from http.cookies import SimpleCookie, Morsel
import json
# Workaround for adding samesite support to pre 3.8 python
Morsel._reserved["samesite"] = "SameSite"
# Thanks, Starlette:
# https://github.com/encode/starlette/blob/519f575/starlette/responses.py#L17
SAMESITE_VALUES = ("strict", "lax", "none")
class DatasetteError(Exception):
    """Application error carrying an HTTP status plus display metadata.

    Args:
        message: human-readable error message (may be HTML when
            message_is_html is true).
        title: optional page title for the error page.
        error_dict: optional dict of extra error details ({} by default).
        status: HTTP status code to respond with (500 by default).
        template: optional template name for rendering the error.
        message_is_html: whether message is already-safe HTML.
    """

    def __init__(
        self,
        message,
        title=None,
        error_dict=None,
        status=500,
        template=None,
        message_is_html=False,
    ):
        # Call Exception.__init__ so str(exc) yields the message instead of ""
        super().__init__(message)
        self.message = message
        self.title = title
        self.error_dict = error_dict or {}
        self.status = status
        # Fix: template was accepted but silently discarded; store it so
        # error handlers can pick a custom template.
        self.template = template
        self.message_is_html = message_is_html
class RowError(Exception):
    """Wraps an underlying exception raised while processing a single row."""

    def __init__(self, error):
        # The original exception, kept for the caller to inspect
        self.error = error
class StartupError(Exception):
    """Raised when the application fails during startup."""

    pass
def _error(messages, status=400):
    """Shortcut for a JSON error response: {"ok": false, "errors": [...]}."""
    payload = {"ok": False, "errors": messages}
    return Response.json(payload, status=status)
class Base400(Exception):
    """Base class for client-error exceptions; subclasses override status."""

    status = 400
class NotFound(Base400):
    """Generic 404 - requested resource does not exist."""

    status = 404
class DatabaseNotFound(NotFound):
    """404 raised when a request names a database that does not exist."""

    def __init__(self, message, database_name):
        self.database_name = database_name
        super().__init__(message)
class TableNotFound(NotFound):
    """404 raised when a request names a table missing from its database."""

    def __init__(self, message, database_name, table):
        self.database_name = database_name
        self.table = table
        super().__init__(message)
class RowNotFound(NotFound):
    """404 raised when a row looked up by primary key does not exist."""

    def __init__(self, message, database_name, table, pk_values):
        super().__init__(message)
        self.database_name = database_name
        # Keep the historical attribute name, but also expose .table for
        # consistency with the sibling TableNotFound exception.
        self.table_name = table
        self.table = table
        self.pk_values = pk_values
class Forbidden(Base400):
    """403 - the actor is not permitted to perform this action."""

    status = 403
class BadRequest(Base400):
    """400 - the request itself is malformed or invalid."""

    status = 400
class Request:
    """Minimal HTTP request object wrapping an ASGI scope.

    Exposes convenience properties over the raw scope dict plus async
    helpers for reading the request body.
    """

    def __init__(self, scope, receive):
        # scope: the ASGI connection scope dict
        # receive: the ASGI receive callable (may be None for fakes)
        self.scope = scope
        self.receive = receive

    def __repr__(self):
        return '<asgi.Request method="{}" url="{}">'.format(self.method, self.url)

    @property
    def method(self):
        """HTTP method from the scope, e.g. "GET"."""
        return self.scope["method"]

    @property
    def url(self):
        """Full URL rebuilt from scheme, host, path and query string."""
        return urlunparse(
            (self.scheme, self.host, self.path, None, self.query_string, None)
        )

    @property
    def url_vars(self):
        """Keyword arguments captured by the URL router ({} when absent)."""
        return (self.scope.get("url_route") or {}).get("kwargs") or {}

    @property
    def scheme(self):
        """URL scheme from the scope, defaulting to "http"."""
        return self.scope.get("scheme") or "http"

    @property
    def headers(self):
        """Headers as a dict with lower-cased keys.

        ASGI delivers headers as (bytes, bytes) pairs; both sides are
        decoded as latin-1. Duplicate names collapse to the last value.
        """
        return {
            k.decode("latin-1").lower(): v.decode("latin-1")
            for k, v in self.scope.get("headers") or []
        }

    @property
    def host(self):
        """Value of the Host header, falling back to "localhost"."""
        return self.headers.get("host") or "localhost"

    @property
    def cookies(self):
        """Cookies from the Cookie header as a {name: value} dict."""
        cookies = SimpleCookie()
        cookies.load(self.headers.get("cookie", ""))
        return {key: value.value for key, value in cookies.items()}

    @property
    def path(self):
        """Request path with any query string stripped.

        Prefers scope["raw_path"] (undecoded bytes from the server) when
        present; otherwise scope["path"], which may be str or bytes.
        """
        if self.scope.get("raw_path") is not None:
            # raw_path may still contain "?query" - keep only the path part
            return self.scope["raw_path"].decode("latin-1").partition("?")[0]
        else:
            path = self.scope["path"]
            if isinstance(path, str):
                return path
            else:
                return path.decode("utf-8")

    @property
    def query_string(self):
        """Raw query string decoded as latin-1 ("" when absent)."""
        return (self.scope.get("query_string") or b"").decode("latin-1")

    @property
    def full_path(self):
        """Path plus "?query" suffix, omitting "?" when there is no query."""
        qs = self.query_string
        return "{}{}".format(self.path, ("?" + qs) if qs else "")

    @property
    def args(self):
        """Query-string arguments wrapped in MultiParams (blank values kept)."""
        return MultiParams(parse_qs(qs=self.query_string, keep_blank_values=True))

    @property
    def actor(self):
        # Actor dict stored in the scope - presumably set by authentication
        # middleware upstream; None when unauthenticated. TODO confirm setter.
        return self.scope.get("actor", None)

    async def post_body(self):
        """Read and return the complete request body as bytes.

        Drains ASGI "http.request" messages until more_body is false.
        NOTE(review): uses assert for message-type validation, which is
        stripped under python -O.
        """
        body = b""
        more_body = True
        while more_body:
            message = await self.receive()
            assert message["type"] == "http.request", message
            body += message.get("body", b"")
            more_body = message.get("more_body", False)
        return body

    async def post_vars(self):
        """Parse the body as a form-encoded POST and return a plain dict.

        Duplicate keys collapse to the last value.
        """
        body = await self.post_body()
        return dict(parse_qsl(body.decode("utf-8"), keep_blank_values=True))

    @classmethod
    def fake(cls, path_with_query_string, method="GET", scheme="http", url_vars=None):
        """Useful for constructing Request objects for tests"""
        path, _, query_string = path_with_query_string.partition("?")
        scope = {
            "http_version": "1.1",
            "method": method,
            "path": path,
            "raw_path": path_with_query_string.encode("latin-1"),
            "query_string": query_string.encode("latin-1"),
            "scheme": scheme,
            "type": "http",
        }
        if url_vars:
            scope["url_route"] = {"kwargs": url_vars}
        return cls(scope, None)
class Response:
    """Lightweight HTTP response that knows how to send itself over ASGI."""

    def __init__(self, body=None, status=200, headers=None, content_type="text/plain"):
        # body may be str, bytes or None (None is sent as an empty body)
        self.body = body
        self.status = status
        self.headers = headers or {}
        self._set_cookie_headers = []
        self.content_type = content_type

    async def asgi_send(self, send):
        """Send this response using the ASGI ``send`` callable.

        Emits one http.response.start message (status + headers, with one
        set-cookie header per set_cookie() call) followed by one
        http.response.body message.
        """
        headers = {}
        headers.update(self.headers)
        headers["content-type"] = self.content_type
        raw_headers = [
            [key.encode("utf-8"), value.encode("utf-8")]
            for key, value in headers.items()
        ]
        for set_cookie in self._set_cookie_headers:
            raw_headers.append([b"set-cookie", set_cookie.encode("utf-8")])
        await send(
            {
                "type": "http.response.start",
                "status": self.status,
                "headers": raw_headers,
            }
        )
        body = self.body
        if body is None:
            # Fix: Response() defaults body to None, which previously
            # crashed here with AttributeError on .encode()
            body = b""
        elif not isinstance(body, bytes):
            body = body.encode("utf-8")
        await send({"type": "http.response.body", "body": body})

    def set_cookie(
        self,
        key,
        value="",
        max_age=None,
        expires=None,
        path="/",
        domain=None,
        secure=False,
        httponly=False,
        samesite="lax",
    ):
        """Queue a Set-Cookie header to be emitted with this response."""
        assert samesite in SAMESITE_VALUES, "samesite should be one of {}".format(
            SAMESITE_VALUES
        )
        cookie = SimpleCookie()
        cookie[key] = value
        for prop_name, prop_value in (
            ("max_age", max_age),
            ("expires", expires),
            ("path", path),
            ("domain", domain),
            ("samesite", samesite),
        ):
            if prop_value is not None:
                # Morsel attribute names use "-" where the kwargs use "_"
                cookie[key][prop_name.replace("_", "-")] = prop_value
        for prop_name, prop_value in (("secure", secure), ("httponly", httponly)):
            if prop_value:
                cookie[key][prop_name] = True
        self._set_cookie_headers.append(cookie.output(header="").strip())

    @classmethod
    def html(cls, body, status=200, headers=None):
        """Construct a text/html response."""
        return cls(
            body,
            status=status,
            headers=headers,
            content_type="text/html; charset=utf-8",
        )

    @classmethod
    def text(cls, body, status=200, headers=None):
        """Construct a text/plain response; body is coerced with str()."""
        return cls(
            str(body),
            status=status,
            headers=headers,
            content_type="text/plain; charset=utf-8",
        )

    @classmethod
    def json(cls, body, status=200, headers=None, default=None):
        """Construct an application/json response; default= is passed to json.dumps."""
        return cls(
            json.dumps(body, default=default),
            status=status,
            headers=headers,
            content_type="application/json; charset=utf-8",
        )

    @classmethod
    def redirect(cls, path, status=302, headers=None):
        """Construct a redirect response with a Location header."""
        headers = headers or {}
        headers["Location"] = path
        return cls("", status=status, headers=headers)

Wyświetl plik

@ -7,7 +7,7 @@ from datasette.utils import (
make_slot_function,
CustomJSONEncoder,
)
from datasette.utils.asgi import Response
from datasette.views.error_module import Response
from datasette.version import __version__
from .base import BaseView

Wyświetl plik

@ -1,13 +1,13 @@
from datasette.utils.asgi import NotFound, Forbidden, Response
from datasette.database import QueryInterrupted
from datasette.events import UpdateRowEvent, DeleteRowEvent
from .base import DataView, BaseView, _error
from .base import DataView, BaseView
from datasette.utils import (
await_me_maybe,
make_slot_function,
to_css_class,
escape_sqlite,
)
from .error_module import _error, NotFound, Forbidden, Response
from datasette.plugins import pm
import json
import sqlite_utils
@ -169,14 +169,8 @@ class RowView(DataView):
foreign_key_tables.append({**fk, **{"count": count, "link": link}})
return foreign_key_tables
class RowError(Exception):
    """Wraps an underlying exception raised while processing a single row."""

    def __init__(self, error):
        # The original exception, kept for the caller to inspect
        self.error = error
async def _resolve_row_and_check_permission(datasette, request, permission):
from datasette.app import DatabaseNotFound, TableNotFound, RowNotFound
from datasette.views.error_module import DatabaseNotFound, TableNotFound, RowNotFound
try:
resolved = await datasette.resolve_row(request)

Wyświetl plik

@ -1,6 +1,6 @@
import json
from datasette.events import LogoutEvent, LoginEvent, CreateTokenEvent
from datasette.utils.asgi import Response, Forbidden
from datasette.views.error_module import Response, Forbidden
from datasette.utils import (
actor_matches_allow,
add_cors_headers,

Wyświetl plik

@ -40,10 +40,11 @@ from datasette.utils import (
InvalidSql,
sqlite3,
)
from datasette.utils.asgi import BadRequest, Forbidden, NotFound, Response
from datasette.views.error_module import BadRequest, Forbidden, NotFound, Response
from datasette.filters import Filters
import sqlite_utils
from .base import BaseView, DatasetteError, ureg, _error, stream_csv
from .base import BaseView, ureg, stream_csv
from .error_module import DatasetteError, _error
from .database import QueryView
LINK_WITH_LABEL = (
@ -705,7 +706,12 @@ async def _sortable_columns_for_table(datasette, database_name, table_name, use_
db = datasette.databases[database_name]
table_metadata = await datasette.table_config(database_name, table_name)
if "sortable_columns" in table_metadata:
sortable_columns = set(table_metadata["sortable_columns"])
# fix now allows any primary key to be sorted as well with the metadata
sort_col = set(table_metadata["sortable_columns"])
pk_col = set(await db.primary_keys(table_name))
sortable_columns = [sort_col.pop()]
if len(pk_col) > 0:
sortable_columns.append(pk_col.pop())
else:
sortable_columns = set(await db.table_columns(table_name))
if use_rowid:
@ -713,7 +719,8 @@ async def _sortable_columns_for_table(datasette, database_name, table_name, use_
return sortable_columns
async def _sort_order(table_metadata, sortable_columns, request, order_by):
async def _sort_order(datasette, database_name, table_name, table_metadata, sortable_columns, request, order_by):
db = datasette.databases[database_name]
sort = request.args.get("_sort")
sort_desc = request.args.get("_sort_desc")
@ -770,7 +777,7 @@ async def table_view(datasette, request):
async def table_view_traced(datasette, request):
from datasette.app import TableNotFound
from datasette.views.error_module import TableNotFound
try:
resolved = await datasette.resolve_table(request)
@ -1042,7 +1049,7 @@ async def table_view_data(
)
sort, sort_desc, order_by = await _sort_order(
table_metadata, sortable_columns, request, order_by
datasette, database_name, table_name, table_metadata, sortable_columns, request, order_by
)
from_sql = "from {table_name} {where}".format(

8
pythonProject/.idea/.gitignore vendored 100644
Wyświetl plik

@ -0,0 +1,8 @@
# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml

Wyświetl plik

@ -0,0 +1,6 @@
<component name="InspectionProjectProfileManager">
<settings>
<option name="USE_PROJECT_PROFILE" value="false" />
<version value="1.0" />
</settings>
</component>

Wyświetl plik

@ -0,0 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.8 (pythonProject)" project-jdk-type="Python SDK" />
</project>

Wyświetl plik

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/pythonProject.iml" filepath="$PROJECT_DIR$/.idea/pythonProject.iml" />
</modules>
</component>
</project>

Wyświetl plik

@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$">
<excludeFolder url="file://$MODULE_DIR$/.venv" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>

Wyświetl plik

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$/.." vcs="Git" />
</component>
</project>

0
serve 100644
Wyświetl plik

Wyświetl plik

@ -133,14 +133,14 @@ async def test_insert_rows(ds_write, return_rows):
{},
None,
404,
["Database not found: data2"],
["Invalid Database: The database data2 was not found. Return to the previous page below to ensure your own Database was created properly."],
),
(
"/data/docs2/-/insert",
{},
None,
404,
["Table not found: docs2"],
["Invalid Table: docs2 was not found. Return to the previous page below, table is not present within the Database file."],
),
(
"/data/docs/-/insert",
@ -274,7 +274,7 @@ async def test_insert_rows(ds_write, return_rows):
{"rows": [{"title": "Test"}]},
None,
404,
["Table not found: badtable"],
["Invalid Table: badtable was not found. Return to the previous page below, table is not present within the Database file."],
),
# missing primary key
(
@ -598,7 +598,7 @@ async def test_delete_row_errors(ds_write, scenario):
assert (
response.json()["errors"] == ["Permission denied"]
if scenario == "no_token"
else ["Table not found: bad_table"]
else ["Invalid Table: bad_table was not found. Return to the previous page below, table is not present within the Database file."]
)
assert len((await ds_write.client.get("/data/docs.json?_shape=array")).json()) == 1
@ -703,7 +703,7 @@ async def test_update_row_check_permission(ds_write, scenario):
assert (
response.json()["errors"] == ["Permission denied"]
if scenario == "no_token"
else ["Table not found: bad_table"]
else ["Invalid Table: bad_table was not found. Return to the previous page below, table is not present within the Database file."]
)
@ -830,7 +830,7 @@ async def test_drop_table(ds_write, scenario):
assert response.json()["ok"] is False
expected_error = "Permission denied"
if scenario == "bad_table":
expected_error = "Table not found: bad_table"
expected_error = "Invalid Table: bad_table was not found. Return to the previous page below, table is not present within the Database file."
elif scenario == "immutable":
expected_error = "Database is immutable"
assert response.json()["errors"] == [expected_error]

Wyświetl plik

@ -36,7 +36,7 @@ async def test_table_json(ds_client):
async def test_table_not_exists_json(ds_client):
assert (await ds_client.get("/fixtures/blah.json")).json() == {
"ok": False,
"error": "Table not found: blah",
"error": "Invalid Table: blah was not found. Return to the previous page below, table is not present within the Database file.",
"status": 404,
"title": None,
}