+
+
\ No newline at end of file
diff --git a/.idea/inspectionProfiles/profiles_settings.xml b/.idea/inspectionProfiles/profiles_settings.xml
new file mode 100644
index 0000000000..105ce2da2d
--- /dev/null
+++ b/.idea/inspectionProfiles/profiles_settings.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/misc.xml b/.idea/misc.xml
new file mode 100644
index 0000000000..8d613e68be
--- /dev/null
+++ b/.idea/misc.xml
@@ -0,0 +1,7 @@
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/modules.xml b/.idea/modules.xml
new file mode 100644
index 0000000000..7e17799b0b
--- /dev/null
+++ b/.idea/modules.xml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/vcs.xml b/.idea/vcs.xml
new file mode 100644
index 0000000000..35eb1ddfbb
--- /dev/null
+++ b/.idea/vcs.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/datasette/__init__.py b/datasette/__init__.py
index 47d2b4f6db..b8552b98a1 100644
--- a/datasette/__init__.py
+++ b/datasette/__init__.py
@@ -1,7 +1,7 @@
from datasette.permissions import Permission # noqa
from datasette.version import __version_info__, __version__ # noqa
from datasette.events import Event # noqa
-from datasette.utils.asgi import Forbidden, NotFound, Request, Response # noqa
+from datasette.views.error_module import Forbidden, NotFound, Request, Response  # noqa
from datasette.utils import actor_matches_allow # noqa
from datasette.views import Context # noqa
from .hookspecs import hookimpl # noqa
diff --git a/datasette/app.py b/datasette/app.py
index 23d21600c6..c1c7c63ec8 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -60,11 +60,10 @@
from .renderer import json_renderer
from .url_builder import Urls
from .database import Database, QueryInterrupted
-
+from datasette.views.error_module import StartupError
from .utils import (
PrefixedUrlString,
SPATIALITE_FUNCTIONS,
- StartupError,
async_call_with_supported_arguments,
await_me_maybe,
call_with_supported_arguments,
@@ -87,19 +86,14 @@
)
from .utils.asgi import (
AsgiLifespan,
- Forbidden,
- NotFound,
- DatabaseNotFound,
- TableNotFound,
- RowNotFound,
- Request,
- Response,
AsgiRunOnFirstRequest,
asgi_static,
asgi_send,
asgi_send_file,
asgi_send_redirect,
)
+from .views.error_module import (Forbidden, NotFound, DatabaseNotFound,
+ TableNotFound, RowNotFound, Request, Response)
from .utils.internal_db import init_internal_db, populate_schema_tables
from .utils.sqlite import (
sqlite3,
@@ -240,27 +234,27 @@ class Datasette:
ERROR = 3
def __init__(
- self,
- files=None,
- immutables=None,
- cache_headers=True,
- cors=False,
- inspect_data=None,
- config=None,
- metadata=None,
- sqlite_extensions=None,
- template_dir=None,
- plugins_dir=None,
- static_mounts=None,
- memory=False,
- settings=None,
- secret=None,
- version_note=None,
- config_dir=None,
- pdb=False,
- crossdb=False,
- nolock=False,
- internal=None,
+ self,
+ files=None,
+ immutables=None,
+ cache_headers=True,
+ cors=False,
+ inspect_data=None,
+ config=None,
+ metadata=None,
+ sqlite_extensions=None,
+ template_dir=None,
+ plugins_dir=None,
+ static_mounts=None,
+ memory=False,
+ settings=None,
+ secret=None,
+ version_note=None,
+ config_dir=None,
+ pdb=False,
+ crossdb=False,
+ nolock=False,
+ internal=None,
):
self._startup_invoked = False
assert config_dir is None or isinstance(
@@ -278,9 +272,9 @@ def __init__(
db_files.extend(config_dir.glob("*.{}".format(ext)))
self.files += tuple(str(f) for f in db_files)
if (
- config_dir
- and (config_dir / "inspect-data.json").exists()
- and not inspect_data
+ config_dir
+ and (config_dir / "inspect-data.json").exists()
+ and not inspect_data
):
inspect_data = json.loads((config_dir / "inspect-data.json").read_text())
if not immutables:
@@ -447,7 +441,7 @@ def get_jinja_environment(self, request: Request = None) -> Environment:
environment = self._jinja_env
if request:
for environment in pm.hook.jinja2_environment_from_request(
- datasette=self, request=request, env=environment
+ datasette=self, request=request, env=environment
):
pass
return environment
@@ -539,7 +533,7 @@ async def invoke_startup(self):
abbrs[p.abbr] = p
self.permissions[p.name] = p
for hook in pm.hook.prepare_jinja2_environment(
- env=self._jinja_env, datasette=self
+ env=self._jinja_env, datasette=self
):
await await_me_maybe(hook)
for hook in pm.hook.startup(datasette=self):
@@ -553,13 +547,13 @@ def unsign(self, signed, namespace="default"):
return URLSafeSerializer(self._secret, namespace).loads(signed)
def create_token(
- self,
- actor_id: str,
- *,
- expires_after: Optional[int] = None,
- restrict_all: Optional[Iterable[str]] = None,
- restrict_database: Optional[Dict[str, Iterable[str]]] = None,
- restrict_resource: Optional[Dict[str, Dict[str, Iterable[str]]]] = None,
+ self,
+ actor_id: str,
+ *,
+ expires_after: Optional[int] = None,
+ restrict_all: Optional[Iterable[str]] = None,
+ restrict_database: Optional[Dict[str, Iterable[str]]] = None,
+ restrict_resource: Optional[Dict[str, Dict[str, Iterable[str]]]] = None,
):
token = {"a": actor_id, "t": int(time.time())}
if expires_after:
@@ -652,12 +646,12 @@ def metadata(self, key=None, database=None, table=None, fallback=True):
Returns None if metadata value is not found.
"""
assert not (
- database is None and table is not None
+ database is None and table is not None
), "Cannot call metadata() with table= specified but not database="
metadata = {}
for hook_dbs in pm.hook.get_metadata(
- datasette=self, key=key, database=database, table=table
+ datasette=self, key=key, database=database, table=table
):
metadata = self._metadata_recursive_update(metadata, hook_dbs)
@@ -726,7 +720,7 @@ def _plugin_config_nested(self, plugin_name, database, table=None, fallback=True
if table:
table_plugin_config = (
- ((db_config.get("tables") or {}).get(table) or {}).get("plugins") or {}
+ ((db_config.get("tables") or {}).get(table) or {}).get("plugins") or {}
).get(plugin_name)
# fallback to db_config or top-level config, in that order, if needed
@@ -745,18 +739,18 @@ def app_css_hash(self):
if not hasattr(self, "_app_css_hash"):
with open(os.path.join(str(app_root), "datasette/static/app.css")) as fp:
self._app_css_hash = hashlib.sha1(fp.read().encode("utf8")).hexdigest()[
- :6
- ]
+ :6
+ ]
return self._app_css_hash
async def get_canned_queries(self, database_name, actor):
queries = (
- ((self.config or {}).get("databases") or {}).get(database_name) or {}
- ).get("queries") or {}
+ ((self.config or {}).get("databases") or {}).get(database_name) or {}
+ ).get("queries") or {}
for more_queries in pm.hook.canned_queries(
- datasette=self,
- database=database_name,
- actor=actor,
+ datasette=self,
+ database=database_name,
+ actor=actor,
):
more_queries = await await_me_maybe(more_queries)
queries.update(more_queries or {})
@@ -782,7 +776,7 @@ def update_with_inherited_metadata(self, metadata):
"source_url": metadata.get("source_url") or self.metadata("source_url"),
"license": metadata.get("license") or self.metadata("license"),
"license_url": metadata.get("license_url")
- or self.metadata("license_url"),
+ or self.metadata("license_url"),
"about": metadata.get("about") or self.metadata("about"),
"about_url": metadata.get("about_url") or self.metadata("about_url"),
}
@@ -852,9 +846,9 @@ async def _crumb_items(self, request, table=None, database=None):
# Database link
if database:
if await self.permission_allowed(
- actor=actor,
- action="view-database",
- resource=database,
+ actor=actor,
+ action="view-database",
+ resource=database,
):
crumbs.append(
{
@@ -866,9 +860,9 @@ async def _crumb_items(self, request, table=None, database=None):
if table:
assert database, "table= requires database="
if await self.permission_allowed(
- actor=actor,
- action="view-table",
- resource=(database, table),
+ actor=actor,
+ action="view-table",
+ resource=(database, table),
):
crumbs.append(
{
@@ -879,7 +873,7 @@ async def _crumb_items(self, request, table=None, database=None):
return crumbs
async def actors_from_ids(
- self, actor_ids: Iterable[Union[str, int]]
+ self, actor_ids: Iterable[Union[str, int]]
) -> Dict[Union[id, str], Dict]:
result = pm.hook.actors_from_ids(datasette=self, actor_ids=actor_ids)
if result is None:
@@ -896,7 +890,7 @@ async def track_event(self, event: Event):
await await_me_maybe(hook)
async def permission_allowed(
- self, actor, action, resource=None, *, default=DEFAULT_NOT_SET
+ self, actor, action, resource=None, *, default=DEFAULT_NOT_SET
):
"""Check permissions using the permissions_allowed plugin hook"""
result = None
@@ -906,10 +900,10 @@ async def permission_allowed(
opinions = []
# Every plugin is consulted for their opinion
for check in pm.hook.permission_allowed(
- datasette=self,
- actor=actor,
- action=action,
- resource=resource,
+ datasette=self,
+ actor=actor,
+ action=action,
+ resource=resource,
):
check = await await_me_maybe(check)
if check is not None:
@@ -941,9 +935,9 @@ async def permission_allowed(
return result
async def ensure_permissions(
- self,
- actor: dict,
- permissions: Sequence[Union[Tuple[str, Union[str, Tuple[str, str]]], str]],
+ self,
+ actor: dict,
+ permissions: Sequence[Union[Tuple[str, Union[str, Tuple[str, str]]], str]],
):
"""
permissions is a list of (action, resource) tuples or 'action' strings
@@ -976,18 +970,18 @@ async def ensure_permissions(
raise Forbidden(action)
async def check_visibility(
- self,
- actor: dict,
- action: Optional[str] = None,
- resource: Optional[Union[str, Tuple[str, str]]] = None,
- permissions: Optional[
- Sequence[Union[Tuple[str, Union[str, Tuple[str, str]]], str]]
- ] = None,
+ self,
+ actor: dict,
+ action: Optional[str] = None,
+ resource: Optional[Union[str, Tuple[str, str]]] = None,
+ permissions: Optional[
+ Sequence[Union[Tuple[str, Union[str, Tuple[str, str]]], str]]
+ ] = None,
):
"""Returns (visible, private) - visible = can you see it, private = can others see it too"""
if permissions:
assert (
- not action and not resource
+ not action and not resource
), "Can't use action= or resource= with permissions="
else:
permissions = [(action, resource)]
@@ -1005,14 +999,14 @@ async def check_visibility(
return True, False
async def execute(
- self,
- db_name,
- sql,
- params=None,
- truncate=False,
- custom_time_limit=None,
- page_size=None,
- log_sql_errors=True,
+ self,
+ db_name,
+ sql,
+ params=None,
+ truncate=False,
+ custom_time_limit=None,
+ page_size=None,
+ log_sql_errors=True,
):
return await self.databases[db_name].execute(
sql,
@@ -1076,7 +1070,7 @@ async def expand_foreign_keys(self, actor, database, table, column, values):
def absolute_url(self, request, path):
url = urllib.parse.urljoin(request.url, path)
if url.startswith("http://") and self.setting("force_https_urls"):
- url = "https://" + url[len("http://") :]
+ url = "https://" + url[len("http://"):]
return url
def _register_custom_units(self):
@@ -1104,8 +1098,8 @@ def _versions(self):
sqlite_version = conn.execute("select sqlite_version()").fetchone()[0]
sqlite_extensions = {}
for extension, testsql, hasversion in (
- ("json1", "SELECT json('{}')", False),
- ("spatialite", "SELECT spatialite_version()", True),
+ ("json1", "SELECT json('{}')", False),
+ ("spatialite", "SELECT spatialite_version()", True),
):
try:
result = conn.execute(testsql)
@@ -1249,11 +1243,11 @@ def _register_renderers(self):
)
async def render_template(
- self,
- templates: Union[List[str], str, Template],
- context: Optional[Union[Dict[str, Any], Context]] = None,
- request: Optional[Request] = None,
- view_name: Optional[str] = None,
+ self,
+ templates: Union[List[str], str, Template],
+ context: Optional[Union[Dict[str, Any], Context]] = None,
+ request: Optional[Request] = None,
+ view_name: Optional[str] = None,
):
if not self._startup_invoked:
raise Exception("render_template() called before await ds.invoke_startup()")
@@ -1269,13 +1263,13 @@ async def render_template(
body_scripts = []
# pylint: disable=no-member
for extra_script in pm.hook.extra_body_script(
- template=template.name,
- database=context.get("database"),
- table=context.get("table"),
- columns=context.get("columns"),
- view_name=view_name,
- request=request,
- datasette=self,
+ template=template.name,
+ database=context.get("database"),
+ table=context.get("table"),
+ columns=context.get("columns"),
+ view_name=view_name,
+ request=request,
+ datasette=self,
):
extra_script = await await_me_maybe(extra_script)
if isinstance(extra_script, dict):
@@ -1289,13 +1283,13 @@ async def render_template(
extra_template_vars = {}
# pylint: disable=no-member
for extra_vars in pm.hook.extra_template_vars(
- template=template.name,
- database=context.get("database"),
- table=context.get("table"),
- columns=context.get("columns"),
- view_name=view_name,
- request=request,
- datasette=self,
+ template=template.name,
+ database=context.get("database"),
+ table=context.get("table"),
+ columns=context.get("columns"),
+ view_name=view_name,
+ request=request,
+ datasette=self,
):
extra_vars = await await_me_maybe(extra_vars)
assert isinstance(extra_vars, dict), "extra_vars is of type {}".format(
@@ -1306,9 +1300,9 @@ async def render_template(
async def menu_links():
links = []
for hook in pm.hook.menu_links(
- datasette=self,
- actor=request.actor if request else None,
- request=request or None,
+ datasette=self,
+ actor=request.actor if request else None,
+ request=request or None,
):
extra_links = await await_me_maybe(hook)
if extra_links:
@@ -1325,8 +1319,8 @@ async def menu_links():
"menu_links": menu_links,
"display_actor": display_actor,
"show_logout": request is not None
- and "ds_actor" in request.cookies
- and request.actor,
+ and "ds_actor" in request.cookies
+ and request.actor,
"app_css_hash": self.app_css_hash(),
"zip": zip,
"body_scripts": body_scripts,
@@ -1356,13 +1350,13 @@ async def _asset_urls(self, key, template, context, request, view_name):
seen_urls = set()
collected = []
for hook in getattr(pm.hook, key)(
- template=template.name,
- database=context.get("database"),
- table=context.get("table"),
- columns=context.get("columns"),
- view_name=view_name,
- request=request,
- datasette=self,
+ template=template.name,
+ database=context.get("database"),
+ table=context.get("table"),
+ columns=context.get("columns"),
+ view_name=view_name,
+ request=request,
+ datasette=self,
):
hook = await await_me_maybe(hook)
collected.extend(hook)
@@ -1553,7 +1547,8 @@ async def resolve_database(self, request):
return self.get_database(route=database_route)
except KeyError:
raise DatabaseNotFound(
- "Database not found: {}".format(database_route), database_route
+ "Invalid Database: The database {} was not found. Return to the previous page below to ensure your own Database was created properly.".format(
+ database_route), database_route
)
async def resolve_table(self, request):
@@ -1566,7 +1561,8 @@ async def resolve_table(self, request):
is_view = await db.view_exists(table_name)
if not (table_exists or is_view):
raise TableNotFound(
- "Table not found: {}".format(table_name), db.name, table_name
+ "Invalid Table: {} was not found. Return to the previous page below, table is not present within the Database file.".format(
+ table_name), db.name, table_name
)
return ResolvedTable(db, table_name, is_view)
@@ -1578,7 +1574,8 @@ async def resolve_row(self, request):
row = results.first()
if row is None:
raise RowNotFound(
- "Row not found: {}".format(pk_values), db.name, table_name, pk_values
+ "Invalid Row: The row id {} is invalid on the table. It may be spelled incorrectly or not present on the table, use the button below to go back.".format(
+ pk_values), db.name, table_name, pk_values
)
return ResolvedRow(db, table_name, sql, params, pks, pk_values, results.first())
@@ -1628,7 +1625,7 @@ async def route_path(self, scope, receive, send, path):
# Strip off base_url if present before routing
base_url = self.ds.setting("base_url")
if base_url != "/" and path.startswith(base_url):
- path = "/" + path[len(base_url) :]
+ path = "/" + path[len(base_url):]
scope = dict(scope, route_path=path)
request = Request(scope, receive)
# Populate request_messages if ds_messages cookie is present
@@ -1642,9 +1639,9 @@ async def route_path(self, scope, receive, send, path):
scope_modifications = {}
# Apply force_https_urls, if set
if (
- self.ds.setting("force_https_urls")
- and scope["type"] == "http"
- and scope.get("scheme") != "https"
+ self.ds.setting("force_https_urls")
+ and scope["type"] == "http"
+ and scope.get("scheme") != "https"
):
scope_modifications["scheme"] = "https"
# Handle authentication
@@ -1675,7 +1672,7 @@ async def route_path(self, scope, receive, send, path):
except Forbidden as exception:
# Try the forbidden() plugin hook
for custom_response in pm.hook.forbidden(
- datasette=self.ds, request=request, message=exception.args[0]
+ datasette=self.ds, request=request, message=exception.args[0]
):
custom_response = await await_me_maybe(custom_response)
assert (
@@ -1721,7 +1718,7 @@ async def handle_404(self, request, send, exception=None):
if "{" in filepath and filepath.startswith("pages/")
]
page_routes = [
- (route_pattern_from_filepath(filepath[len("pages/") :]), filepath)
+ (route_pattern_from_filepath(filepath[len("pages/"):]), filepath)
for filepath in pattern_templates
]
try:
@@ -1793,9 +1790,9 @@ def raise_404(message=""):
async def handle_exception(self, request, send, exception):
responses = []
for hook in pm.hook.handle_exception(
- datasette=self.ds,
- request=request,
- exception=exception,
+ datasette=self.ds,
+ request=request,
+ exception=exception,
):
response = await await_me_maybe(hook)
if response is not None:
@@ -1935,8 +1932,8 @@ def _fix(self, path, avoid_path_rewrites=False):
async def _request(self, method, path, **kwargs):
async with httpx.AsyncClient(
- transport=httpx.ASGITransport(app=self.app),
- cookies=kwargs.pop("cookies", None),
+ transport=httpx.ASGITransport(app=self.app),
+ cookies=kwargs.pop("cookies", None),
) as client:
return await getattr(client, method)(self._fix(path), **kwargs)
@@ -1964,8 +1961,8 @@ async def delete(self, path, **kwargs):
async def request(self, method, path, **kwargs):
avoid_path_rewrites = kwargs.pop("avoid_path_rewrites", None)
async with httpx.AsyncClient(
- transport=httpx.ASGITransport(app=self.app),
- cookies=kwargs.pop("cookies", None),
+ transport=httpx.ASGITransport(app=self.app),
+ cookies=kwargs.pop("cookies", None),
) as client:
return await client.request(
method, self._fix(path, avoid_path_rewrites), **kwargs
diff --git a/datasette/blob_renderer.py b/datasette/blob_renderer.py
index 4d8c6beac8..69c493e8b6 100644
--- a/datasette/blob_renderer.py
+++ b/datasette/blob_renderer.py
@@ -1,5 +1,5 @@
from datasette import hookimpl
-from datasette.utils.asgi import Response, BadRequest
+from datasette.views.error_module import Response, BadRequest
from datasette.utils import to_css_class
import hashlib
diff --git a/datasette/cli.py b/datasette/cli.py
index 0c8a854102..8e3313c51d 100644
--- a/datasette/cli.py
+++ b/datasette/cli.py
@@ -21,9 +21,9 @@
SQLITE_LIMIT_ATTACHED,
pm,
)
+from datasette.views.error_module import StartupError
from .utils import (
LoadExtension,
- StartupError,
check_connection,
find_spatialite,
parse_metadata,
@@ -248,20 +248,20 @@ def plugins(all, requirements, plugins_dir):
@click.option("--about", help="About label for metadata")
@click.option("--about_url", help="About URL for metadata")
def package(
- files,
- tag,
- metadata,
- extra_options,
- branch,
- template_dir,
- plugins_dir,
- static,
- install,
- spatialite,
- version_note,
- secret,
- port,
- **extra_metadata,
+ files,
+ tag,
+ metadata,
+ extra_options,
+ branch,
+ template_dir,
+ plugins_dir,
+ static,
+ install,
+ spatialite,
+ version_note,
+ secret,
+ port,
+ **extra_metadata,
):
"""Package SQLite files into a Datasette Docker container"""
if not shutil.which("docker"):
@@ -274,20 +274,20 @@ def package(
)
sys.exit(1)
with temporary_docker_directory(
- files,
- "datasette",
- metadata=metadata,
- extra_options=extra_options,
- branch=branch,
- template_dir=template_dir,
- plugins_dir=plugins_dir,
- static=static,
- install=install,
- spatialite=spatialite,
- version_note=version_note,
- secret=secret,
- extra_metadata=extra_metadata,
- port=port,
+ files,
+ "datasette",
+ metadata=metadata,
+ extra_options=extra_options,
+ branch=branch,
+ template_dir=template_dir,
+ plugins_dir=plugins_dir,
+ static=static,
+ install=install,
+ spatialite=spatialite,
+ version_note=version_note,
+ secret=secret,
+ extra_metadata=extra_metadata,
+ port=port,
):
args = ["docker", "build"]
if tag:
@@ -352,9 +352,9 @@ def uninstall(packages, yes):
"--host",
default="127.0.0.1",
help=(
- "Host for server. Defaults to 127.0.0.1 which means only connections "
- "from the local machine will be allowed. Use 0.0.0.0 to listen to "
- "all IPs and allow access from other machines."
+ "Host for server. Defaults to 127.0.0.1 which means only connections "
+ "from the local machine will be allowed. Use 0.0.0.0 to listen to "
+ "all IPs and allow access from other machines."
),
)
@click.option(
@@ -478,38 +478,38 @@ def uninstall(packages, yes):
help="Path to a persistent Datasette internal SQLite database",
)
def serve(
- files,
- immutable,
- host,
- port,
- uds,
- reload,
- cors,
- sqlite_extensions,
- inspect_file,
- metadata,
- template_dir,
- plugins_dir,
- static,
- memory,
- config,
- settings,
- secret,
- root,
- get,
- token,
- actor,
- version_note,
- help_settings,
- pdb,
- open_browser,
- create,
- crossdb,
- nolock,
- ssl_keyfile,
- ssl_certfile,
- internal,
- return_instance=False,
+ files,
+ immutable,
+ host,
+ port,
+ uds,
+ reload,
+ cors,
+ sqlite_extensions,
+ inspect_file,
+ metadata,
+ template_dir,
+ plugins_dir,
+ static,
+ memory,
+ config,
+ settings,
+ secret,
+ root,
+ get,
+ token,
+ actor,
+ version_note,
+ help_settings,
+ pdb,
+ open_browser,
+ create,
+ crossdb,
+ nolock,
+ ssl_keyfile,
+ ssl_certfile,
+ internal,
+ return_instance=False,
):
"""Serve up specified SQLite database files with a web UI"""
if help_settings:
@@ -582,12 +582,16 @@ def serve(
# Verify list of files, create if needed (and --create)
for file in files:
+        # Skip a stray leading 'serve' token accidentally passed as a file path;
+        # rebinding `files` does not affect the live iterator, so skip explicitly.
+        if file == 'serve' and files[:1] == ('serve',):
+            files = files[1:]
if not pathlib.Path(file).exists():
if create:
sqlite3.connect(file).execute("vacuum")
else:
raise click.ClickException(
- "Invalid value for '[FILES]...': Path '{}' does not exist.".format(
+ "Invalid value for '[FILES]...': Path '{}' does not exist".format(
file
)
)
@@ -601,7 +605,6 @@ def serve(
raise click.ClickException("Could not find SpatiaLite extension")
except StartupError as e:
raise click.ClickException(e.args[0])
-
if return_instance:
# Private utility mechanism for writing unit tests
return ds
@@ -708,7 +711,7 @@ def serve(
help="Path to directory containing custom plugins",
)
def create_token(
- id, secret, expires_after, alls, databases, resources, debug, plugins_dir
+ id, secret, expires_after, alls, databases, resources, debug, plugins_dir
):
"""
Create a signed API token for the specified actor ID
@@ -777,7 +780,7 @@ def create_token(
)
click.echo(token)
if debug:
- encoded = token[len("dstok_") :]
+ encoded = token[len("dstok_"):]
click.echo("\nDecoded:\n")
click.echo(json.dumps(ds.unsign(encoded, namespace="token"), indent=2))
@@ -810,9 +813,9 @@ async def check_databases(ds):
)
# If --crossdb and more than SQLITE_LIMIT_ATTACHED show warning
if (
- ds.crossdb
- and len([db for db in ds.databases.values() if not db.is_memory])
- > SQLITE_LIMIT_ATTACHED
+ ds.crossdb
+ and len([db for db in ds.databases.values() if not db.is_memory])
+ > SQLITE_LIMIT_ATTACHED
):
msg = (
"Warning: --crossdb only works with the first {} attached databases".format(
diff --git a/datasette/database.py b/datasette/database.py
index ffe94ea725..24e8e8f11b 100644
--- a/datasette/database.py
+++ b/datasette/database.py
@@ -20,7 +20,7 @@
table_columns,
table_column_details,
)
-from .inspect import inspect_hash
+from .my_inspect import inspect_hash
connections = threading.local()
diff --git a/datasette/filters.py b/datasette/filters.py
index 585d4865b6..8998de8c00 100644
--- a/datasette/filters.py
+++ b/datasette/filters.py
@@ -1,6 +1,5 @@
from datasette import hookimpl
-from datasette.views.base import DatasetteError
-from datasette.utils.asgi import BadRequest
+from datasette.views.error_module import DatasetteError, BadRequest
import json
import numbers
from .utils import detect_json1, escape_sqlite, path_with_removed_args
diff --git a/datasette/forbidden.py b/datasette/forbidden.py
index 41c4839602..5685210560 100644
--- a/datasette/forbidden.py
+++ b/datasette/forbidden.py
@@ -1,5 +1,5 @@
-from datasette import hookimpl, Response
-
+from datasette import hookimpl
+from datasette.views.error_module import Response
@hookimpl(trylast=True)
def forbidden(datasette, request, message):
diff --git a/datasette/handle_exception.py b/datasette/handle_exception.py
index 1a0ac979fe..dd2521e8e3 100644
--- a/datasette/handle_exception.py
+++ b/datasette/handle_exception.py
@@ -1,9 +1,7 @@
-from datasette import hookimpl, Response
+from datasette import hookimpl
from .utils import add_cors_headers
-from .utils.asgi import (
- Base400,
-)
-from .views.base import DatasetteError
+from .views.error_module import Response, Base400
+from .views.error_module import DatasetteError
from markupsafe import Markup
import pdb
import traceback
diff --git a/datasette/inspect.py b/datasette/my_inspect.py
similarity index 99%
rename from datasette/inspect.py
rename to datasette/my_inspect.py
index ede142d016..da8c38a4fa 100644
--- a/datasette/inspect.py
+++ b/datasette/my_inspect.py
@@ -1,6 +1,6 @@
import hashlib
-from .utils import (
+from datasette.utils import (
detect_spatialite,
detect_fts,
detect_primary_keys,
diff --git a/datasette/renderer.py b/datasette/renderer.py
index a446e69d0e..2d4c49d65a 100644
--- a/datasette/renderer.py
+++ b/datasette/renderer.py
@@ -6,7 +6,7 @@
path_from_row_pks,
sqlite3,
)
-from datasette.utils.asgi import Response
+from datasette.views.error_module import Response
def convert_specific_columns_to_json(rows, columns, json_cols):
diff --git a/datasette/templates/error.html b/datasette/templates/error.html
index 3451d886d1..831c29b093 100644
--- a/datasette/templates/error.html
+++ b/datasette/templates/error.html
@@ -1,11 +1,10 @@
{% extends "base.html" %}
{% block title %}{% if title %}{{ title }}{% else %}Error {{ status }}{% endif %}{% endblock %}
-
{% block content %}
{% if title %}{{ title }}{% else %}Error {{ status }}{% endif %}
{{ error }}
-
+Return to Previous
{% endblock %}
diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py
index e110891119..433fac3bb0 100644
--- a/datasette/utils/__init__.py
+++ b/datasette/utils/__init__.py
@@ -469,6 +469,7 @@ def temporary_docker_directory(
file_names = [os.path.split(f)[-1] for f in files]
if metadata:
metadata_content = parse_metadata(metadata.read())
+ verify_metadata(metadata_content)
else:
metadata_content = {}
# Merge in the non-null values in extra_metadata
@@ -521,6 +522,25 @@ def temporary_docker_directory(
os.chdir(saved_cwd)
+def verify_metadata(metadata_content):
+ """
+ Verify the validity of the metadata.
+ Args:
+ metadata_content (dict): The metadata to be verified.
+ Returns:
+ bool: True if the metadata is valid, False otherwise.
+ """
+ required_keys = ["title", "license", "license_url", "source", "source_url", "about", "about_url"]
+
+ # Check if all required keys are present
+ for key in required_keys:
+ if key not in metadata_content:
+ print(f"Error: Missing required key '{key}' in metadata")
+ return False
+
+ return True
+
+
def detect_primary_keys(conn, table):
"""Figure out primary keys for a table."""
columns = table_column_details(conn, table)
@@ -1127,10 +1147,6 @@ def method(self, *args, **kwargs):
return super().__getattribute__(name)
-class StartupError(Exception):
- pass
-
-
_re_named_parameter = re.compile(":([a-zA-Z0-9_]+)")
diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py
index 2fad1d425b..a3afe9dd2c 100644
--- a/datasette/utils/asgi.py
+++ b/datasette/utils/asgi.py
@@ -1,161 +1,12 @@
import hashlib
import json
-from datasette.utils import MultiParams, calculate_etag
+from datasette.utils import calculate_etag
from mimetypes import guess_type
-from urllib.parse import parse_qs, urlunparse, parse_qsl
+
from pathlib import Path
-from http.cookies import SimpleCookie, Morsel
import aiofiles
import aiofiles.os
-# Workaround for adding samesite support to pre 3.8 python
-Morsel._reserved["samesite"] = "SameSite"
-# Thanks, Starlette:
-# https://github.com/encode/starlette/blob/519f575/starlette/responses.py#L17
-
-
-class Base400(Exception):
- status = 400
-
-
-class NotFound(Base400):
- status = 404
-
-
-class DatabaseNotFound(NotFound):
- def __init__(self, message, database_name):
- super().__init__(message)
- self.database_name = database_name
-
-
-class TableNotFound(NotFound):
- def __init__(self, message, database_name, table):
- super().__init__(message)
- self.database_name = database_name
- self.table = table
-
-
-class RowNotFound(NotFound):
- def __init__(self, message, database_name, table, pk_values):
- super().__init__(message)
- self.database_name = database_name
- self.table_name = table
- self.pk_values = pk_values
-
-
-class Forbidden(Base400):
- status = 403
-
-
-class BadRequest(Base400):
- status = 400
-
-
-SAMESITE_VALUES = ("strict", "lax", "none")
-
-
-class Request:
- def __init__(self, scope, receive):
- self.scope = scope
- self.receive = receive
-
- def __repr__(self):
- return ''.format(self.method, self.url)
-
- @property
- def method(self):
- return self.scope["method"]
-
- @property
- def url(self):
- return urlunparse(
- (self.scheme, self.host, self.path, None, self.query_string, None)
- )
-
- @property
- def url_vars(self):
- return (self.scope.get("url_route") or {}).get("kwargs") or {}
-
- @property
- def scheme(self):
- return self.scope.get("scheme") or "http"
-
- @property
- def headers(self):
- return {
- k.decode("latin-1").lower(): v.decode("latin-1")
- for k, v in self.scope.get("headers") or []
- }
-
- @property
- def host(self):
- return self.headers.get("host") or "localhost"
-
- @property
- def cookies(self):
- cookies = SimpleCookie()
- cookies.load(self.headers.get("cookie", ""))
- return {key: value.value for key, value in cookies.items()}
-
- @property
- def path(self):
- if self.scope.get("raw_path") is not None:
- return self.scope["raw_path"].decode("latin-1").partition("?")[0]
- else:
- path = self.scope["path"]
- if isinstance(path, str):
- return path
- else:
- return path.decode("utf-8")
-
- @property
- def query_string(self):
- return (self.scope.get("query_string") or b"").decode("latin-1")
-
- @property
- def full_path(self):
- qs = self.query_string
- return "{}{}".format(self.path, ("?" + qs) if qs else "")
-
- @property
- def args(self):
- return MultiParams(parse_qs(qs=self.query_string, keep_blank_values=True))
-
- @property
- def actor(self):
- return self.scope.get("actor", None)
-
- async def post_body(self):
- body = b""
- more_body = True
- while more_body:
- message = await self.receive()
- assert message["type"] == "http.request", message
- body += message.get("body", b"")
- more_body = message.get("more_body", False)
- return body
-
- async def post_vars(self):
- body = await self.post_body()
- return dict(parse_qsl(body.decode("utf-8"), keep_blank_values=True))
-
- @classmethod
- def fake(cls, path_with_query_string, method="GET", scheme="http", url_vars=None):
- """Useful for constructing Request objects for tests"""
- path, _, query_string = path_with_query_string.partition("?")
- scope = {
- "http_version": "1.1",
- "method": method,
- "path": path,
- "raw_path": path_with_query_string.encode("latin-1"),
- "query_string": query_string.encode("latin-1"),
- "scheme": scheme,
- "type": "http",
- }
- if url_vars:
- scope["url_route"] = {"kwargs": url_vars}
- return cls(scope, None)
-
class AsgiLifespan:
def __init__(self, app, on_startup=None, on_shutdown=None):
@@ -347,102 +198,6 @@ async def inner_static(request, send):
return inner_static
-
-class Response:
- def __init__(self, body=None, status=200, headers=None, content_type="text/plain"):
- self.body = body
- self.status = status
- self.headers = headers or {}
- self._set_cookie_headers = []
- self.content_type = content_type
-
- async def asgi_send(self, send):
- headers = {}
- headers.update(self.headers)
- headers["content-type"] = self.content_type
- raw_headers = [
- [key.encode("utf-8"), value.encode("utf-8")]
- for key, value in headers.items()
- ]
- for set_cookie in self._set_cookie_headers:
- raw_headers.append([b"set-cookie", set_cookie.encode("utf-8")])
- await send(
- {
- "type": "http.response.start",
- "status": self.status,
- "headers": raw_headers,
- }
- )
- body = self.body
- if not isinstance(body, bytes):
- body = body.encode("utf-8")
- await send({"type": "http.response.body", "body": body})
-
- def set_cookie(
- self,
- key,
- value="",
- max_age=None,
- expires=None,
- path="/",
- domain=None,
- secure=False,
- httponly=False,
- samesite="lax",
- ):
- assert samesite in SAMESITE_VALUES, "samesite should be one of {}".format(
- SAMESITE_VALUES
- )
- cookie = SimpleCookie()
- cookie[key] = value
- for prop_name, prop_value in (
- ("max_age", max_age),
- ("expires", expires),
- ("path", path),
- ("domain", domain),
- ("samesite", samesite),
- ):
- if prop_value is not None:
- cookie[key][prop_name.replace("_", "-")] = prop_value
- for prop_name, prop_value in (("secure", secure), ("httponly", httponly)):
- if prop_value:
- cookie[key][prop_name] = True
- self._set_cookie_headers.append(cookie.output(header="").strip())
-
- @classmethod
- def html(cls, body, status=200, headers=None):
- return cls(
- body,
- status=status,
- headers=headers,
- content_type="text/html; charset=utf-8",
- )
-
- @classmethod
- def text(cls, body, status=200, headers=None):
- return cls(
- str(body),
- status=status,
- headers=headers,
- content_type="text/plain; charset=utf-8",
- )
-
- @classmethod
- def json(cls, body, status=200, headers=None, default=None):
- return cls(
- json.dumps(body, default=default),
- status=status,
- headers=headers,
- content_type="application/json; charset=utf-8",
- )
-
- @classmethod
- def redirect(cls, path, status=302, headers=None):
- headers = headers or {}
- headers["Location"] = path
- return cls("", status=status, headers=headers)
-
-
class AsgiFileDownload:
def __init__(
self,
@@ -478,4 +233,4 @@ async def __call__(self, scope, receive, send):
self._started = True
for hook in self.on_startup:
await hook()
- return await self.asgi(scope, receive, send)
+ return await self.asgi(scope, receive, send)
\ No newline at end of file
diff --git a/datasette/views/base.py b/datasette/views/base.py
index 9d7a854c39..0657e06704 100644
--- a/datasette/views/base.py
+++ b/datasette/views/base.py
@@ -6,12 +6,12 @@
import time
import urllib
from markupsafe import escape
+from .error_module import DatasetteError, Request, Response, NotFound, BadRequest
import pint
from datasette.database import QueryInterrupted
-from datasette.utils.asgi import Request
from datasette.utils import (
add_cors_headers,
await_me_maybe,
@@ -26,31 +26,12 @@
sqlite3,
)
from datasette.utils.asgi import (
- AsgiStream,
- NotFound,
- Response,
- BadRequest,
+ AsgiStream
)
ureg = pint.UnitRegistry()
-class DatasetteError(Exception):
- def __init__(
- self,
- message,
- title=None,
- error_dict=None,
- status=500,
- template=None,
- message_is_html=False,
- ):
- self.message = message
- self.title = title
- self.error_dict = error_dict or {}
- self.status = status
- self.message_is_html = message_is_html
-
class View:
async def head(self, request, datasette):
@@ -410,11 +391,6 @@ def set_response_headers(self, response, ttl):
add_cors_headers(response.headers)
return response
-
-def _error(messages, status=400):
- return Response.json({"ok": False, "errors": messages}, status=status)
-
-
async def stream_csv(datasette, fetch_data, request, database):
kwargs = {}
stream = request.args.get("_stream")
diff --git a/datasette/views/database.py b/datasette/views/database.py
index 851ae21fa1..24c62d164f 100644
--- a/datasette/views/database.py
+++ b/datasette/views/database.py
@@ -31,10 +31,11 @@
truncate_url,
InvalidSql,
)
-from datasette.utils.asgi import AsgiFileDownload, NotFound, Response, Forbidden
+from datasette.utils.asgi import AsgiFileDownload
from datasette.plugins import pm
+from .error_module import DatasetteError, _error, NotFound, Response, Forbidden
-from .base import BaseView, DatasetteError, View, _error, stream_csv
+from .base import BaseView, View, stream_csv
class DatabaseView(View):
@@ -342,7 +343,7 @@ async def database_download(request, datasette):
class QueryView(View):
async def post(self, request, datasette):
- from datasette.app import TableNotFound
+ from datasette.views.error_module import TableNotFound
db = await datasette.resolve_database(request)
@@ -431,7 +432,7 @@ async def post(self, request, datasette):
return Response.redirect(redirect_url or request.path)
async def get(self, request, datasette):
- from datasette.app import TableNotFound
+ from datasette.views.error_module import TableNotFound
db = await datasette.resolve_database(request)
database = db.name
@@ -933,8 +934,25 @@ async def post(self, request):
return _error(["columns must be a list of objects"])
if not column.get("name") or not isinstance(column.get("name"), str):
return _error(["Column name is required"])
+ # Check if type is specified
+
if not column.get("type"):
- column["type"] = "text"
+ # If type is not specified, check the values in the column
+ column_values = [value for value in column.get("values", []) if value is not None]
+
+ # Check if all values in the column are integers
+ if all(isinstance(value, int) for value in column_values):
+ column["type"] = "integer"
+ # Check if all values in the column are floats
+ elif all(isinstance(value, float) for value in column_values):
+ column["type"] = "float"
+ # Check if all values in the column are booleans
+ elif all(isinstance(value, bool) for value in column_values):
+ column["type"] = "boolean"
+ # If values are not all integers, floats, or booleans, set type as "text"
+ else:
+ column["type"] = "text"
+
if column["type"] not in self._supported_column_types:
return _error(
["Unsupported column type: {}".format(column["type"])]
@@ -1140,4 +1158,4 @@ async def display_rows(datasette, database, request, rows, columns):
display_value = display_value[:truncate_cells] + "\u2026"
display_row.append(display_value)
display_rows.append(display_row)
- return display_rows
+ return display_rows
\ No newline at end of file
diff --git a/datasette/views/error_module.py b/datasette/views/error_module.py
new file mode 100644
index 0000000000..59e67b8076
--- /dev/null
+++ b/datasette/views/error_module.py
@@ -0,0 +1,276 @@
+from datasette.utils import MultiParams
+from urllib.parse import parse_qsl, urlunparse, parse_qs
+from http.cookies import SimpleCookie, Morsel
+import json
+
+# Workaround for adding samesite support to pre 3.8 python
+Morsel._reserved["samesite"] = "SameSite"
+# Thanks, Starlette:
+# https://github.com/encode/starlette/blob/519f575/starlette/responses.py#L17
+
+SAMESITE_VALUES = ("strict", "lax", "none")
+
+
+class DatasetteError(Exception):
+ def __init__(
+ self,
+ message,
+ title=None,
+ error_dict=None,
+ status=500,
+ template=None,
+ message_is_html=False,
+ ):
+ self.message = message
+ self.title = title
+ self.error_dict = error_dict or {}
+ self.status = status
+ self.message_is_html = message_is_html
+
+
+class RowError(Exception):
+ def __init__(self, error):
+ self.error = error
+
+
+class StartupError(Exception):
+ pass
+
+
+def _error(messages, status=400):
+ return Response.json({"ok": False, "errors": messages}, status=status)
+
+
+class Base400(Exception):
+ status = 400
+
+
+class NotFound(Base400):
+ status = 404
+
+
+class DatabaseNotFound(NotFound):
+ def __init__(self, message, database_name):
+ super().__init__(message)
+ self.database_name = database_name
+
+
+class TableNotFound(NotFound):
+ def __init__(self, message, database_name, table):
+ super().__init__(message)
+ self.database_name = database_name
+ self.table = table
+
+
+class RowNotFound(NotFound):
+ def __init__(self, message, database_name, table, pk_values):
+ super().__init__(message)
+ self.database_name = database_name
+ self.table_name = table
+ self.pk_values = pk_values
+
+
+class Forbidden(Base400):
+ status = 403
+
+
+class BadRequest(Base400):
+ status = 400
+
+
+class Request:
+ def __init__(self, scope, receive):
+ self.scope = scope
+ self.receive = receive
+
+ def __repr__(self):
+        return '<asgi.Request method="{}" url="{}">'.format(self.method, self.url)
+
+ @property
+ def method(self):
+ return self.scope["method"]
+
+ @property
+ def url(self):
+ return urlunparse(
+ (self.scheme, self.host, self.path, None, self.query_string, None)
+ )
+
+ @property
+ def url_vars(self):
+ return (self.scope.get("url_route") or {}).get("kwargs") or {}
+
+ @property
+ def scheme(self):
+ return self.scope.get("scheme") or "http"
+
+ @property
+ def headers(self):
+ return {
+ k.decode("latin-1").lower(): v.decode("latin-1")
+ for k, v in self.scope.get("headers") or []
+ }
+
+ @property
+ def host(self):
+ return self.headers.get("host") or "localhost"
+
+ @property
+ def cookies(self):
+ cookies = SimpleCookie()
+ cookies.load(self.headers.get("cookie", ""))
+ return {key: value.value for key, value in cookies.items()}
+
+ @property
+ def path(self):
+ if self.scope.get("raw_path") is not None:
+ return self.scope["raw_path"].decode("latin-1").partition("?")[0]
+ else:
+ path = self.scope["path"]
+ if isinstance(path, str):
+ return path
+ else:
+ return path.decode("utf-8")
+
+ @property
+ def query_string(self):
+ return (self.scope.get("query_string") or b"").decode("latin-1")
+
+ @property
+ def full_path(self):
+ qs = self.query_string
+ return "{}{}".format(self.path, ("?" + qs) if qs else "")
+
+ @property
+ def args(self):
+ return MultiParams(parse_qs(qs=self.query_string, keep_blank_values=True))
+
+ @property
+ def actor(self):
+ return self.scope.get("actor", None)
+
+ async def post_body(self):
+ body = b""
+ more_body = True
+ while more_body:
+ message = await self.receive()
+ assert message["type"] == "http.request", message
+ body += message.get("body", b"")
+ more_body = message.get("more_body", False)
+ return body
+
+ async def post_vars(self):
+ body = await self.post_body()
+ return dict(parse_qsl(body.decode("utf-8"), keep_blank_values=True))
+
+ @classmethod
+ def fake(cls, path_with_query_string, method="GET", scheme="http", url_vars=None):
+ """Useful for constructing Request objects for tests"""
+ path, _, query_string = path_with_query_string.partition("?")
+ scope = {
+ "http_version": "1.1",
+ "method": method,
+ "path": path,
+ "raw_path": path_with_query_string.encode("latin-1"),
+ "query_string": query_string.encode("latin-1"),
+ "scheme": scheme,
+ "type": "http",
+ }
+ if url_vars:
+ scope["url_route"] = {"kwargs": url_vars}
+ return cls(scope, None)
+
+
+class Response:
+ def __init__(self, body=None, status=200, headers=None, content_type="text/plain"):
+ self.body = body
+ self.status = status
+ self.headers = headers or {}
+ self._set_cookie_headers = []
+ self.content_type = content_type
+
+ async def asgi_send(self, send):
+ headers = {}
+ headers.update(self.headers)
+ headers["content-type"] = self.content_type
+ raw_headers = [
+ [key.encode("utf-8"), value.encode("utf-8")]
+ for key, value in headers.items()
+ ]
+ for set_cookie in self._set_cookie_headers:
+ raw_headers.append([b"set-cookie", set_cookie.encode("utf-8")])
+ await send(
+ {
+ "type": "http.response.start",
+ "status": self.status,
+ "headers": raw_headers,
+ }
+ )
+ body = self.body
+ if not isinstance(body, bytes):
+ body = body.encode("utf-8")
+ await send({"type": "http.response.body", "body": body})
+
+ def set_cookie(
+ self,
+ key,
+ value="",
+ max_age=None,
+ expires=None,
+ path="/",
+ domain=None,
+ secure=False,
+ httponly=False,
+ samesite="lax",
+ ):
+ assert samesite in SAMESITE_VALUES, "samesite should be one of {}".format(
+ SAMESITE_VALUES
+ )
+ cookie = SimpleCookie()
+ cookie[key] = value
+ for prop_name, prop_value in (
+ ("max_age", max_age),
+ ("expires", expires),
+ ("path", path),
+ ("domain", domain),
+ ("samesite", samesite),
+ ):
+ if prop_value is not None:
+ cookie[key][prop_name.replace("_", "-")] = prop_value
+ for prop_name, prop_value in (("secure", secure), ("httponly", httponly)):
+ if prop_value:
+ cookie[key][prop_name] = True
+ self._set_cookie_headers.append(cookie.output(header="").strip())
+
+ @classmethod
+ def html(cls, body, status=200, headers=None):
+ return cls(
+ body,
+ status=status,
+ headers=headers,
+ content_type="text/html; charset=utf-8",
+ )
+
+ @classmethod
+ def text(cls, body, status=200, headers=None):
+ return cls(
+ str(body),
+ status=status,
+ headers=headers,
+ content_type="text/plain; charset=utf-8",
+ )
+
+ @classmethod
+ def json(cls, body, status=200, headers=None, default=None):
+ return cls(
+ json.dumps(body, default=default),
+ status=status,
+ headers=headers,
+ content_type="application/json; charset=utf-8",
+ )
+
+ @classmethod
+ def redirect(cls, path, status=302, headers=None):
+ headers = headers or {}
+ headers["Location"] = path
+ return cls("", status=status, headers=headers)
diff --git a/datasette/views/index.py b/datasette/views/index.py
index 6546b7ae83..3d77929630 100644
--- a/datasette/views/index.py
+++ b/datasette/views/index.py
@@ -7,7 +7,7 @@
make_slot_function,
CustomJSONEncoder,
)
-from datasette.utils.asgi import Response
+from datasette.views.error_module import Response
from datasette.version import __version__
from .base import BaseView
diff --git a/datasette/views/row.py b/datasette/views/row.py
index 49d390f6be..d5e4888cf7 100644
--- a/datasette/views/row.py
+++ b/datasette/views/row.py
@@ -1,13 +1,13 @@
-from datasette.utils.asgi import NotFound, Forbidden, Response
from datasette.database import QueryInterrupted
from datasette.events import UpdateRowEvent, DeleteRowEvent
-from .base import DataView, BaseView, _error
+from .base import DataView, BaseView
from datasette.utils import (
await_me_maybe,
make_slot_function,
to_css_class,
escape_sqlite,
)
+from .error_module import _error, NotFound, Forbidden, Response
from datasette.plugins import pm
import json
import sqlite_utils
@@ -169,14 +169,8 @@ async def foreign_key_tables(self, database, table, pk_values):
foreign_key_tables.append({**fk, **{"count": count, "link": link}})
return foreign_key_tables
-
-class RowError(Exception):
- def __init__(self, error):
- self.error = error
-
-
async def _resolve_row_and_check_permission(datasette, request, permission):
- from datasette.app import DatabaseNotFound, TableNotFound, RowNotFound
+ from datasette.views.error_module import DatabaseNotFound, TableNotFound, RowNotFound
try:
resolved = await datasette.resolve_row(request)
diff --git a/datasette/views/special.py b/datasette/views/special.py
index c80e816f5a..cd5238af1f 100644
--- a/datasette/views/special.py
+++ b/datasette/views/special.py
@@ -1,6 +1,6 @@
import json
from datasette.events import LogoutEvent, LoginEvent, CreateTokenEvent
-from datasette.utils.asgi import Response, Forbidden
+from datasette.views.error_module import Response, Forbidden
from datasette.utils import (
actor_matches_allow,
add_cors_headers,
diff --git a/datasette/views/table.py b/datasette/views/table.py
index ba03241dbc..5170ebf606 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -40,10 +40,11 @@
InvalidSql,
sqlite3,
)
-from datasette.utils.asgi import BadRequest, Forbidden, NotFound, Response
+from datasette.views.error_module import BadRequest, Forbidden, NotFound, Response
from datasette.filters import Filters
import sqlite_utils
-from .base import BaseView, DatasetteError, ureg, _error, stream_csv
+from .base import BaseView, ureg, stream_csv
+from .error_module import DatasetteError, _error
from .database import QueryView
LINK_WITH_LABEL = (
@@ -705,7 +706,12 @@ async def _sortable_columns_for_table(datasette, database_name, table_name, use_
db = datasette.databases[database_name]
table_metadata = await datasette.table_config(database_name, table_name)
if "sortable_columns" in table_metadata:
- sortable_columns = set(table_metadata["sortable_columns"])
+ # fix now allows any primary key to be sorted as well with the metadata
+ sort_col = set(table_metadata["sortable_columns"])
+ pk_col = set(await db.primary_keys(table_name))
+ sortable_columns = [sort_col.pop()]
+ if len(pk_col) > 0:
+ sortable_columns.append(pk_col.pop())
else:
sortable_columns = set(await db.table_columns(table_name))
if use_rowid:
@@ -713,7 +719,8 @@ async def _sortable_columns_for_table(datasette, database_name, table_name, use_
return sortable_columns
-async def _sort_order(table_metadata, sortable_columns, request, order_by):
+async def _sort_order(datasette, database_name, table_name, table_metadata, sortable_columns, request, order_by):
+ db = datasette.databases[database_name]
sort = request.args.get("_sort")
sort_desc = request.args.get("_sort_desc")
@@ -770,7 +777,7 @@ async def table_view(datasette, request):
async def table_view_traced(datasette, request):
- from datasette.app import TableNotFound
+ from datasette.views.error_module import TableNotFound
try:
resolved = await datasette.resolve_table(request)
@@ -1042,7 +1049,7 @@ async def table_view_data(
)
sort, sort_desc, order_by = await _sort_order(
- table_metadata, sortable_columns, request, order_by
+ datasette, database_name, table_name, table_metadata, sortable_columns, request, order_by
)
from_sql = "from {table_name} {where}".format(
diff --git a/pythonProject/.idea/.gitignore b/pythonProject/.idea/.gitignore
new file mode 100644
index 0000000000..13566b81b0
--- /dev/null
+++ b/pythonProject/.idea/.gitignore
@@ -0,0 +1,8 @@
+# Default ignored files
+/shelf/
+/workspace.xml
+# Editor-based HTTP Client requests
+/httpRequests/
+# Datasource local storage ignored files
+/dataSources/
+/dataSources.local.xml
diff --git a/pythonProject/.idea/inspectionProfiles/profiles_settings.xml b/pythonProject/.idea/inspectionProfiles/profiles_settings.xml
new file mode 100644
index 0000000000..105ce2da2d
--- /dev/null
+++ b/pythonProject/.idea/inspectionProfiles/profiles_settings.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/pythonProject/.idea/misc.xml b/pythonProject/.idea/misc.xml
new file mode 100644
index 0000000000..f56ad02b95
--- /dev/null
+++ b/pythonProject/.idea/misc.xml
@@ -0,0 +1,4 @@
+
+
+
+
\ No newline at end of file
diff --git a/pythonProject/.idea/modules.xml b/pythonProject/.idea/modules.xml
new file mode 100644
index 0000000000..e15ec35fe0
--- /dev/null
+++ b/pythonProject/.idea/modules.xml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/pythonProject/.idea/pythonProject.iml b/pythonProject/.idea/pythonProject.iml
new file mode 100644
index 0000000000..2c80e12694
--- /dev/null
+++ b/pythonProject/.idea/pythonProject.iml
@@ -0,0 +1,10 @@
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/pythonProject/.idea/vcs.xml b/pythonProject/.idea/vcs.xml
new file mode 100644
index 0000000000..6c0b863585
--- /dev/null
+++ b/pythonProject/.idea/vcs.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/serve b/serve
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/test_api_write.py b/tests/test_api_write.py
index 6a7ddeb61d..a6d1c8fc06 100644
--- a/tests/test_api_write.py
+++ b/tests/test_api_write.py
@@ -133,14 +133,14 @@ async def test_insert_rows(ds_write, return_rows):
{},
None,
404,
- ["Database not found: data2"],
+ ["Invalid Database: The database data2 was not found. Return to the previous page below to ensure your own Database was created properly."],
),
(
"/data/docs2/-/insert",
{},
None,
404,
- ["Table not found: docs2"],
+ ["Invalid Table: docs2 was not found. Return to the previous page below, table is not present within the Database file."],
),
(
"/data/docs/-/insert",
@@ -274,7 +274,7 @@ async def test_insert_rows(ds_write, return_rows):
{"rows": [{"title": "Test"}]},
None,
404,
- ["Table not found: badtable"],
+ ["Invalid Table: badtable was not found. Return to the previous page below, table is not present within the Database file."],
),
# missing primary key
(
@@ -598,7 +598,7 @@ async def test_delete_row_errors(ds_write, scenario):
assert (
response.json()["errors"] == ["Permission denied"]
if scenario == "no_token"
- else ["Table not found: bad_table"]
+ else ["Invalid Table: bad_table was not found. Return to the previous page below, table is not present within the Database file."]
)
assert len((await ds_write.client.get("/data/docs.json?_shape=array")).json()) == 1
@@ -703,7 +703,7 @@ async def test_update_row_check_permission(ds_write, scenario):
assert (
response.json()["errors"] == ["Permission denied"]
if scenario == "no_token"
- else ["Table not found: bad_table"]
+ else ["Invalid Table: bad_table was not found. Return to the previous page below, table is not present within the Database file."]
)
@@ -830,7 +830,7 @@ async def test_drop_table(ds_write, scenario):
assert response.json()["ok"] is False
expected_error = "Permission denied"
if scenario == "bad_table":
- expected_error = "Table not found: bad_table"
+ expected_error = "Invalid Table: bad_table was not found. Return to the previous page below, table is not present within the Database file."
elif scenario == "immutable":
expected_error = "Database is immutable"
assert response.json()["errors"] == [expected_error]
diff --git a/tests/test_table_api.py b/tests/test_table_api.py
index 5893095045..30996c32e9 100644
--- a/tests/test_table_api.py
+++ b/tests/test_table_api.py
@@ -36,7 +36,7 @@ async def test_table_json(ds_client):
async def test_table_not_exists_json(ds_client):
assert (await ds_client.get("/fixtures/blah.json")).json() == {
"ok": False,
- "error": "Table not found: blah",
+ "error": "Invalid Table: blah was not found. Return to the previous page below, table is not present within the Database file.",
"status": 404,
"title": None,
}