files-to-prompt datasette -e py -
<documents>
<document index="1">
<source>datasette/__init__.py</source>
<document_content>
from datasette.permissions import Permission  # noqa
from datasette.version import __version_info__, __version__  # noqa
from datasette.events import Event  # noqa
from datasette.utils.asgi import Forbidden, NotFound, Request, Response  # noqa
from datasette.utils import actor_matches_allow  # noqa
from datasette.views import Context  # noqa
from .hookspecs import hookimpl  # noqa
from .hookspecs import hookspec  # noqa
</document_content>
</document>
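
As an aside (not part of the dump): `__init__.py` exists mostly to re-export the public plugin API, so a one-file plugin needs nothing beyond these names. A minimal sketch, assuming Datasette is installed and using a hypothetical `/-/hello` route:

# hello_plugin.py - illustrative sketch of a minimal one-file plugin,
# relying only on names re-exported by datasette/__init__.py above.
from datasette import hookimpl, Response


@hookimpl
def register_routes():
    async def hello(request):
        return Response.text("hello from a plugin")

    # register_routes returns (regex, view function) pairs
    return [(r"^/-/hello$", hello)]
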
<document index="2"> | |
<source>datasette/__main__.py</source> | |
<document_content> | |
from datasette.cli import cli | |
if __name__ == "__main__": | |
cli() | |
</document_content> | |
</document> | |
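
This `__main__.py` is what makes `python -m datasette` equivalent to invoking the `datasette` CLI command directly.
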
<document index="3"> | |
<source>datasette/actor_auth_cookie.py</source> | |
<document_content> | |
from datasette import hookimpl | |
from itsdangerous import BadSignature | |
from datasette.utils import baseconv | |
import time | |
@hookimpl | |
def actor_from_request(datasette, request): | |
if "ds_actor" not in request.cookies: | |
return None | |
try: | |
decoded = datasette.unsign(request.cookies["ds_actor"], "actor") | |
# If it has "e" and "a" keys process the "e" expiry | |
if not isinstance(decoded, dict) or "a" not in decoded: | |
return None | |
expires_at = decoded.get("e") | |
if expires_at: | |
timestamp = int(baseconv.base62.decode(expires_at)) | |
if time.time() > timestamp: | |
return None | |
return decoded["a"] | |
except BadSignature: | |
return None | |
</document_content> | |
</document> | |
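
For context, a minimal sketch (not part of the dump; it assumes the `datasette` package is importable) of the cookie format this hook decodes - an "a" key carrying the actor plus an optional base62-encoded "e" expiry, signed in the "actor" namespace:

import time

from datasette.app import Datasette
from datasette.utils import baseconv

ds = Datasette()  # an ephemeral signing secret is generated automatically
cookie = ds.sign(
    {"a": {"id": "root"}, "e": baseconv.base62.encode(int(time.time()) + 3600)},
    "actor",
)
print(ds.unsign(cookie, "actor")["a"])  # {'id': 'root'}
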
<document index="4"> | |
<source>datasette/app.py</source> | |
<document_content> | |
from asgi_csrf import Errors | |
import asyncio | |
from typing import Any, Dict, Iterable, List, Optional, Sequence, Tuple, Union | |
import asgi_csrf | |
import collections | |
import contextvars | |
import dataclasses | |
import datetime | |
import functools | |
import glob | |
import hashlib | |
import httpx | |
import importlib.metadata | |
import inspect | |
from itsdangerous import BadSignature | |
import json | |
import os | |
import re | |
import secrets | |
import sys | |
import threading | |
import time | |
import types | |
import urllib.parse | |
import uuid | |
from concurrent import futures | |
from pathlib import Path | |
from markupsafe import Markup, escape | |
from itsdangerous import URLSafeSerializer | |
from jinja2 import ( | |
ChoiceLoader, | |
Environment, | |
FileSystemLoader, | |
PrefixLoader, | |
) | |
from jinja2.environment import Template | |
from jinja2.exceptions import TemplateNotFound | |
from .events import Event | |
from .views import Context | |
from .views.database import database_download, DatabaseView, TableCreateView, QueryView | |
from .views.index import IndexView | |
from .views.special import ( | |
JsonDataView, | |
PatternPortfolioView, | |
AuthTokenView, | |
ApiExplorerView, | |
CreateTokenView, | |
LogoutView, | |
AllowDebugView, | |
PermissionsDebugView, | |
MessagesDebugView, | |
) | |
from .views.table import ( | |
TableInsertView, | |
TableUpsertView, | |
TableDropView, | |
table_view, | |
) | |
from .views.row import RowView, RowDeleteView, RowUpdateView | |
from .renderer import json_renderer | |
from .url_builder import Urls | |
from .database import Database, QueryInterrupted | |
from .utils import ( | |
PrefixedUrlString, | |
SPATIALITE_FUNCTIONS, | |
StartupError, | |
async_call_with_supported_arguments, | |
await_me_maybe, | |
baseconv, | |
call_with_supported_arguments, | |
detect_json1, | |
display_actor, | |
escape_css_string, | |
escape_sqlite, | |
find_spatialite, | |
format_bytes, | |
module_from_path, | |
move_plugins_and_allow, | |
move_table_config, | |
parse_metadata, | |
resolve_env_secrets, | |
resolve_routes, | |
tilde_decode, | |
to_css_class, | |
urlsafe_components, | |
redact_keys, | |
row_sql_params_pks, | |
) | |
from .utils.asgi import ( | |
AsgiLifespan, | |
Forbidden, | |
NotFound, | |
DatabaseNotFound, | |
TableNotFound, | |
RowNotFound, | |
Request, | |
Response, | |
AsgiRunOnFirstRequest, | |
asgi_static, | |
asgi_send, | |
asgi_send_file, | |
asgi_send_redirect, | |
) | |
from .utils.internal_db import init_internal_db, populate_schema_tables | |
from .utils.sqlite import ( | |
sqlite3, | |
using_pysqlite3, | |
) | |
from .tracer import AsgiTracer | |
from .plugins import pm, DEFAULT_PLUGINS, get_plugins | |
from .version import __version__ | |
app_root = Path(__file__).parent.parent | |
# https://github.com/simonw/datasette/issues/283#issuecomment-781591015 | |
SQLITE_LIMIT_ATTACHED = 10 | |
Setting = collections.namedtuple("Setting", ("name", "default", "help")) | |
SETTINGS = ( | |
Setting("default_page_size", 100, "Default page size for the table view"), | |
Setting( | |
"max_returned_rows", | |
1000, | |
"Maximum rows that can be returned from a table or custom query", | |
), | |
Setting( | |
"max_insert_rows", | |
100, | |
"Maximum rows that can be inserted at a time using the bulk insert API", | |
), | |
Setting( | |
"num_sql_threads", | |
3, | |
"Number of threads in the thread pool for executing SQLite queries", | |
), | |
Setting("sql_time_limit_ms", 1000, "Time limit for a SQL query in milliseconds"), | |
Setting( | |
"default_facet_size", 30, "Number of values to return for requested facets" | |
), | |
Setting("facet_time_limit_ms", 200, "Time limit for calculating a requested facet"), | |
Setting( | |
"facet_suggest_time_limit_ms", | |
50, | |
"Time limit for calculating a suggested facet", | |
), | |
Setting( | |
"allow_facet", | |
True, | |
"Allow users to specify columns to facet using ?_facet= parameter", | |
), | |
Setting( | |
"allow_download", | |
True, | |
"Allow users to download the original SQLite database files", | |
), | |
Setting( | |
"allow_signed_tokens", | |
True, | |
"Allow users to create and use signed API tokens", | |
), | |
Setting( | |
"default_allow_sql", | |
True, | |
"Allow anyone to run arbitrary SQL queries", | |
), | |
Setting( | |
"max_signed_tokens_ttl", | |
0, | |
"Maximum allowed expiry time for signed API tokens", | |
), | |
Setting("suggest_facets", True, "Calculate and display suggested facets"), | |
Setting( | |
"default_cache_ttl", | |
5, | |
"Default HTTP cache TTL (used in Cache-Control: max-age= header)", | |
), | |
Setting("cache_size_kb", 0, "SQLite cache size in KB (0 == use SQLite default)"), | |
Setting( | |
"allow_csv_stream", | |
True, | |
"Allow .csv?_stream=1 to download all rows (ignoring max_returned_rows)", | |
), | |
Setting( | |
"max_csv_mb", | |
100, | |
"Maximum size allowed for CSV export in MB - set 0 to disable this limit", | |
), | |
Setting( | |
"truncate_cells_html", | |
2048, | |
"Truncate cells longer than this in HTML table view - set 0 to disable", | |
), | |
Setting( | |
"force_https_urls", | |
False, | |
"Force URLs in API output to always use https:// protocol", | |
), | |
Setting( | |
"template_debug", | |
False, | |
"Allow display of template debug information with ?_context=1", | |
), | |
Setting( | |
"trace_debug", | |
False, | |
"Allow display of SQL trace debug information with ?_trace=1", | |
), | |
Setting("base_url", "/", "Datasette URLs should use this base path"), | |
) | |
_HASH_URLS_REMOVED = "The hash_urls setting has been removed, try the datasette-hashed-urls plugin instead" | |
OBSOLETE_SETTINGS = { | |
"hash_urls": _HASH_URLS_REMOVED, | |
"default_cache_ttl_hashed": _HASH_URLS_REMOVED, | |
} | |
DEFAULT_SETTINGS = {option.name: option.default for option in SETTINGS} | |
FAVICON_PATH = app_root / "datasette" / "static" / "favicon.png" | |
DEFAULT_NOT_SET = object() | |
async def favicon(request, send): | |
await asgi_send_file( | |
send, | |
str(FAVICON_PATH), | |
content_type="image/png", | |
headers={"Cache-Control": "max-age=3600, immutable, public"}, | |
) | |
ResolvedTable = collections.namedtuple("ResolvedTable", ("db", "table", "is_view")) | |
ResolvedRow = collections.namedtuple( | |
"ResolvedRow", ("db", "table", "sql", "params", "pks", "pk_values", "row") | |
) | |
def _to_string(value): | |
if isinstance(value, str): | |
return value | |
else: | |
return json.dumps(value, default=str) | |
class Datasette: | |
# Message constants: | |
INFO = 1 | |
WARNING = 2 | |
ERROR = 3 | |
def __init__( | |
self, | |
files=None, | |
immutables=None, | |
cache_headers=True, | |
cors=False, | |
inspect_data=None, | |
config=None, | |
metadata=None, | |
sqlite_extensions=None, | |
template_dir=None, | |
plugins_dir=None, | |
static_mounts=None, | |
memory=False, | |
settings=None, | |
secret=None, | |
version_note=None, | |
config_dir=None, | |
pdb=False, | |
crossdb=False, | |
nolock=False, | |
internal=None, | |
): | |
self._startup_invoked = False | |
self._request_id = contextvars.ContextVar("request_id", default=None) | |
assert config_dir is None or isinstance( | |
config_dir, Path | |
), "config_dir= should be a pathlib.Path" | |
self.config_dir = config_dir | |
self.pdb = pdb | |
self._secret = secret or secrets.token_hex(32) | |
if files is not None and isinstance(files, str): | |
raise ValueError("files= must be a list of paths, not a string") | |
self.files = tuple(files or []) + tuple(immutables or []) | |
if config_dir: | |
db_files = [] | |
for ext in ("db", "sqlite", "sqlite3"): | |
db_files.extend(config_dir.glob("*.{}".format(ext))) | |
self.files += tuple(str(f) for f in db_files) | |
if ( | |
config_dir | |
and (config_dir / "inspect-data.json").exists() | |
and not inspect_data | |
): | |
inspect_data = json.loads((config_dir / "inspect-data.json").read_text()) | |
if not immutables: | |
immutable_filenames = [i["file"] for i in inspect_data.values()] | |
immutables = [ | |
f for f in self.files if Path(f).name in immutable_filenames | |
] | |
self.inspect_data = inspect_data | |
self.immutables = set(immutables or []) | |
self.databases = collections.OrderedDict() | |
self.permissions = {} # .invoke_startup() will populate this | |
try: | |
self._refresh_schemas_lock = asyncio.Lock() | |
except RuntimeError as rex: | |
# Workaround for intermittent test failure, see: | |
# https://github.com/simonw/datasette/issues/1802 | |
if "There is no current event loop in thread" in str(rex): | |
loop = asyncio.new_event_loop() | |
asyncio.set_event_loop(loop) | |
self._refresh_schemas_lock = asyncio.Lock() | |
else: | |
raise | |
self.crossdb = crossdb | |
self.nolock = nolock | |
if memory or crossdb or not self.files: | |
self.add_database( | |
Database(self, is_mutable=False, is_memory=True), name="_memory" | |
) | |
for file in self.files: | |
self.add_database( | |
Database(self, file, is_mutable=file not in self.immutables) | |
) | |
self.internal_db_created = False | |
if internal is None: | |
self._internal_database = Database(self, memory_name=secrets.token_hex()) | |
else: | |
self._internal_database = Database(self, path=internal, mode="rwc") | |
self._internal_database.name = "__INTERNAL__" | |
self.cache_headers = cache_headers | |
self.cors = cors | |
config_files = [] | |
metadata_files = [] | |
if config_dir: | |
metadata_files = [ | |
config_dir / filename | |
for filename in ("metadata.json", "metadata.yaml", "metadata.yml") | |
if (config_dir / filename).exists() | |
] | |
config_files = [ | |
config_dir / filename | |
for filename in ("datasette.json", "datasette.yaml", "datasette.yml") | |
if (config_dir / filename).exists() | |
] | |
if config_dir and metadata_files and not metadata: | |
with metadata_files[0].open() as fp: | |
metadata = parse_metadata(fp.read()) | |
if config_dir and config_files and not config: | |
with config_files[0].open() as fp: | |
config = parse_metadata(fp.read()) | |
# Move any "plugins" and "allow" settings from metadata to config - updates them in place | |
metadata = metadata or {} | |
config = config or {} | |
metadata, config = move_plugins_and_allow(metadata, config) | |
# Now migrate any known table configuration settings over as well | |
metadata, config = move_table_config(metadata, config) | |
self._metadata_local = metadata or {} | |
self.sqlite_extensions = [] | |
for extension in sqlite_extensions or []: | |
# Resolve spatialite, if requested | |
if extension == "spatialite": | |
# Could raise SpatialiteNotFound | |
self.sqlite_extensions.append(find_spatialite()) | |
else: | |
self.sqlite_extensions.append(extension) | |
if config_dir and (config_dir / "templates").is_dir() and not template_dir: | |
template_dir = str((config_dir / "templates").resolve()) | |
self.template_dir = template_dir | |
if config_dir and (config_dir / "plugins").is_dir() and not plugins_dir: | |
plugins_dir = str((config_dir / "plugins").resolve()) | |
self.plugins_dir = plugins_dir | |
if config_dir and (config_dir / "static").is_dir() and not static_mounts: | |
static_mounts = [("static", str((config_dir / "static").resolve()))] | |
self.static_mounts = static_mounts or [] | |
if config_dir and (config_dir / "datasette.json").exists() and not config: | |
config = json.loads((config_dir / "datasette.json").read_text()) | |
config = config or {} | |
config_settings = config.get("settings") or {} | |
# validate "settings" keys in datasette.json | |
for key in config_settings: | |
if key not in DEFAULT_SETTINGS: | |
raise StartupError("Invalid setting '{}' in datasette.json".format(key)) | |
self.config = config | |
# CLI settings should overwrite datasette.json settings | |
self._settings = dict(DEFAULT_SETTINGS, **(config_settings), **(settings or {})) | |
self.renderers = {} # File extension -> (renderer, can_render) functions | |
self.version_note = version_note | |
if self.setting("num_sql_threads") == 0: | |
self.executor = None | |
else: | |
self.executor = futures.ThreadPoolExecutor( | |
max_workers=self.setting("num_sql_threads") | |
) | |
self.max_returned_rows = self.setting("max_returned_rows") | |
self.sql_time_limit_ms = self.setting("sql_time_limit_ms") | |
self.page_size = self.setting("default_page_size") | |
# Execute plugins in constructor, to ensure they are available | |
# when the rest of `datasette inspect` executes | |
if self.plugins_dir: | |
for filepath in glob.glob(os.path.join(self.plugins_dir, "*.py")): | |
if not os.path.isfile(filepath): | |
continue | |
mod = module_from_path(filepath, name=os.path.basename(filepath)) | |
try: | |
pm.register(mod) | |
except ValueError: | |
# Plugin already registered | |
pass | |
# Configure Jinja | |
default_templates = str(app_root / "datasette" / "templates") | |
template_paths = [] | |
if self.template_dir: | |
template_paths.append(self.template_dir) | |
plugin_template_paths = [ | |
plugin["templates_path"] | |
for plugin in get_plugins() | |
if plugin["templates_path"] | |
] | |
template_paths.extend(plugin_template_paths) | |
template_paths.append(default_templates) | |
template_loader = ChoiceLoader( | |
[ | |
FileSystemLoader(template_paths), | |
# Support {% extends "default:table.html" %}: | |
PrefixLoader( | |
{"default": FileSystemLoader(default_templates)}, delimiter=":" | |
), | |
] | |
) | |
environment = Environment( | |
loader=template_loader, | |
autoescape=True, | |
enable_async=True, | |
# undefined=StrictUndefined, | |
) | |
environment.filters["escape_css_string"] = escape_css_string | |
environment.filters["quote_plus"] = urllib.parse.quote_plus | |
self._jinja_env = environment | |
environment.filters["escape_sqlite"] = escape_sqlite | |
environment.filters["to_css_class"] = to_css_class | |
self._register_renderers() | |
self._permission_checks = collections.deque(maxlen=200) | |
self._root_token = secrets.token_hex(32) | |
self.client = DatasetteClient(self) | |
@property | |
def request_id(self): | |
return self._request_id.get() | |
async def apply_metadata_json(self): | |
# Apply any metadata entries from metadata.json to the internal tables | |
# step 1: top-level metadata | |
for key in self._metadata_local or {}: | |
if key == "databases": | |
continue | |
value = self._metadata_local[key] | |
await self.set_instance_metadata(key, _to_string(value)) | |
# step 2: database-level metadata | |
for dbname, db in self._metadata_local.get("databases", {}).items(): | |
for key, value in db.items(): | |
if key in ("tables", "queries"): | |
continue | |
await self.set_database_metadata(dbname, key, _to_string(value)) | |
# step 3: table-level metadata | |
for tablename, table in db.get("tables", {}).items(): | |
for key, value in table.items(): | |
if key == "columns": | |
continue | |
await self.set_resource_metadata( | |
dbname, tablename, key, _to_string(value) | |
) | |
# step 4: column-level metadata (only descriptions in metadata.json) | |
for columnname, column_description in table.get("columns", {}).items(): | |
await self.set_column_metadata( | |
dbname, tablename, columnname, "description", column_description | |
) | |
        # TODO(alex) if metadata.json was loaded in, and --internal is not memory, then log
        # a warning to user that they should delete their metadata.json file
    def get_jinja_environment(self, request: Request = None) -> Environment:
        environment = self._jinja_env
        if request:
            for environment in pm.hook.jinja2_environment_from_request(
                datasette=self, request=request, env=environment
            ):
                pass
        return environment

    def get_permission(self, name_or_abbr: str) -> "Permission":
        """
        Returns a Permission object for the given name or abbreviation. Raises KeyError if not found.
        """
        if name_or_abbr in self.permissions:
            return self.permissions[name_or_abbr]
        # Try abbreviation
        for permission in self.permissions.values():
            if permission.abbr == name_or_abbr:
                return permission
        raise KeyError(
            "No permission found with name or abbreviation {}".format(name_or_abbr)
        )

    async def refresh_schemas(self):
        if self._refresh_schemas_lock.locked():
            return
        async with self._refresh_schemas_lock:
            await self._refresh_schemas()

    async def _refresh_schemas(self):
        internal_db = self.get_internal_database()
        if not self.internal_db_created:
            await init_internal_db(internal_db)
            await self.apply_metadata_json()
            self.internal_db_created = True
        current_schema_versions = {
            row["database_name"]: row["schema_version"]
            for row in await internal_db.execute(
                "select database_name, schema_version from catalog_databases"
            )
        }
        for database_name, db in self.databases.items():
            schema_version = (await db.execute("PRAGMA schema_version")).first()[0]
            # Compare schema versions to see if we should skip it
            if schema_version == current_schema_versions.get(database_name):
                continue
            placeholders = "(?, ?, ?, ?)"
            values = [database_name, str(db.path), db.is_memory, schema_version]
            if db.path is None:
                placeholders = "(?, null, ?, ?)"
                values = [database_name, db.is_memory, schema_version]
            await internal_db.execute_write(
                """
                INSERT OR REPLACE INTO catalog_databases (database_name, path, is_memory, schema_version)
                VALUES {}
                """.format(
                    placeholders
                ),
                values,
            )
            await populate_schema_tables(internal_db, db)

    @property
    def urls(self):
        return Urls(self)

    async def invoke_startup(self):
        # This must be called for Datasette to be in a usable state
        if self._startup_invoked:
            return
        # Register event classes
        event_classes = []
        for hook in pm.hook.register_events(datasette=self):
            extra_classes = await await_me_maybe(hook)
            if extra_classes:
                event_classes.extend(extra_classes)
        self.event_classes = tuple(event_classes)
        # Register permissions, but watch out for duplicate name/abbr
        names = {}
        abbrs = {}
        for hook in pm.hook.register_permissions(datasette=self):
            if hook:
                for p in hook:
                    if p.name in names and p != names[p.name]:
                        raise StartupError(
                            "Duplicate permission name: {}".format(p.name)
                        )
                    if p.abbr and p.abbr in abbrs and p != abbrs[p.abbr]:
                        raise StartupError(
                            "Duplicate permission abbr: {}".format(p.abbr)
                        )
                    names[p.name] = p
                    if p.abbr:
                        abbrs[p.abbr] = p
                    self.permissions[p.name] = p
        for hook in pm.hook.prepare_jinja2_environment(
            env=self._jinja_env, datasette=self
        ):
            await await_me_maybe(hook)
        for hook in pm.hook.startup(datasette=self):
            await await_me_maybe(hook)
        self._startup_invoked = True

    def sign(self, value, namespace="default"):
        return URLSafeSerializer(self._secret, namespace).dumps(value)

    def unsign(self, signed, namespace="default"):
        return URLSafeSerializer(self._secret, namespace).loads(signed)

    def create_token(
        self,
        actor_id: str,
        *,
        expires_after: Optional[int] = None,
        restrict_all: Optional[Iterable[str]] = None,
        restrict_database: Optional[Dict[str, Iterable[str]]] = None,
        restrict_resource: Optional[Dict[str, Dict[str, Iterable[str]]]] = None,
    ):
        token = {"a": actor_id, "t": int(time.time())}
        if expires_after:
            token["d"] = expires_after

        def abbreviate_action(action):
            # rename to abbr if possible
            permission = self.permissions.get(action)
            if not permission:
                return action
            return permission.abbr or action
        if restrict_all or restrict_database or restrict_resource:
            token["_r"] = {}
            if restrict_all:
                token["_r"]["a"] = [abbreviate_action(a) for a in restrict_all]
            if restrict_database:
                token["_r"]["d"] = {}
                for database, actions in restrict_database.items():
                    token["_r"]["d"][database] = [abbreviate_action(a) for a in actions]
            if restrict_resource:
                token["_r"]["r"] = {}
                for database, resources in restrict_resource.items():
                    for resource, actions in resources.items():
                        token["_r"]["r"].setdefault(database, {})[resource] = [
                            abbreviate_action(a) for a in actions
                        ]
        return "dstok_{}".format(self.sign(token, namespace="token"))
    def get_database(self, name=None, route=None):
        if route is not None:
            matches = [db for db in self.databases.values() if db.route == route]
            if not matches:
                raise KeyError
            return matches[0]
        if name is None:
            name = [key for key in self.databases.keys()][0]
        return self.databases[name]

    def add_database(self, db, name=None, route=None):
        new_databases = self.databases.copy()
        if name is None:
            # Pick a unique name for this database
            suggestion = db.suggest_name()
            name = suggestion
        else:
            suggestion = name
        i = 2
        while name in self.databases:
            name = "{}_{}".format(suggestion, i)
            i += 1
        db.name = name
        db.route = route or name
        new_databases[name] = db
        # don't mutate! that causes race conditions with live import
        self.databases = new_databases
        return db

    def add_memory_database(self, memory_name):
        return self.add_database(Database(self, memory_name=memory_name))

    def remove_database(self, name):
        new_databases = self.databases.copy()
        new_databases.pop(name)
        self.databases = new_databases

    def setting(self, key):
        return self._settings.get(key, None)

    def settings_dict(self):
        # Returns a fully resolved settings dictionary, useful for templates
        return {option.name: self.setting(option.name) for option in SETTINGS}

    def _metadata_recursive_update(self, orig, updated):
        if not isinstance(orig, dict) or not isinstance(updated, dict):
            return orig
        for key, upd_value in updated.items():
            if isinstance(upd_value, dict) and isinstance(orig.get(key), dict):
                orig[key] = self._metadata_recursive_update(orig[key], upd_value)
            else:
                orig[key] = upd_value
        return orig

    async def get_instance_metadata(self):
        rows = await self.get_internal_database().execute(
            """
            SELECT
                key,
                value
            FROM metadata_instance
            """
        )
        return dict(rows)

    async def get_database_metadata(self, database_name: str):
        rows = await self.get_internal_database().execute(
            """
            SELECT
                key,
                value
            FROM metadata_databases
            WHERE database_name = ?
            """,
            [database_name],
        )
        return dict(rows)

    async def get_resource_metadata(self, database_name: str, resource_name: str):
        rows = await self.get_internal_database().execute(
            """
            SELECT
                key,
                value
            FROM metadata_resources
            WHERE database_name = ?
                AND resource_name = ?
            """,
            [database_name, resource_name],
        )
        return dict(rows)

    async def get_column_metadata(
        self, database_name: str, resource_name: str, column_name: str
    ):
        rows = await self.get_internal_database().execute(
            """
            SELECT
                key,
                value
            FROM metadata_columns
            WHERE database_name = ?
                AND resource_name = ?
                AND column_name = ?
            """,
            [database_name, resource_name, column_name],
        )
        return dict(rows)

    async def set_instance_metadata(self, key: str, value: str):
        # TODO upsert only supported on SQLite 3.24.0 (2018-06-04)
        await self.get_internal_database().execute_write(
            """
            INSERT INTO metadata_instance(key, value)
            VALUES(?, ?)
            ON CONFLICT(key) DO UPDATE SET value = excluded.value;
            """,
            [key, value],
        )

    async def set_database_metadata(self, database_name: str, key: str, value: str):
        # TODO upsert only supported on SQLite 3.24.0 (2018-06-04)
        await self.get_internal_database().execute_write(
            """
            INSERT INTO metadata_databases(database_name, key, value)
            VALUES(?, ?, ?)
            ON CONFLICT(database_name, key) DO UPDATE SET value = excluded.value;
            """,
            [database_name, key, value],
        )

    async def set_resource_metadata(
        self, database_name: str, resource_name: str, key: str, value: str
    ):
        # TODO upsert only supported on SQLite 3.24.0 (2018-06-04)
        await self.get_internal_database().execute_write(
            """
            INSERT INTO metadata_resources(database_name, resource_name, key, value)
            VALUES(?, ?, ?, ?)
            ON CONFLICT(database_name, resource_name, key) DO UPDATE SET value = excluded.value;
            """,
            [database_name, resource_name, key, value],
        )

    async def set_column_metadata(
        self,
        database_name: str,
        resource_name: str,
        column_name: str,
        key: str,
        value: str,
    ):
        # TODO upsert only supported on SQLite 3.24.0 (2018-06-04)
        await self.get_internal_database().execute_write(
            """
            INSERT INTO metadata_columns(database_name, resource_name, column_name, key, value)
            VALUES(?, ?, ?, ?, ?)
            ON CONFLICT(database_name, resource_name, column_name, key) DO UPDATE SET value = excluded.value;
            """,
            [database_name, resource_name, column_name, key, value],
        )

    def get_internal_database(self):
        return self._internal_database

    def plugin_config(self, plugin_name, database=None, table=None, fallback=True):
        """Return config for plugin, falling back from specified database/table"""
        if database is None and table is None:
            config = self._plugin_config_top(plugin_name)
        else:
            config = self._plugin_config_nested(plugin_name, database, table, fallback)
        return resolve_env_secrets(config, os.environ)

    def _plugin_config_top(self, plugin_name):
        """Returns any top-level plugin configuration for the specified plugin."""
        return ((self.config or {}).get("plugins") or {}).get(plugin_name)

    def _plugin_config_nested(self, plugin_name, database, table=None, fallback=True):
        """Returns any database or table-level plugin configuration for the specified plugin."""
        db_config = ((self.config or {}).get("databases") or {}).get(database)
        # if there's no db-level configuration, then return early, falling back to top-level if needed
        if not db_config:
            return self._plugin_config_top(plugin_name) if fallback else None
        db_plugin_config = (db_config.get("plugins") or {}).get(plugin_name)
        if table:
            table_plugin_config = (
                ((db_config.get("tables") or {}).get(table) or {}).get("plugins") or {}
            ).get(plugin_name)
            # fallback to db_config or top-level config, in that order, if needed
            if table_plugin_config is None and fallback:
                return db_plugin_config or self._plugin_config_top(plugin_name)
            return table_plugin_config
        # fallback to top-level if needed
        if db_plugin_config is None and fallback:
            return self._plugin_config_top(plugin_name)
        return db_plugin_config

    def app_css_hash(self):
        if not hasattr(self, "_app_css_hash"):
            with open(os.path.join(str(app_root), "datasette/static/app.css")) as fp:
                self._app_css_hash = hashlib.sha1(fp.read().encode("utf8")).hexdigest()[
                    :6
                ]
        return self._app_css_hash

    async def get_canned_queries(self, database_name, actor):
        queries = (
            ((self.config or {}).get("databases") or {}).get(database_name) or {}
        ).get("queries") or {}
        for more_queries in pm.hook.canned_queries(
            datasette=self,
            database=database_name,
            actor=actor,
        ):
            more_queries = await await_me_maybe(more_queries)
            queries.update(more_queries or {})
        # Fix any {"name": "select ..."} queries to be {"name": {"sql": "select ..."}}
        for key in queries:
            if not isinstance(queries[key], dict):
                queries[key] = {"sql": queries[key]}
            # Also make sure "name" is available:
            queries[key]["name"] = key
        return queries

    async def get_canned_query(self, database_name, query_name, actor):
        queries = await self.get_canned_queries(database_name, actor)
        query = queries.get(query_name)
        if query:
            return query

    def _prepare_connection(self, conn, database):
        conn.row_factory = sqlite3.Row
        conn.text_factory = lambda x: str(x, "utf-8", "replace")
        if self.sqlite_extensions:
            conn.enable_load_extension(True)
            for extension in self.sqlite_extensions:
                # "extension" is either a string path to the extension
                # or a 2-item tuple that specifies which entrypoint to load.
                if isinstance(extension, tuple):
                    path, entrypoint = extension
                    conn.execute("SELECT load_extension(?, ?)", [path, entrypoint])
                else:
                    conn.execute("SELECT load_extension(?)", [extension])
        if self.setting("cache_size_kb"):
            conn.execute(f"PRAGMA cache_size=-{self.setting('cache_size_kb')}")
        # pylint: disable=no-member
        pm.hook.prepare_connection(conn=conn, database=database, datasette=self)
        # If self.crossdb and this is _memory, connect the first SQLITE_LIMIT_ATTACHED databases
        if self.crossdb and database == "_memory":
            count = 0
            for db_name, db in self.databases.items():
                if count >= SQLITE_LIMIT_ATTACHED or db.is_memory:
                    continue
                sql = 'ATTACH DATABASE "file:{path}?{qs}" AS [{name}];'.format(
                    path=db.path,
                    qs="mode=ro" if db.is_mutable else "immutable=1",
                    name=db_name,
                )
                conn.execute(sql)
                count += 1

    def add_message(self, request, message, type=INFO):
        if not hasattr(request, "_messages"):
            request._messages = []
            request._messages_should_clear = False
        request._messages.append((message, type))

    def _write_messages_to_response(self, request, response):
        if getattr(request, "_messages", None):
            # Set those messages
            response.set_cookie("ds_messages", self.sign(request._messages, "messages"))
        elif getattr(request, "_messages_should_clear", False):
            response.set_cookie("ds_messages", "", expires=0, max_age=0)

    def _show_messages(self, request):
        if getattr(request, "_messages", None):
            request._messages_should_clear = True
            messages = request._messages
            request._messages = []
            return messages
        else:
            return []

    async def _crumb_items(self, request, table=None, database=None):
        crumbs = []
        actor = None
        if request:
            actor = request.actor
        # Top-level link
        if await self.permission_allowed(actor=actor, action="view-instance"):
            crumbs.append({"href": self.urls.instance(), "label": "home"})
        # Database link
        if database:
            if await self.permission_allowed(
                actor=actor,
                action="view-database",
                resource=database,
            ):
                crumbs.append(
                    {
                        "href": self.urls.database(database),
                        "label": database,
                    }
                )
        # Table link
        if table:
            assert database, "table= requires database="
            if await self.permission_allowed(
                actor=actor,
                action="view-table",
                resource=(database, table),
            ):
                crumbs.append(
                    {
                        "href": self.urls.table(database, table),
                        "label": table,
                    }
                )
        return crumbs

    async def actors_from_ids(
        self, actor_ids: Iterable[Union[str, int]]
    ) -> Dict[Union[int, str], Dict]:
        result = pm.hook.actors_from_ids(datasette=self, actor_ids=actor_ids)
        if result is None:
            # Do the default thing
            return {actor_id: {"id": actor_id} for actor_id in actor_ids}
        result = await await_me_maybe(result)
        return result

    async def track_event(self, event: Event):
        assert isinstance(event, self.event_classes), "Invalid event type: {}".format(
            type(event)
        )
        for hook in pm.hook.track_event(datasette=self, event=event):
            await await_me_maybe(hook)

    async def permission_allowed(
        self, actor, action, resource=None, *, default=DEFAULT_NOT_SET
    ):
        """Check permissions using the permissions_allowed plugin hook"""
        result = None
        # Use default from registered permission, if available
        if default is DEFAULT_NOT_SET and action in self.permissions:
            default = self.permissions[action].default
        opinions = []
        # Every plugin is consulted for their opinion
        for check in pm.hook.permission_allowed(
            datasette=self,
            actor=actor,
            action=action,
            resource=resource,
        ):
            check = await await_me_maybe(check)
            if check is not None:
                opinions.append(check)
        result = None
        # If any plugin said False it's false - the veto rule
        if any(not r for r in opinions):
            result = False
        elif any(r for r in opinions):
            # Otherwise, if any plugin said True it's true
            result = True
        used_default = False
        if result is None:
            # No plugin expressed an opinion, so use the default
            result = default
            used_default = True
        self._permission_checks.append(
            {
                "request_id": self.request_id,
                "when": datetime.datetime.now(datetime.timezone.utc).isoformat(),
                "actor": actor,
                "action": action,
                "resource": resource,
                "used_default": used_default,
                "result": result,
            }
        )
        return result
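
    # Summary of the logic above: any False opinion vetoes (result is False),
    # otherwise any True opinion allows; with no opinions at all the registered
    # default for the action is used and the check is logged with used_default=True.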
    async def ensure_permissions(
        self,
        actor: dict,
        permissions: Sequence[Union[Tuple[str, Union[str, Tuple[str, str]]], str]],
    ):
        """
        permissions is a list of (action, resource) tuples or 'action' strings

        Raises datasette.Forbidden() if any of the checks fail
        """
        assert actor is None or isinstance(actor, dict), "actor must be None or a dict"
        for permission in permissions:
            if isinstance(permission, str):
                action = permission
                resource = None
            elif isinstance(permission, (tuple, list)) and len(permission) == 2:
                action, resource = permission
            else:
                assert (
                    False
                ), "permission should be string or tuple of two items: {}".format(
                    repr(permission)
                )
            ok = await self.permission_allowed(
                actor,
                action,
                resource=resource,
                default=None,
            )
            if ok is not None:
                if ok:
                    return
                else:
                    raise Forbidden(action)

    async def check_visibility(
        self,
        actor: dict,
        action: Optional[str] = None,
        resource: Optional[Union[str, Tuple[str, str]]] = None,
        permissions: Optional[
            Sequence[Union[Tuple[str, Union[str, Tuple[str, str]]], str]]
        ] = None,
    ):
        """Returns (visible, private) - visible = can you see it, private = can others see it too"""
        if permissions:
            assert (
                not action and not resource
            ), "Can't use action= or resource= with permissions="
        else:
            permissions = [(action, resource)]
        try:
            await self.ensure_permissions(actor, permissions)
        except Forbidden:
            return False, False
        # User can see it, but can the anonymous user see it?
        try:
            await self.ensure_permissions(None, permissions)
        except Forbidden:
            # It's visible but private
            return True, True
        # It's visible to everyone
        return True, False

    async def execute(
        self,
        db_name,
        sql,
        params=None,
        truncate=False,
        custom_time_limit=None,
        page_size=None,
        log_sql_errors=True,
    ):
        return await self.databases[db_name].execute(
            sql,
            params=params,
            truncate=truncate,
            custom_time_limit=custom_time_limit,
            page_size=page_size,
            log_sql_errors=log_sql_errors,
        )

    async def expand_foreign_keys(self, actor, database, table, column, values):
        """Returns dict mapping (column, value) -> label"""
        labeled_fks = {}
        db = self.databases[database]
        foreign_keys = await db.foreign_keys_for_table(table)
        # Find the foreign_key for this column
        try:
            fk = [
                foreign_key
                for foreign_key in foreign_keys
                if foreign_key["column"] == column
            ][0]
        except IndexError:
            return {}
        # Ensure user has permission to view the referenced table
        other_table = fk["other_table"]
        other_column = fk["other_column"]
        visible, _ = await self.check_visibility(
            actor,
            permissions=[
                ("view-table", (database, other_table)),
                ("view-database", database),
                "view-instance",
            ],
        )
        if not visible:
            return {}
        label_column = await db.label_column_for_table(other_table)
        if not label_column:
            return {(fk["column"], value): str(value) for value in values}
        labeled_fks = {}
        sql = """
            select {other_column}, {label_column}
            from {other_table}
            where {other_column} in ({placeholders})
        """.format(
            other_column=escape_sqlite(other_column),
            label_column=escape_sqlite(label_column),
            other_table=escape_sqlite(other_table),
            placeholders=", ".join(["?"] * len(set(values))),
        )
        try:
            results = await self.execute(database, sql, list(set(values)))
        except QueryInterrupted:
            pass
        else:
            for id, value in results:
                labeled_fks[(fk["column"], id)] = value
        return labeled_fks

    def absolute_url(self, request, path):
        url = urllib.parse.urljoin(request.url, path)
        if url.startswith("http://") and self.setting("force_https_urls"):
            url = "https://" + url[len("http://") :]
        return url

    def _connected_databases(self):
        return [
            {
                "name": d.name,
                "route": d.route,
                "path": d.path,
                "size": d.size,
                "is_mutable": d.is_mutable,
                "is_memory": d.is_memory,
                "hash": d.hash,
            }
            for name, d in self.databases.items()
        ]

    def _versions(self):
        conn = sqlite3.connect(":memory:")
        self._prepare_connection(conn, "_memory")
        sqlite_version = conn.execute("select sqlite_version()").fetchone()[0]
        sqlite_extensions = {"json1": detect_json1(conn)}
        for extension, testsql, hasversion in (
            ("spatialite", "SELECT spatialite_version()", True),
        ):
            try:
                result = conn.execute(testsql)
                if hasversion:
                    sqlite_extensions[extension] = result.fetchone()[0]
                else:
                    sqlite_extensions[extension] = None
            except Exception:
                pass
        # More details on SpatiaLite
        if "spatialite" in sqlite_extensions:
            spatialite_details = {}
            for fn in SPATIALITE_FUNCTIONS:
                try:
                    result = conn.execute("select {}()".format(fn))
                    spatialite_details[fn] = result.fetchone()[0]
                except Exception as e:
                    spatialite_details[fn] = {"error": str(e)}
            sqlite_extensions["spatialite"] = spatialite_details
        # Figure out supported FTS versions
        fts_versions = []
        for fts in ("FTS5", "FTS4", "FTS3"):
            try:
                conn.execute(
                    "CREATE VIRTUAL TABLE v{fts} USING {fts} (data)".format(fts=fts)
                )
                fts_versions.append(fts)
            except sqlite3.OperationalError:
                continue
        datasette_version = {"version": __version__}
        if self.version_note:
            datasette_version["note"] = self.version_note
        try:
            # Optional import to avoid breaking Pyodide
            # https://github.com/simonw/datasette/issues/1733#issuecomment-1115268245
            import uvicorn

            uvicorn_version = uvicorn.__version__
        except ImportError:
            uvicorn_version = None
        info = {
            "python": {
                "version": ".".join(map(str, sys.version_info[:3])),
                "full": sys.version,
            },
            "datasette": datasette_version,
            "asgi": "3.0",
            "uvicorn": uvicorn_version,
            "sqlite": {
                "version": sqlite_version,
                "fts_versions": fts_versions,
                "extensions": sqlite_extensions,
                "compile_options": [
                    r[0] for r in conn.execute("pragma compile_options;").fetchall()
                ],
            },
        }
        if using_pysqlite3:
            for package in ("pysqlite3", "pysqlite3-binary"):
                try:
                    info["pysqlite3"] = importlib.metadata.version(package)
                    break
                except importlib.metadata.PackageNotFoundError:
                    pass
        return info

    def _plugins(self, request=None, all=False):
        ps = list(get_plugins())
        should_show_all = False
        if request is not None:
            should_show_all = request.args.get("all")
        else:
            should_show_all = all
        if not should_show_all:
            ps = [p for p in ps if p["name"] not in DEFAULT_PLUGINS]
        ps.sort(key=lambda p: p["name"])
        return [
            {
                "name": p["name"],
                "static": p["static_path"] is not None,
                "templates": p["templates_path"] is not None,
                "version": p.get("version"),
                "hooks": list(sorted(set(p["hooks"]))),
            }
            for p in ps
        ]

    def _threads(self):
        if self.setting("num_sql_threads") == 0:
            return {"num_threads": 0, "threads": []}
        threads = list(threading.enumerate())
        d = {
            "num_threads": len(threads),
            "threads": [
                {"name": t.name, "ident": t.ident, "daemon": t.daemon} for t in threads
            ],
        }
        tasks = asyncio.all_tasks()
        d.update(
            {
                "num_tasks": len(tasks),
                "tasks": [_cleaner_task_str(t) for t in tasks],
            }
        )
        return d

    def _actor(self, request):
        return {"actor": request.actor}

    async def table_config(self, database: str, table: str) -> dict:
        """Return dictionary of configuration for specified table"""
        return (
            (self.config or {})
            .get("databases", {})
            .get(database, {})
            .get("tables", {})
            .get(table, {})
        )

    def _register_renderers(self):
        """Register output renderers which output data in custom formats."""
        # Built-in renderers
        self.renderers["json"] = (json_renderer, lambda: True)
        # Hooks
        hook_renderers = []
        # pylint: disable=no-member
        for hook in pm.hook.register_output_renderer(datasette=self):
            if type(hook) is list:
                hook_renderers += hook
            else:
                hook_renderers.append(hook)
        for renderer in hook_renderers:
            self.renderers[renderer["extension"]] = (
                # It used to be called "callback" - remove this in Datasette 1.0
                renderer.get("render") or renderer["callback"],
                renderer.get("can_render") or (lambda: True),
            )

    async def render_template(
        self,
        templates: Union[List[str], str, Template],
        context: Optional[Union[Dict[str, Any], Context]] = None,
        request: Optional[Request] = None,
        view_name: Optional[str] = None,
    ):
        if not self._startup_invoked:
            raise Exception("render_template() called before await ds.invoke_startup()")
        context = context or {}
        if isinstance(templates, Template):
            template = templates
        else:
            if isinstance(templates, str):
                templates = [templates]
            template = self.get_jinja_environment(request).select_template(templates)
        if dataclasses.is_dataclass(context):
            context = dataclasses.asdict(context)
        body_scripts = []
        # pylint: disable=no-member
        for extra_script in pm.hook.extra_body_script(
            template=template.name,
            database=context.get("database"),
            table=context.get("table"),
            columns=context.get("columns"),
            view_name=view_name,
            request=request,
            datasette=self,
        ):
            extra_script = await await_me_maybe(extra_script)
            if isinstance(extra_script, dict):
                script = extra_script["script"]
                module = bool(extra_script.get("module"))
            else:
                script = extra_script
                module = False
            body_scripts.append({"script": Markup(script), "module": module})
        extra_template_vars = {}
        # pylint: disable=no-member
        for extra_vars in pm.hook.extra_template_vars(
            template=template.name,
            database=context.get("database"),
            table=context.get("table"),
            columns=context.get("columns"),
            view_name=view_name,
            request=request,
            datasette=self,
        ):
            extra_vars = await await_me_maybe(extra_vars)
            assert isinstance(extra_vars, dict), "extra_vars is of type {}".format(
                type(extra_vars)
            )
            extra_template_vars.update(extra_vars)

        async def menu_links():
            links = []
            for hook in pm.hook.menu_links(
                datasette=self,
                actor=request.actor if request else None,
                request=request or None,
            ):
                extra_links = await await_me_maybe(hook)
                if extra_links:
                    links.extend(extra_links)
            return links

        template_context = {
            **context,
            **{
                "request": request,
                "crumb_items": self._crumb_items,
                "urls": self.urls,
                "actor": request.actor if request else None,
                "menu_links": menu_links,
                "display_actor": display_actor,
                "show_logout": request is not None
                and "ds_actor" in request.cookies
                and request.actor,
                "app_css_hash": self.app_css_hash(),
                "zip": zip,
                "body_scripts": body_scripts,
                "format_bytes": format_bytes,
                "show_messages": lambda: self._show_messages(request),
                "extra_css_urls": await self._asset_urls(
                    "extra_css_urls", template, context, request, view_name
                ),
                "extra_js_urls": await self._asset_urls(
                    "extra_js_urls", template, context, request, view_name
                ),
                "base_url": self.setting("base_url"),
                "csrftoken": request.scope["csrftoken"] if request else lambda: "",
                "datasette_version": __version__,
            },
            **extra_template_vars,
        }
        if request and request.args.get("_context") and self.setting("template_debug"):
            return "<pre>{}</pre>".format(
                escape(json.dumps(template_context, default=repr, indent=4))
            )
        return await template.render_async(template_context)

    def set_actor_cookie(
        self, response: Response, actor: dict, expire_after: Optional[int] = None
    ):
        data = {"a": actor}
        if expire_after:
            expires_at = int(time.time()) + expire_after
data["e"] = baseconv.base62.encode(expires_at) | |
response.set_cookie("ds_actor", self.sign(data, "actor")) | |
def delete_actor_cookie(self, response: Response): | |
response.set_cookie("ds_actor", "", expires=0, max_age=0) | |
async def _asset_urls(self, key, template, context, request, view_name): | |
# Flatten list-of-lists from plugins: | |
seen_urls = set() | |
collected = [] | |
for hook in getattr(pm.hook, key)( | |
template=template.name, | |
database=context.get("database"), | |
table=context.get("table"), | |
columns=context.get("columns"), | |
view_name=view_name, | |
request=request, | |
datasette=self, | |
): | |
hook = await await_me_maybe(hook) | |
collected.extend(hook) | |
collected.extend((self.config or {}).get(key) or []) | |
output = [] | |
for url_or_dict in collected: | |
if isinstance(url_or_dict, dict): | |
url = url_or_dict["url"] | |
sri = url_or_dict.get("sri") | |
module = bool(url_or_dict.get("module")) | |
else: | |
url = url_or_dict | |
sri = None | |
module = False | |
if url in seen_urls: | |
continue | |
seen_urls.add(url) | |
if url.startswith("/"): | |
# Take base_url into account: | |
url = self.urls.path(url) | |
script = {"url": url} | |
if sri: | |
script["sri"] = sri | |
if module: | |
script["module"] = True | |
output.append(script) | |
return output | |
def _config(self): | |
return redact_keys( | |
self.config, ("secret", "key", "password", "token", "hash", "dsn") | |
) | |
def _routes(self): | |
routes = [] | |
for routes_to_add in pm.hook.register_routes(datasette=self): | |
for regex, view_fn in routes_to_add: | |
routes.append((regex, wrap_view(view_fn, self))) | |
def add_route(view, regex): | |
routes.append((regex, view)) | |
add_route(IndexView.as_view(self), r"/(\.(?P<format>jsono?))?$") | |
add_route(IndexView.as_view(self), r"/-/(\.(?P<format>jsono?))?$") | |
add_route(permanent_redirect("/-/"), r"/-$") | |
# TODO: /favicon.ico and /-/static/ deserve far-future cache expires | |
add_route(favicon, "/favicon.ico") | |
add_route( | |
asgi_static(app_root / "datasette" / "static"), r"/-/static/(?P<path>.*)$" | |
) | |
for path, dirname in self.static_mounts: | |
add_route(asgi_static(dirname), r"/" + path + "/(?P<path>.*)$") | |
# Mount any plugin static/ directories | |
for plugin in get_plugins(): | |
if plugin["static_path"]: | |
add_route( | |
asgi_static(plugin["static_path"]), | |
f"/-/static-plugins/{plugin['name']}/(?P<path>.*)$", | |
) | |
# Support underscores in name in addition to hyphens, see https://github.com/simonw/datasette/issues/611 | |
add_route( | |
asgi_static(plugin["static_path"]), | |
"/-/static-plugins/{}/(?P<path>.*)$".format( | |
plugin["name"].replace("-", "_") | |
), | |
) | |
add_route( | |
permanent_redirect( | |
"/_memory", forward_query_string=True, forward_rest=True | |
), | |
r"/:memory:(?P<rest>.*)$", | |
) | |
add_route( | |
JsonDataView.as_view(self, "versions.json", self._versions), | |
r"/-/versions(\.(?P<format>json))?$", | |
) | |
add_route( | |
JsonDataView.as_view( | |
self, "plugins.json", self._plugins, needs_request=True | |
), | |
r"/-/plugins(\.(?P<format>json))?$", | |
) | |
add_route( | |
JsonDataView.as_view(self, "settings.json", lambda: self._settings), | |
r"/-/settings(\.(?P<format>json))?$", | |
) | |
add_route( | |
JsonDataView.as_view(self, "config.json", lambda: self._config()), | |
r"/-/config(\.(?P<format>json))?$", | |
) | |
add_route( | |
JsonDataView.as_view(self, "threads.json", self._threads), | |
r"/-/threads(\.(?P<format>json))?$", | |
) | |
add_route( | |
JsonDataView.as_view(self, "databases.json", self._connected_databases), | |
r"/-/databases(\.(?P<format>json))?$", | |
) | |
add_route( | |
JsonDataView.as_view( | |
self, "actor.json", self._actor, needs_request=True, permission=None | |
), | |
r"/-/actor(\.(?P<format>json))?$", | |
) | |
add_route( | |
AuthTokenView.as_view(self), | |
r"/-/auth-token$", | |
) | |
add_route( | |
CreateTokenView.as_view(self), | |
r"/-/create-token$", | |
) | |
add_route( | |
ApiExplorerView.as_view(self), | |
r"/-/api$", | |
) | |
add_route( | |
LogoutView.as_view(self), | |
r"/-/logout$", | |
) | |
add_route( | |
PermissionsDebugView.as_view(self), | |
r"/-/permissions$", | |
) | |
add_route( | |
MessagesDebugView.as_view(self), | |
r"/-/messages$", | |
) | |
add_route( | |
AllowDebugView.as_view(self), | |
r"/-/allow-debug$", | |
) | |
add_route( | |
wrap_view(PatternPortfolioView, self), | |
r"/-/patterns$", | |
) | |
add_route( | |
wrap_view(database_download, self), | |
r"/(?P<database>[^\/\.]+)\.db$", | |
) | |
add_route( | |
wrap_view(DatabaseView, self), | |
r"/(?P<database>[^\/\.]+)(\.(?P<format>\w+))?$", | |
) | |
add_route(TableCreateView.as_view(self), r"/(?P<database>[^\/\.]+)/-/create$") | |
add_route( | |
wrap_view(QueryView, self), | |
r"/(?P<database>[^\/\.]+)/-/query(\.(?P<format>\w+))?$", | |
) | |
add_route( | |
wrap_view(table_view, self), | |
r"/(?P<database>[^\/\.]+)/(?P<table>[^\/\.]+)(\.(?P<format>\w+))?$", | |
) | |
add_route( | |
RowView.as_view(self), | |
r"/(?P<database>[^\/\.]+)/(?P<table>[^/]+?)/(?P<pks>[^/]+?)(\.(?P<format>\w+))?$", | |
) | |
add_route( | |
TableInsertView.as_view(self), | |
r"/(?P<database>[^\/\.]+)/(?P<table>[^\/\.]+)/-/insert$", | |
) | |
add_route( | |
TableUpsertView.as_view(self), | |
r"/(?P<database>[^\/\.]+)/(?P<table>[^\/\.]+)/-/upsert$", | |
) | |
add_route( | |
TableDropView.as_view(self), | |
r"/(?P<database>[^\/\.]+)/(?P<table>[^\/\.]+)/-/drop$", | |
) | |
add_route( | |
RowDeleteView.as_view(self), | |
r"/(?P<database>[^\/\.]+)/(?P<table>[^/]+?)/(?P<pks>[^/]+?)/-/delete$", | |
) | |
add_route( | |
RowUpdateView.as_view(self), | |
r"/(?P<database>[^\/\.]+)/(?P<table>[^/]+?)/(?P<pks>[^/]+?)/-/update$", | |
) | |
return [ | |
# Compile any strings to regular expressions | |
((re.compile(pattern) if isinstance(pattern, str) else pattern), view) | |
for pattern, view in routes | |
] | |
async def resolve_database(self, request): | |
database_route = tilde_decode(request.url_vars["database"]) | |
try: | |
return self.get_database(route=database_route) | |
except KeyError: | |
raise DatabaseNotFound(database_route) | |
async def resolve_table(self, request): | |
db = await self.resolve_database(request) | |
table_name = tilde_decode(request.url_vars["table"]) | |
# Table must exist | |
is_view = False | |
table_exists = await db.table_exists(table_name) | |
if not table_exists: | |
is_view = await db.view_exists(table_name) | |
if not (table_exists or is_view): | |
raise TableNotFound(db.name, table_name) | |
return ResolvedTable(db, table_name, is_view) | |
async def resolve_row(self, request): | |
db, table_name, _ = await self.resolve_table(request) | |
pk_values = urlsafe_components(request.url_vars["pks"]) | |
sql, params, pks = await row_sql_params_pks(db, table_name, pk_values) | |
results = await db.execute(sql, params, truncate=True) | |
row = results.first() | |
if row is None: | |
raise RowNotFound(db.name, table_name, pk_values) | |
return ResolvedRow(db, table_name, sql, params, pks, pk_values, results.first()) | |
def app(self): | |
"""Returns an ASGI app function that serves the whole of Datasette""" | |
routes = self._routes() | |
async def setup_db(): | |
# First time server starts up, calculate table counts for immutable databases | |
for database in self.databases.values(): | |
if not database.is_mutable: | |
await database.table_counts(limit=60 * 60 * 1000) | |
async def custom_csrf_error(scope, send, message_id): | |
await asgi_send( | |
send, | |
content=await self.render_template( | |
"csrf_error.html", | |
{"message_id": message_id, "message_name": Errors(message_id).name}, | |
), | |
status=403, | |
content_type="text/html; charset=utf-8", | |
) | |
asgi = asgi_csrf.asgi_csrf( | |
DatasetteRouter(self, routes), | |
signing_secret=self._secret, | |
cookie_name="ds_csrftoken", | |
skip_if_scope=lambda scope: any( | |
pm.hook.skip_csrf(datasette=self, scope=scope) | |
), | |
send_csrf_failed=custom_csrf_error, | |
) | |
if self.setting("trace_debug"): | |
asgi = AsgiTracer(asgi) | |
asgi = AsgiLifespan(asgi) | |
asgi = AsgiRunOnFirstRequest(asgi, on_startup=[setup_db, self.invoke_startup]) | |
for wrapper in pm.hook.asgi_wrapper(datasette=self): | |
asgi = wrapper(asgi) | |
return asgi | |
class DatasetteRouter: | |
def __init__(self, datasette, routes): | |
self.ds = datasette | |
self.routes = routes or [] | |
async def __call__(self, scope, receive, send): | |
        # Because we care about "foo/bar" vs. "foo%2Fbar" we decode raw_path ourselves
path = scope["path"] | |
raw_path = scope.get("raw_path") | |
if raw_path: | |
path = raw_path.decode("ascii") | |
path = path.partition("?")[0] | |
token = self.ds._request_id.set(str(uuid.uuid4())) | |
try: | |
return await self.route_path(scope, receive, send, path) | |
finally: | |
self.ds._request_id.reset(token) | |
async def route_path(self, scope, receive, send, path): | |
# Strip off base_url if present before routing | |
base_url = self.ds.setting("base_url") | |
if base_url != "/" and path.startswith(base_url): | |
path = "/" + path[len(base_url) :] | |
scope = dict(scope, route_path=path) | |
request = Request(scope, receive) | |
# Populate request_messages if ds_messages cookie is present | |
try: | |
request._messages = self.ds.unsign( | |
request.cookies.get("ds_messages", ""), "messages" | |
) | |
except BadSignature: | |
pass | |
scope_modifications = {} | |
# Apply force_https_urls, if set | |
if ( | |
self.ds.setting("force_https_urls") | |
and scope["type"] == "http" | |
and scope.get("scheme") != "https" | |
): | |
scope_modifications["scheme"] = "https" | |
# Handle authentication | |
default_actor = scope.get("actor") or None | |
actor = None | |
for actor in pm.hook.actor_from_request(datasette=self.ds, request=request): | |
actor = await await_me_maybe(actor) | |
if actor: | |
break | |
scope_modifications["actor"] = actor or default_actor | |
scope = dict(scope, **scope_modifications) | |
match, view = resolve_routes(self.routes, path) | |
if match is None: | |
return await self.handle_404(request, send) | |
new_scope = dict(scope, url_route={"kwargs": match.groupdict()}) | |
request.scope = new_scope | |
try: | |
response = await view(request, send) | |
if response: | |
self.ds._write_messages_to_response(request, response) | |
await response.asgi_send(send) | |
return | |
except NotFound as exception: | |
return await self.handle_404(request, send, exception) | |
except Forbidden as exception: | |
# Try the forbidden() plugin hook | |
for custom_response in pm.hook.forbidden( | |
datasette=self.ds, request=request, message=exception.args[0] | |
): | |
custom_response = await await_me_maybe(custom_response) | |
assert ( | |
custom_response | |
), "Default forbidden() hook should have been called" | |
return await custom_response.asgi_send(send) | |
except Exception as exception: | |
return await self.handle_exception(request, send, exception) | |
async def handle_404(self, request, send, exception=None): | |
# If path contains % encoding, redirect to tilde encoding | |
if "%" in request.path: | |
# Try the same path but with "%" replaced by "~" | |
# and "~" replaced with "~7E" | |
# and "." replaced with "~2E" | |
new_path = ( | |
request.path.replace("~", "~7E").replace("%", "~").replace(".", "~2E") | |
) | |
if request.query_string: | |
new_path += "?{}".format(request.query_string) | |
await asgi_send_redirect(send, new_path) | |
return | |
# If URL has a trailing slash, redirect to URL without it | |
path = request.scope.get( | |
"raw_path", request.scope["path"].encode("utf8") | |
).partition(b"?")[0] | |
context = {} | |
if path.endswith(b"/"): | |
path = path.rstrip(b"/") | |
if request.scope["query_string"]: | |
path += b"?" + request.scope["query_string"] | |
await asgi_send_redirect(send, path.decode("latin1")) | |
else: | |
# Is there a pages/* template matching this path? | |
route_path = request.scope.get("route_path", request.scope["path"]) | |
# Jinja requires template names to use "/" even on Windows | |
template_name = "pages" + route_path + ".html" | |
# Build a list of pages/blah/{name}.html matching expressions | |
environment = self.ds.get_jinja_environment(request) | |
pattern_templates = [ | |
filepath | |
for filepath in environment.list_templates() | |
if "{" in filepath and filepath.startswith("pages/") | |
] | |
page_routes = [ | |
(route_pattern_from_filepath(filepath[len("pages/") :]), filepath) | |
for filepath in pattern_templates | |
] | |
try: | |
template = environment.select_template([template_name]) | |
except TemplateNotFound: | |
template = None | |
if template is None: | |
# Try for a pages/blah/{name}.html template match | |
for regex, wildcard_template in page_routes: | |
match = regex.match(route_path) | |
if match is not None: | |
context.update(match.groupdict()) | |
template = wildcard_template | |
break | |
if template: | |
headers = {} | |
status = [200] | |
def custom_header(name, value): | |
headers[name] = value | |
return "" | |
def custom_status(code): | |
status[0] = code | |
return "" | |
def custom_redirect(location, code=302): | |
status[0] = code | |
headers["Location"] = location | |
return "" | |
def raise_404(message=""): | |
raise NotFoundExplicit(message) | |
context.update( | |
{ | |
"custom_header": custom_header, | |
"custom_status": custom_status, | |
"custom_redirect": custom_redirect, | |
"raise_404": raise_404, | |
} | |
) | |
try: | |
body = await self.ds.render_template( | |
template, | |
context, | |
request=request, | |
view_name="page", | |
) | |
except NotFoundExplicit as e: | |
await self.handle_exception(request, send, e) | |
return | |
# Pull content-type out into separate parameter | |
content_type = "text/html; charset=utf-8" | |
matches = [k for k in headers if k.lower() == "content-type"] | |
if matches: | |
content_type = headers[matches[0]] | |
await asgi_send( | |
send, | |
body, | |
status=status[0], | |
headers=headers, | |
content_type=content_type, | |
) | |
else: | |
await self.handle_exception(request, send, exception or NotFound("404")) | |
async def handle_exception(self, request, send, exception): | |
responses = [] | |
for hook in pm.hook.handle_exception( | |
datasette=self.ds, | |
request=request, | |
exception=exception, | |
): | |
response = await await_me_maybe(hook) | |
if response is not None: | |
responses.append(response) | |
assert responses, "Default exception handler should have returned something" | |
# Even if there are multiple responses use just the first one | |
response = responses[0] | |
await response.asgi_send(send) | |
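# Illustrative sketch (not part of Datasette): the "%"-to-"~" redirect in
# handle_404 above rewrites percent-encoded paths into Datasette's tilde
# encoding. The same transform as a standalone function:
def _demo_tilde_encode(path):
    # Escape existing "~" first, then swap "%" for "~" and "." for "~2E"
    return path.replace("~", "~7E").replace("%", "~").replace(".", "~2E")


# _demo_tilde_encode("/db/foo%2Fbar") == "/db/foo~2Fbar"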
_cleaner_task_str_re = re.compile(r"\S*site-packages/") | |
def _cleaner_task_str(task): | |
s = str(task) | |
# This has something like the following in it: | |
# running at /Users/simonw/Dropbox/Development/datasette/venv-3.7.5/lib/python3.7/site-packages/uvicorn/main.py:361> | |
# Clean up everything up to and including site-packages | |
return _cleaner_task_str_re.sub("", s) | |
def wrap_view(view_fn_or_class, datasette): | |
is_function = isinstance(view_fn_or_class, types.FunctionType) | |
if is_function: | |
return wrap_view_function(view_fn_or_class, datasette) | |
else: | |
if not isinstance(view_fn_or_class, type): | |
raise ValueError("view_fn_or_class must be a function or a class") | |
return wrap_view_class(view_fn_or_class, datasette) | |
def wrap_view_class(view_class, datasette): | |
async def async_view_for_class(request, send): | |
instance = view_class() | |
if inspect.iscoroutinefunction(instance.__call__): | |
return await async_call_with_supported_arguments( | |
instance.__call__, | |
scope=request.scope, | |
receive=request.receive, | |
send=send, | |
request=request, | |
datasette=datasette, | |
) | |
else: | |
return call_with_supported_arguments( | |
instance.__call__, | |
scope=request.scope, | |
receive=request.receive, | |
send=send, | |
request=request, | |
datasette=datasette, | |
) | |
async_view_for_class.view_class = view_class | |
return async_view_for_class | |
def wrap_view_function(view_fn, datasette): | |
@functools.wraps(view_fn) | |
async def async_view_fn(request, send): | |
if inspect.iscoroutinefunction(view_fn): | |
response = await async_call_with_supported_arguments( | |
view_fn, | |
scope=request.scope, | |
receive=request.receive, | |
send=send, | |
request=request, | |
datasette=datasette, | |
) | |
else: | |
response = call_with_supported_arguments( | |
view_fn, | |
scope=request.scope, | |
receive=request.receive, | |
send=send, | |
request=request, | |
datasette=datasette, | |
) | |
if response is not None: | |
return response | |
return async_view_fn | |
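# Illustrative sketch (not part of Datasette): wrap_view_function() and
# wrap_view_class() above rely on (async_)call_with_supported_arguments from
# datasette.utils to pass each view only the arguments its signature declares.
# A minimal version of that idea, assuming plain positional-or-keyword
# parameters and no **kwargs:
def _demo_call_with_supported_arguments(fn, **everything):
    import inspect as _inspect

    # Keep only the keyword arguments that fn actually accepts
    wanted = set(_inspect.signature(fn).parameters)
    return fn(**{k: v for k, v in everything.items() if k in wanted})


# _demo_call_with_supported_arguments(lambda request: request, request="r", send=None) == "r"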
def permanent_redirect(path, forward_query_string=False, forward_rest=False): | |
return wrap_view( | |
lambda request, send: Response.redirect( | |
path | |
+ (request.url_vars["rest"] if forward_rest else "") | |
+ ( | |
("?" + request.query_string) | |
if forward_query_string and request.query_string | |
else "" | |
), | |
status=301, | |
), | |
datasette=None, | |
) | |
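# Hypothetical usage of permanent_redirect() above - a route table entry such as
#
#     (r"^/old-prefix/(?P<rest>.*)$", permanent_redirect("/new-prefix/", forward_rest=True))
#
# would 301 "/old-prefix/x?y=1" to "/new-prefix/x"; add forward_query_string=True
# to preserve "?y=1" as well.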
_curly_re = re.compile(r"({.*?})") | |
def route_pattern_from_filepath(filepath): | |
# Drop the ".html" suffix | |
if filepath.endswith(".html"): | |
filepath = filepath[: -len(".html")] | |
re_bits = ["/"] | |
for bit in _curly_re.split(filepath): | |
if _curly_re.match(bit): | |
re_bits.append(f"(?P<{bit[1:-1]}>[^/]*)") | |
else: | |
re_bits.append(re.escape(bit)) | |
return re.compile("^" + "".join(re_bits) + "$") | |
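# Illustrative sketch (not part of Datasette): route_pattern_from_filepath()
# turns a pages/ template filename into a regex with a named group per
# {placeholder}, for example:
def _demo_route_pattern():
    pattern = route_pattern_from_filepath("topics/{topic}.html")
    match = pattern.match("/topics/python")
    assert match is not None
    assert match.groupdict() == {"topic": "python"}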
class NotFoundExplicit(NotFound): | |
pass | |
class DatasetteClient: | |
def __init__(self, ds): | |
self.ds = ds | |
self.app = ds.app() | |
def actor_cookie(self, actor): | |
# Utility method, mainly for tests | |
return self.ds.sign({"a": actor}, "actor") | |
def _fix(self, path, avoid_path_rewrites=False): | |
if not isinstance(path, PrefixedUrlString) and not avoid_path_rewrites: | |
path = self.ds.urls.path(path) | |
if path.startswith("/"): | |
path = f"http://localhost{path}" | |
return path | |
async def _request(self, method, path, **kwargs): | |
async with httpx.AsyncClient( | |
transport=httpx.ASGITransport(app=self.app), | |
cookies=kwargs.pop("cookies", None), | |
) as client: | |
return await getattr(client, method)(self._fix(path), **kwargs) | |
async def get(self, path, **kwargs): | |
return await self._request("get", path, **kwargs) | |
async def options(self, path, **kwargs): | |
return await self._request("options", path, **kwargs) | |
async def head(self, path, **kwargs): | |
return await self._request("head", path, **kwargs) | |
async def post(self, path, **kwargs): | |
return await self._request("post", path, **kwargs) | |
async def put(self, path, **kwargs): | |
return await self._request("put", path, **kwargs) | |
async def patch(self, path, **kwargs): | |
return await self._request("patch", path, **kwargs) | |
async def delete(self, path, **kwargs): | |
return await self._request("delete", path, **kwargs) | |
async def request(self, method, path, **kwargs): | |
avoid_path_rewrites = kwargs.pop("avoid_path_rewrites", None) | |
async with httpx.AsyncClient( | |
transport=httpx.ASGITransport(app=self.app), | |
cookies=kwargs.pop("cookies", None), | |
) as client: | |
return await client.request( | |
method, self._fix(path, avoid_path_rewrites), **kwargs | |
) | |
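# Illustrative usage (not part of this file): DatasetteClient issues in-process
# HTTP requests against the ASGI app via httpx.ASGITransport - no network
# socket is opened. Assuming an instance `ds`:
#
#     client = DatasetteClient(ds)
#     response = await client.get("/-/versions.json")
#     response.status_code, response.json()  # httpx response attributes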
</document_content> | |
</document> | |
<document index="5"> | |
<source>datasette/blob_renderer.py</source> | |
<document_content> | |
from datasette import hookimpl | |
from datasette.utils.asgi import Response, BadRequest | |
from datasette.utils import to_css_class | |
import hashlib | |
_BLOB_COLUMN = "_blob_column" | |
_BLOB_HASH = "_blob_hash" | |
async def render_blob(datasette, database, rows, columns, request, table, view_name): | |
if _BLOB_COLUMN not in request.args: | |
raise BadRequest(f"?{_BLOB_COLUMN}= is required") | |
blob_column = request.args[_BLOB_COLUMN] | |
if blob_column not in columns: | |
raise BadRequest(f"{blob_column} is not a valid column") | |
# If ?_blob_hash= provided, use that to select the row - otherwise use first row | |
blob_hash = None | |
if _BLOB_HASH in request.args: | |
blob_hash = request.args[_BLOB_HASH] | |
for row in rows: | |
value = row[blob_column] | |
if hashlib.sha256(value).hexdigest() == blob_hash: | |
break | |
else: | |
# Loop did not break | |
raise BadRequest( | |
"Link has expired - the requested binary content has changed or could not be found." | |
) | |
else: | |
row = rows[0] | |
value = row[blob_column] | |
filename_bits = [] | |
if table: | |
filename_bits.append(to_css_class(table)) | |
if "pks" in request.url_vars: | |
filename_bits.append(request.url_vars["pks"]) | |
filename_bits.append(to_css_class(blob_column)) | |
if blob_hash: | |
filename_bits.append(blob_hash[:6]) | |
filename = "-".join(filename_bits) + ".blob" | |
headers = { | |
"X-Content-Type-Options": "nosniff", | |
"Content-Disposition": f'attachment; filename="{filename}"', | |
} | |
return Response( | |
body=value or b"", | |
status=200, | |
headers=headers, | |
content_type="application/binary", | |
) | |
@hookimpl | |
def register_output_renderer(): | |
return { | |
"extension": "blob", | |
"render": render_blob, | |
"can_render": lambda: False, | |
} | |
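# Illustrative sketch (not part of Datasette): links to this renderer pass the
# column name plus a SHA-256 hash of the binary value, which render_blob()
# re-computes above to pick the matching row. Building those query parameters:
def _demo_blob_params(blob_column, value):
    return {
        _BLOB_COLUMN: blob_column,
        _BLOB_HASH: hashlib.sha256(value).hexdigest(),
    }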
</document_content> | |
</document> | |
<document index="6"> | |
<source>datasette/cli.py</source> | |
<document_content> | |
import asyncio | |
import uvicorn | |
import click | |
from click import formatting | |
from click.types import CompositeParamType | |
from click_default_group import DefaultGroup | |
import functools | |
import json | |
import os | |
import pathlib | |
from runpy import run_module | |
import shutil | |
from subprocess import call | |
import sys | |
import textwrap | |
import webbrowser | |
from .app import ( | |
Datasette, | |
DEFAULT_SETTINGS, | |
SETTINGS, | |
SQLITE_LIMIT_ATTACHED, | |
pm, | |
) | |
from .utils import ( | |
LoadExtension, | |
StartupError, | |
check_connection, | |
deep_dict_update, | |
find_spatialite, | |
parse_metadata, | |
ConnectionProblem, | |
SpatialiteConnectionProblem, | |
initial_path_for_datasette, | |
pairs_to_nested_config, | |
temporary_docker_directory, | |
value_as_boolean, | |
SpatialiteNotFound, | |
StaticMount, | |
ValueAsBooleanError, | |
) | |
from .utils.sqlite import sqlite3 | |
from .utils.testing import TestClient | |
from .version import __version__ | |
# Use Rich for tracebacks if it is installed | |
try: | |
from rich.traceback import install | |
install(show_locals=True) | |
except ImportError: | |
pass | |
class Setting(CompositeParamType): | |
name = "setting" | |
arity = 2 | |
def convert(self, config, param, ctx): | |
name, value = config | |
if name in DEFAULT_SETTINGS: | |
# For backwards compatibility with how this worked prior to | |
# Datasette 1.0, we turn bare setting names into setting.name | |
# Type checking for those older settings | |
default = DEFAULT_SETTINGS[name] | |
name = "settings.{}".format(name) | |
if isinstance(default, bool): | |
try: | |
return name, "true" if value_as_boolean(value) else "false" | |
except ValueAsBooleanError: | |
self.fail(f'"{name}" should be on/off/true/false/1/0', param, ctx) | |
elif isinstance(default, int): | |
if not value.isdigit(): | |
self.fail(f'"{name}" should be an integer', param, ctx) | |
return name, value | |
elif isinstance(default, str): | |
return name, value | |
else: | |
# Should never happen: | |
self.fail("Invalid option") | |
return name, value | |
def sqlite_extensions(fn): | |
fn = click.option( | |
"sqlite_extensions", | |
"--load-extension", | |
type=LoadExtension(), | |
envvar="DATASETTE_LOAD_EXTENSION", | |
multiple=True, | |
help="Path to a SQLite extension to load, and optional entrypoint", | |
)(fn) | |
# Wrap it in a custom error handler | |
@functools.wraps(fn) | |
def wrapped(*args, **kwargs): | |
try: | |
return fn(*args, **kwargs) | |
except AttributeError as e: | |
if "enable_load_extension" in str(e): | |
raise click.ClickException( | |
textwrap.dedent( | |
""" | |
Your Python installation does not have the ability to load SQLite extensions. | |
More information: https://datasette.io/help/extensions | |
""" | |
).strip() | |
) | |
raise | |
return wrapped | |
@click.group(cls=DefaultGroup, default="serve", default_if_no_args=True) | |
@click.version_option(version=__version__) | |
def cli(): | |
""" | |
Datasette is an open source multi-tool for exploring and publishing data | |
\b | |
About Datasette: https://datasette.io/ | |
Full documentation: https://docs.datasette.io/ | |
""" | |
@cli.command() | |
@click.argument("files", type=click.Path(exists=True), nargs=-1) | |
@click.option("--inspect-file", default="-") | |
@sqlite_extensions | |
def inspect(files, inspect_file, sqlite_extensions): | |
""" | |
Generate JSON summary of provided database files | |
This can then be passed to "datasette --inspect-file" to speed up count | |
operations against immutable database files. | |
""" | |
    loop = asyncio.get_event_loop()
    inspect_data = loop.run_until_complete(inspect_(files, sqlite_extensions))
if inspect_file == "-": | |
sys.stdout.write(json.dumps(inspect_data, indent=2)) | |
else: | |
with open(inspect_file, "w") as fp: | |
fp.write(json.dumps(inspect_data, indent=2)) | |
async def inspect_(files, sqlite_extensions): | |
app = Datasette([], immutables=files, sqlite_extensions=sqlite_extensions) | |
data = {} | |
for name, database in app.databases.items(): | |
counts = await database.table_counts(limit=3600 * 1000) | |
data[name] = { | |
"hash": database.hash, | |
"size": database.size, | |
"file": database.path, | |
"tables": { | |
table_name: {"count": table_count} | |
for table_name, table_count in counts.items() | |
}, | |
} | |
return data | |
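# Example of the JSON shape produced by inspect_() above (values illustrative):
#
#     {
#         "fixtures": {
#             "hash": "6cd8...",
#             "size": 260096,
#             "file": "fixtures.db",
#             "tables": {"facetable": {"count": 15}}
#         }
#     }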
@cli.group() | |
def publish(): | |
"""Publish specified SQLite database files to the internet along with a Datasette-powered interface and API""" | |
pass | |
# Register publish plugins | |
pm.hook.publish_subcommand(publish=publish) | |
@cli.command() | |
@click.option("--all", help="Include built-in default plugins", is_flag=True) | |
@click.option( | |
"--requirements", help="Output requirements.txt of installed plugins", is_flag=True | |
) | |
@click.option( | |
"--plugins-dir", | |
type=click.Path(exists=True, file_okay=False, dir_okay=True), | |
help="Path to directory containing custom plugins", | |
) | |
def plugins(all, requirements, plugins_dir): | |
"""List currently installed plugins""" | |
app = Datasette([], plugins_dir=plugins_dir) | |
if requirements: | |
for plugin in app._plugins(): | |
if plugin["version"]: | |
click.echo("{}=={}".format(plugin["name"], plugin["version"])) | |
else: | |
click.echo(json.dumps(app._plugins(all=all), indent=4)) | |
@cli.command() | |
@click.argument("files", type=click.Path(exists=True), nargs=-1, required=True) | |
@click.option( | |
"-t", | |
"--tag", | |
help="Name for the resulting Docker container, can optionally use name:tag format", | |
) | |
@click.option( | |
"-m", | |
"--metadata", | |
type=click.File(mode="r"), | |
help="Path to JSON/YAML file containing metadata to publish", | |
) | |
@click.option("--extra-options", help="Extra options to pass to datasette serve") | |
@click.option("--branch", help="Install datasette from a GitHub branch e.g. main") | |
@click.option( | |
"--template-dir", | |
type=click.Path(exists=True, file_okay=False, dir_okay=True), | |
help="Path to directory containing custom templates", | |
) | |
@click.option( | |
"--plugins-dir", | |
type=click.Path(exists=True, file_okay=False, dir_okay=True), | |
help="Path to directory containing custom plugins", | |
) | |
@click.option( | |
"--static", | |
type=StaticMount(), | |
help="Serve static files from this directory at /MOUNT/...", | |
multiple=True, | |
) | |
@click.option( | |
"--install", help="Additional packages (e.g. plugins) to install", multiple=True | |
) | |
@click.option("--spatialite", is_flag=True, help="Enable SpatialLite extension") | |
@click.option("--version-note", help="Additional note to show on /-/versions") | |
@click.option( | |
"--secret", | |
help="Secret used for signing secure values, such as signed cookies", | |
envvar="DATASETTE_PUBLISH_SECRET", | |
default=lambda: os.urandom(32).hex(), | |
) | |
@click.option( | |
"-p", | |
"--port", | |
default=8001, | |
type=click.IntRange(1, 65535), | |
help="Port to run the server on, defaults to 8001", | |
) | |
@click.option("--title", help="Title for metadata") | |
@click.option("--license", help="License label for metadata") | |
@click.option("--license_url", help="License URL for metadata") | |
@click.option("--source", help="Source label for metadata") | |
@click.option("--source_url", help="Source URL for metadata") | |
@click.option("--about", help="About label for metadata") | |
@click.option("--about_url", help="About URL for metadata") | |
def package( | |
files, | |
tag, | |
metadata, | |
extra_options, | |
branch, | |
template_dir, | |
plugins_dir, | |
static, | |
install, | |
spatialite, | |
version_note, | |
secret, | |
port, | |
**extra_metadata, | |
): | |
"""Package SQLite files into a Datasette Docker container""" | |
if not shutil.which("docker"): | |
click.secho( | |
' The package command requires "docker" to be installed and configured ', | |
bg="red", | |
fg="white", | |
bold=True, | |
err=True, | |
) | |
sys.exit(1) | |
with temporary_docker_directory( | |
files, | |
"datasette", | |
metadata=metadata, | |
extra_options=extra_options, | |
branch=branch, | |
template_dir=template_dir, | |
plugins_dir=plugins_dir, | |
static=static, | |
install=install, | |
spatialite=spatialite, | |
version_note=version_note, | |
secret=secret, | |
extra_metadata=extra_metadata, | |
port=port, | |
): | |
args = ["docker", "build"] | |
if tag: | |
args.append("-t") | |
args.append(tag) | |
args.append(".") | |
call(args) | |
@cli.command() | |
@click.argument("packages", nargs=-1) | |
@click.option( | |
"-U", "--upgrade", is_flag=True, help="Upgrade packages to latest version" | |
) | |
@click.option( | |
"-r", | |
"--requirement", | |
type=click.Path(exists=True), | |
help="Install from requirements file", | |
) | |
@click.option( | |
"-e", | |
"--editable", | |
help="Install a project in editable mode from this path", | |
) | |
def install(packages, upgrade, requirement, editable): | |
"""Install plugins and packages from PyPI into the same environment as Datasette""" | |
if not packages and not requirement and not editable: | |
raise click.UsageError("Please specify at least one package to install") | |
args = ["pip", "install"] | |
if upgrade: | |
args += ["--upgrade"] | |
if editable: | |
args += ["--editable", editable] | |
if requirement: | |
args += ["-r", requirement] | |
args += list(packages) | |
sys.argv = args | |
run_module("pip", run_name="__main__") | |
@cli.command() | |
@click.argument("packages", nargs=-1, required=True) | |
@click.option("-y", "--yes", is_flag=True, help="Don't ask for confirmation") | |
def uninstall(packages, yes): | |
"""Uninstall plugins and Python packages from the Datasette environment""" | |
sys.argv = ["pip", "uninstall"] + list(packages) + (["-y"] if yes else []) | |
run_module("pip", run_name="__main__") | |
@cli.command() | |
@click.argument("files", type=click.Path(), nargs=-1) | |
@click.option( | |
"-i", | |
"--immutable", | |
type=click.Path(exists=True), | |
help="Database files to open in immutable mode", | |
multiple=True, | |
) | |
@click.option( | |
"-h", | |
"--host", | |
default="127.0.0.1", | |
help=( | |
"Host for server. Defaults to 127.0.0.1 which means only connections " | |
"from the local machine will be allowed. Use 0.0.0.0 to listen to " | |
"all IPs and allow access from other machines." | |
), | |
) | |
@click.option( | |
"-p", | |
"--port", | |
default=8001, | |
type=click.IntRange(0, 65535), | |
help="Port for server, defaults to 8001. Use -p 0 to automatically assign an available port.", | |
) | |
@click.option( | |
"--uds", | |
help="Bind to a Unix domain socket", | |
) | |
@click.option( | |
"--reload", | |
is_flag=True, | |
help="Automatically reload if code or metadata change detected - useful for development", | |
) | |
@click.option( | |
"--cors", is_flag=True, help="Enable CORS by serving Access-Control-Allow-Origin: *" | |
) | |
@sqlite_extensions | |
@click.option( | |
"--inspect-file", help='Path to JSON file created using "datasette inspect"' | |
) | |
@click.option( | |
"-m", | |
"--metadata", | |
type=click.File(mode="r"), | |
help="Path to JSON/YAML file containing license/source metadata", | |
) | |
@click.option( | |
"--template-dir", | |
type=click.Path(exists=True, file_okay=False, dir_okay=True), | |
help="Path to directory containing custom templates", | |
) | |
@click.option( | |
"--plugins-dir", | |
type=click.Path(exists=True, file_okay=False, dir_okay=True), | |
help="Path to directory containing custom plugins", | |
) | |
@click.option( | |
"--static", | |
type=StaticMount(), | |
help="Serve static files from this directory at /MOUNT/...", | |
multiple=True, | |
) | |
@click.option("--memory", is_flag=True, help="Make /_memory database available") | |
@click.option( | |
"-c", | |
"--config", | |
type=click.File(mode="r"), | |
help="Path to JSON/YAML Datasette configuration file", | |
) | |
@click.option( | |
"-s", | |
"--setting", | |
"settings", | |
type=Setting(), | |
help="nested.key, value setting to use in Datasette configuration", | |
multiple=True, | |
) | |
@click.option( | |
"--secret", | |
help="Secret used for signing secure values, such as signed cookies", | |
envvar="DATASETTE_SECRET", | |
) | |
@click.option( | |
"--root", | |
help="Output URL that sets a cookie authenticating the root user", | |
is_flag=True, | |
) | |
@click.option( | |
"--get", | |
help="Run an HTTP GET request against this path, print results and exit", | |
) | |
@click.option( | |
"--token", | |
help="API token to send with --get requests", | |
) | |
@click.option( | |
"--actor", | |
help="Actor to use for --get requests (JSON string)", | |
) | |
@click.option("--version-note", help="Additional note to show on /-/versions") | |
@click.option("--help-settings", is_flag=True, help="Show available settings") | |
@click.option("--pdb", is_flag=True, help="Launch debugger on any errors") | |
@click.option( | |
"-o", | |
"--open", | |
"open_browser", | |
is_flag=True, | |
help="Open Datasette in your web browser", | |
) | |
@click.option( | |
"--create", | |
is_flag=True, | |
help="Create database files if they do not exist", | |
) | |
@click.option( | |
"--crossdb", | |
is_flag=True, | |
help="Enable cross-database joins using the /_memory database", | |
) | |
@click.option( | |
"--nolock", | |
is_flag=True, | |
help="Ignore locking, open locked files in read-only mode", | |
) | |
@click.option( | |
"--ssl-keyfile", | |
help="SSL key file", | |
envvar="DATASETTE_SSL_KEYFILE", | |
) | |
@click.option( | |
"--ssl-certfile", | |
help="SSL certificate file", | |
envvar="DATASETTE_SSL_CERTFILE", | |
) | |
@click.option( | |
"--internal", | |
type=click.Path(), | |
help="Path to a persistent Datasette internal SQLite database", | |
) | |
def serve( | |
files, | |
immutable, | |
host, | |
port, | |
uds, | |
reload, | |
cors, | |
sqlite_extensions, | |
inspect_file, | |
metadata, | |
template_dir, | |
plugins_dir, | |
static, | |
memory, | |
config, | |
settings, | |
secret, | |
root, | |
get, | |
token, | |
actor, | |
version_note, | |
help_settings, | |
pdb, | |
open_browser, | |
create, | |
crossdb, | |
nolock, | |
ssl_keyfile, | |
ssl_certfile, | |
internal, | |
return_instance=False, | |
): | |
"""Serve up specified SQLite database files with a web UI""" | |
if help_settings: | |
formatter = formatting.HelpFormatter() | |
with formatter.section("Settings"): | |
formatter.write_dl( | |
[ | |
(option.name, f"{option.help} (default={option.default})") | |
for option in SETTINGS | |
] | |
) | |
click.echo(formatter.getvalue()) | |
sys.exit(0) | |
if reload: | |
import hupper | |
reloader = hupper.start_reloader("datasette.cli.serve") | |
if immutable: | |
reloader.watch_files(immutable) | |
if config: | |
reloader.watch_files([config.name]) | |
if metadata: | |
reloader.watch_files([metadata.name]) | |
inspect_data = None | |
if inspect_file: | |
with open(inspect_file) as fp: | |
inspect_data = json.load(fp) | |
metadata_data = None | |
if metadata: | |
metadata_data = parse_metadata(metadata.read()) | |
config_data = None | |
if config: | |
config_data = parse_metadata(config.read()) | |
config_data = config_data or {} | |
# Merge in settings from -s/--setting | |
if settings: | |
settings_updates = pairs_to_nested_config(settings) | |
# Merge recursively, to avoid over-writing nested values | |
# https://github.com/simonw/datasette/issues/2389 | |
deep_dict_update(config_data, settings_updates) | |
kwargs = dict( | |
immutables=immutable, | |
cache_headers=not reload, | |
cors=cors, | |
inspect_data=inspect_data, | |
config=config_data, | |
metadata=metadata_data, | |
sqlite_extensions=sqlite_extensions, | |
template_dir=template_dir, | |
plugins_dir=plugins_dir, | |
static_mounts=static, | |
settings=None, # These are passed in config= now | |
memory=memory, | |
secret=secret, | |
version_note=version_note, | |
pdb=pdb, | |
crossdb=crossdb, | |
nolock=nolock, | |
internal=internal, | |
) | |
# if files is a single directory, use that as config_dir= | |
if 1 == len(files) and os.path.isdir(files[0]): | |
kwargs["config_dir"] = pathlib.Path(files[0]) | |
files = [] | |
# Verify list of files, create if needed (and --create) | |
for file in files: | |
if not pathlib.Path(file).exists(): | |
if create: | |
sqlite3.connect(file).execute("vacuum") | |
else: | |
raise click.ClickException( | |
"Invalid value for '[FILES]...': Path '{}' does not exist.".format( | |
file | |
) | |
) | |
# De-duplicate files so 'datasette db.db db.db' only attaches one /db | |
files = list(dict.fromkeys(files)) | |
try: | |
ds = Datasette(files, **kwargs) | |
except SpatialiteNotFound: | |
raise click.ClickException("Could not find SpatiaLite extension") | |
except StartupError as e: | |
raise click.ClickException(e.args[0]) | |
if return_instance: | |
# Private utility mechanism for writing unit tests | |
return ds | |
# Run the "startup" plugin hooks | |
asyncio.get_event_loop().run_until_complete(ds.invoke_startup()) | |
    # Run async soundness checks against the connected databases
asyncio.get_event_loop().run_until_complete(check_databases(ds)) | |
if token and not get: | |
raise click.ClickException("--token can only be used with --get") | |
if get: | |
client = TestClient(ds) | |
headers = {} | |
if token: | |
headers["Authorization"] = "Bearer {}".format(token) | |
cookies = {} | |
if actor: | |
cookies["ds_actor"] = client.actor_cookie(json.loads(actor)) | |
response = client.get(get, headers=headers, cookies=cookies) | |
click.echo(response.text) | |
exit_code = 0 if response.status == 200 else 1 | |
sys.exit(exit_code) | |
return | |
# Start the server | |
url = None | |
if root: | |
url = "http://{}:{}{}?token={}".format( | |
host, port, ds.urls.path("-/auth-token"), ds._root_token | |
) | |
click.echo(url) | |
if open_browser: | |
if url is None: | |
# Figure out most convenient URL - to table, database or homepage | |
path = asyncio.get_event_loop().run_until_complete( | |
initial_path_for_datasette(ds) | |
) | |
url = f"http://{host}:{port}{path}" | |
webbrowser.open(url) | |
uvicorn_kwargs = dict( | |
host=host, port=port, log_level="info", lifespan="on", workers=1 | |
) | |
if uds: | |
uvicorn_kwargs["uds"] = uds | |
if ssl_keyfile: | |
uvicorn_kwargs["ssl_keyfile"] = ssl_keyfile | |
if ssl_certfile: | |
uvicorn_kwargs["ssl_certfile"] = ssl_certfile | |
uvicorn.run(ds.app(), **uvicorn_kwargs) | |
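# Example invocations of the serve command above (illustrative):
#
#     datasette serve mydb.db                  # default host 127.0.0.1, port 8001
#     datasette mydb.db --reload --open        # "serve" is the default subcommand
#     datasette mydb.db -s settings.sql_time_limit_ms 3500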
@cli.command() | |
@click.argument("id") | |
@click.option( | |
"--secret", | |
help="Secret used for signing the API tokens", | |
envvar="DATASETTE_SECRET", | |
required=True, | |
) | |
@click.option( | |
"-e", | |
"--expires-after", | |
help="Token should expire after this many seconds", | |
type=int, | |
) | |
@click.option( | |
"alls", | |
"-a", | |
"--all", | |
type=str, | |
metavar="ACTION", | |
multiple=True, | |
help="Restrict token to this action", | |
) | |
@click.option( | |
"databases", | |
"-d", | |
"--database", | |
type=(str, str), | |
metavar="DB ACTION", | |
multiple=True, | |
help="Restrict token to this action on this database", | |
) | |
@click.option( | |
"resources", | |
"-r", | |
"--resource", | |
type=(str, str, str), | |
metavar="DB RESOURCE ACTION", | |
multiple=True, | |
help="Restrict token to this action on this database resource (a table, SQL view or named query)", | |
) | |
@click.option( | |
"--debug", | |
help="Show decoded token", | |
is_flag=True, | |
) | |
@click.option( | |
"--plugins-dir", | |
type=click.Path(exists=True, file_okay=False, dir_okay=True), | |
help="Path to directory containing custom plugins", | |
) | |
def create_token( | |
id, secret, expires_after, alls, databases, resources, debug, plugins_dir | |
): | |
""" | |
Create a signed API token for the specified actor ID | |
Example: | |
datasette create-token root --secret mysecret | |
To allow only "view-database-download" for all databases: | |
\b | |
datasette create-token root --secret mysecret \\ | |
--all view-database-download | |
To allow "create-table" against a specific database: | |
\b | |
datasette create-token root --secret mysecret \\ | |
--database mydb create-table | |
To allow "insert-row" against a specific table: | |
\b | |
        datasette create-token root --secret mysecret \\
--resource mydb mytable insert-row | |
Restricted actions can be specified multiple times using | |
multiple --all, --database, and --resource options. | |
Add --debug to see a decoded version of the token. | |
""" | |
ds = Datasette(secret=secret, plugins_dir=plugins_dir) | |
# Run ds.invoke_startup() in an event loop | |
loop = asyncio.get_event_loop() | |
loop.run_until_complete(ds.invoke_startup()) | |
# Warn about any unknown actions | |
actions = [] | |
actions.extend(alls) | |
actions.extend([p[1] for p in databases]) | |
actions.extend([p[2] for p in resources]) | |
for action in actions: | |
if not ds.permissions.get(action): | |
click.secho( | |
f" Unknown permission: {action} ", | |
fg="red", | |
err=True, | |
) | |
restrict_database = {} | |
for database, action in databases: | |
restrict_database.setdefault(database, []).append(action) | |
restrict_resource = {} | |
for database, resource, action in resources: | |
restrict_resource.setdefault(database, {}).setdefault(resource, []).append( | |
action | |
) | |
token = ds.create_token( | |
id, | |
expires_after=expires_after, | |
restrict_all=alls, | |
restrict_database=restrict_database, | |
restrict_resource=restrict_resource, | |
) | |
click.echo(token) | |
if debug: | |
encoded = token[len("dstok_") :] | |
click.echo("\nDecoded:\n") | |
click.echo(json.dumps(ds.unsign(encoded, namespace="token"), indent=2)) | |
pm.hook.register_commands(cli=cli) | |
async def check_databases(ds): | |
# Run check_connection against every connected database | |
# to confirm they are all usable | |
for database in list(ds.databases.values()): | |
try: | |
await database.execute_fn(check_connection) | |
except SpatialiteConnectionProblem: | |
suggestion = "" | |
try: | |
find_spatialite() | |
suggestion = "\n\nTry adding the --load-extension=spatialite option." | |
except SpatialiteNotFound: | |
pass | |
raise click.UsageError( | |
"It looks like you're trying to load a SpatiaLite" | |
+ " database without first loading the SpatiaLite module." | |
+ suggestion | |
+ "\n\nRead more: https://docs.datasette.io/en/stable/spatialite.html" | |
) | |
except ConnectionProblem as e: | |
raise click.UsageError( | |
f"Connection to {database.path} failed check: {str(e.args[0])}" | |
) | |
# If --crossdb and more than SQLITE_LIMIT_ATTACHED show warning | |
if ( | |
ds.crossdb | |
and len([db for db in ds.databases.values() if not db.is_memory]) | |
> SQLITE_LIMIT_ATTACHED | |
): | |
msg = ( | |
"Warning: --crossdb only works with the first {} attached databases".format( | |
SQLITE_LIMIT_ATTACHED | |
) | |
) | |
click.echo(click.style(msg, bold=True, fg="yellow"), err=True) | |
</document_content> | |
</document> | |
<document index="7"> | |
<source>datasette/database.py</source> | |
<document_content> | |
import asyncio | |
from collections import namedtuple | |
from pathlib import Path | |
import janus | |
import queue | |
import sqlite_utils | |
import sys | |
import threading | |
import uuid | |
from .tracer import trace | |
from .utils import ( | |
detect_fts, | |
detect_primary_keys, | |
detect_spatialite, | |
get_all_foreign_keys, | |
get_outbound_foreign_keys, | |
md5_not_usedforsecurity, | |
sqlite_timelimit, | |
sqlite3, | |
table_columns, | |
table_column_details, | |
) | |
from .utils.sqlite import sqlite_version | |
from .inspect import inspect_hash | |
connections = threading.local() | |
AttachedDatabase = namedtuple("AttachedDatabase", ("seq", "name", "file")) | |
class Database: | |
# For table counts stop at this many rows: | |
count_limit = 10000 | |
def __init__( | |
self, | |
ds, | |
path=None, | |
is_mutable=True, | |
is_memory=False, | |
memory_name=None, | |
mode=None, | |
): | |
self.name = None | |
self.route = None | |
self.ds = ds | |
self.path = path | |
self.is_mutable = is_mutable | |
self.is_memory = is_memory | |
self.memory_name = memory_name | |
if memory_name is not None: | |
self.is_memory = True | |
self.cached_hash = None | |
self.cached_size = None | |
self._cached_table_counts = None | |
self._write_thread = None | |
self._write_queue = None | |
# These are used when in non-threaded mode: | |
self._read_connection = None | |
self._write_connection = None | |
# This is used to track all file connections so they can be closed | |
self._all_file_connections = [] | |
self.mode = mode | |
@property | |
def cached_table_counts(self): | |
if self._cached_table_counts is not None: | |
return self._cached_table_counts | |
# Maybe use self.ds.inspect_data to populate cached_table_counts | |
if self.ds.inspect_data and self.ds.inspect_data.get(self.name): | |
self._cached_table_counts = { | |
key: value["count"] | |
for key, value in self.ds.inspect_data[self.name]["tables"].items() | |
} | |
return self._cached_table_counts | |
@property | |
def color(self): | |
if self.hash: | |
return self.hash[:6] | |
return md5_not_usedforsecurity(self.name)[:6] | |
def suggest_name(self): | |
if self.path: | |
return Path(self.path).stem | |
elif self.memory_name: | |
return self.memory_name | |
else: | |
return "db" | |
def connect(self, write=False): | |
extra_kwargs = {} | |
if write: | |
extra_kwargs["isolation_level"] = "IMMEDIATE" | |
if self.memory_name: | |
uri = "file:{}?mode=memory&cache=shared".format(self.memory_name) | |
conn = sqlite3.connect( | |
uri, uri=True, check_same_thread=False, **extra_kwargs | |
) | |
if not write: | |
conn.execute("PRAGMA query_only=1") | |
return conn | |
if self.is_memory: | |
return sqlite3.connect(":memory:", uri=True) | |
        # Open read-only: mode=ro for mutable files, immutable=1 for immutable ones
if self.is_mutable: | |
qs = "?mode=ro" | |
if self.ds.nolock: | |
qs += "&nolock=1" | |
else: | |
qs = "?immutable=1" | |
assert not (write and not self.is_mutable) | |
if write: | |
qs = "" | |
if self.mode is not None: | |
qs = f"?mode={self.mode}" | |
conn = sqlite3.connect( | |
f"file:{self.path}{qs}", uri=True, check_same_thread=False, **extra_kwargs | |
) | |
self._all_file_connections.append(conn) | |
return conn | |
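    # For reference, the SQLite URIs connect() produces (illustrative):
    #   read-only, mutable file:    file:data.db?mode=ro  (+ "&nolock=1" with --nolock)
    #   read-only, immutable file:  file:data.db?immutable=1
    #   write connection:           file:data.db  (with isolation_level="IMMEDIATE")
    #   shared in-memory database:  file:<name>?mode=memory&cache=shared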
def close(self): | |
# Close all connections - useful to avoid running out of file handles in tests | |
for connection in self._all_file_connections: | |
connection.close() | |
async def execute_write(self, sql, params=None, block=True): | |
def _inner(conn): | |
return conn.execute(sql, params or []) | |
with trace("sql", database=self.name, sql=sql.strip(), params=params): | |
results = await self.execute_write_fn(_inner, block=block) | |
return results | |
async def execute_write_script(self, sql, block=True): | |
def _inner(conn): | |
return conn.executescript(sql) | |
with trace("sql", database=self.name, sql=sql.strip(), executescript=True): | |
results = await self.execute_write_fn(_inner, block=block) | |
return results | |
async def execute_write_many(self, sql, params_seq, block=True): | |
def _inner(conn): | |
count = 0 | |
def count_params(params): | |
nonlocal count | |
for param in params: | |
count += 1 | |
yield param | |
return conn.executemany(sql, count_params(params_seq)), count | |
with trace( | |
"sql", database=self.name, sql=sql.strip(), executemany=True | |
) as kwargs: | |
results, count = await self.execute_write_fn(_inner, block=block) | |
kwargs["count"] = count | |
return results | |
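    # Illustrative usage (not part of Datasette): every write funnels through
    # execute_write_fn() and the single write connection, e.g.
    #
    #     await db.execute_write(
    #         "insert into logs (line) values (:line)", {"line": "hello"}
    #     )
    #     await db.execute_write_many(
    #         "insert into logs (line) values (?)", [["a"], ["b"]]
    #     )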
async def execute_isolated_fn(self, fn): | |
# Open a new connection just for the duration of this function | |
# blocking the write queue to avoid any writes occurring during it | |
if self.ds.executor is None: | |
# non-threaded mode | |
isolated_connection = self.connect(write=True) | |
try: | |
result = fn(isolated_connection) | |
finally: | |
isolated_connection.close() | |
try: | |
self._all_file_connections.remove(isolated_connection) | |
except ValueError: | |
# Was probably a memory connection | |
pass | |
return result | |
else: | |
# Threaded mode - send to write thread | |
return await self._send_to_write_thread(fn, isolated_connection=True) | |
async def execute_write_fn(self, fn, block=True, transaction=True): | |
if self.ds.executor is None: | |
# non-threaded mode | |
if self._write_connection is None: | |
self._write_connection = self.connect(write=True) | |
self.ds._prepare_connection(self._write_connection, self.name) | |
if transaction: | |
with self._write_connection: | |
return fn(self._write_connection) | |
else: | |
return fn(self._write_connection) | |
else: | |
return await self._send_to_write_thread( | |
fn, block=block, transaction=transaction | |
) | |
async def _send_to_write_thread( | |
self, fn, block=True, isolated_connection=False, transaction=True | |
): | |
if self._write_queue is None: | |
self._write_queue = queue.Queue() | |
if self._write_thread is None: | |
self._write_thread = threading.Thread( | |
target=self._execute_writes, daemon=True | |
) | |
self._write_thread.name = "_execute_writes for database {}".format( | |
self.name | |
) | |
self._write_thread.start() | |
task_id = uuid.uuid5(uuid.NAMESPACE_DNS, "datasette.io") | |
reply_queue = janus.Queue() | |
self._write_queue.put( | |
WriteTask(fn, task_id, reply_queue, isolated_connection, transaction) | |
) | |
if block: | |
result = await reply_queue.async_q.get() | |
if isinstance(result, Exception): | |
raise result | |
else: | |
return result | |
else: | |
return task_id | |
def _execute_writes(self): | |
# Infinite looping thread that protects the single write connection | |
# to this database | |
conn_exception = None | |
conn = None | |
try: | |
conn = self.connect(write=True) | |
self.ds._prepare_connection(conn, self.name) | |
except Exception as e: | |
conn_exception = e | |
while True: | |
task = self._write_queue.get() | |
if conn_exception is not None: | |
result = conn_exception | |
else: | |
if task.isolated_connection: | |
isolated_connection = self.connect(write=True) | |
try: | |
result = task.fn(isolated_connection) | |
except Exception as e: | |
sys.stderr.write("{}\n".format(e)) | |
sys.stderr.flush() | |
result = e | |
finally: | |
isolated_connection.close() | |
try: | |
self._all_file_connections.remove(isolated_connection) | |
except ValueError: | |
# Was probably a memory connection | |
pass | |
else: | |
try: | |
if task.transaction: | |
with conn: | |
result = task.fn(conn) | |
else: | |
result = task.fn(conn) | |
except Exception as e: | |
sys.stderr.write("{}\n".format(e)) | |
sys.stderr.flush() | |
result = e | |
task.reply_queue.sync_q.put(result) | |
async def execute_fn(self, fn): | |
if self.ds.executor is None: | |
# non-threaded mode | |
if self._read_connection is None: | |
self._read_connection = self.connect() | |
self.ds._prepare_connection(self._read_connection, self.name) | |
return fn(self._read_connection) | |
# threaded mode | |
def in_thread(): | |
conn = getattr(connections, self.name, None) | |
if not conn: | |
conn = self.connect() | |
self.ds._prepare_connection(conn, self.name) | |
setattr(connections, self.name, conn) | |
return fn(conn) | |
return await asyncio.get_event_loop().run_in_executor( | |
self.ds.executor, in_thread | |
) | |
async def execute( | |
self, | |
sql, | |
params=None, | |
truncate=False, | |
custom_time_limit=None, | |
page_size=None, | |
log_sql_errors=True, | |
): | |
"""Executes sql against db_name in a thread""" | |
page_size = page_size or self.ds.page_size | |
def sql_operation_in_thread(conn): | |
time_limit_ms = self.ds.sql_time_limit_ms | |
if custom_time_limit and custom_time_limit < time_limit_ms: | |
time_limit_ms = custom_time_limit | |
with sqlite_timelimit(conn, time_limit_ms): | |
try: | |
cursor = conn.cursor() | |
cursor.execute(sql, params if params is not None else {}) | |
max_returned_rows = self.ds.max_returned_rows | |
if max_returned_rows == page_size: | |
max_returned_rows += 1 | |
if max_returned_rows and truncate: | |
rows = cursor.fetchmany(max_returned_rows + 1) | |
truncated = len(rows) > max_returned_rows | |
rows = rows[:max_returned_rows] | |
else: | |
rows = cursor.fetchall() | |
truncated = False | |
except (sqlite3.OperationalError, sqlite3.DatabaseError) as e: | |
if e.args == ("interrupted",): | |
raise QueryInterrupted(e, sql, params) | |
if log_sql_errors: | |
sys.stderr.write( | |
"ERROR: conn={}, sql = {}, params = {}: {}\n".format( | |
conn, repr(sql), params, e | |
) | |
) | |
sys.stderr.flush() | |
raise | |
if truncate: | |
return Results(rows, truncated, cursor.description) | |
else: | |
return Results(rows, False, cursor.description) | |
with trace("sql", database=self.name, sql=sql.strip(), params=params): | |
results = await self.execute_fn(sql_operation_in_thread) | |
return results | |
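    # Illustrative usage (not part of Datasette): named parameters are passed
    # straight to sqlite3, and truncate=True caps results at max_returned_rows:
    #
    #     results = await db.execute(
    #         "select * from repos where stars > :stars", {"stars": 100}, truncate=True
    #     )
    #     # Results is constructed above as Results(rows, truncated, description)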
@property | |
def hash(self): | |
if self.cached_hash is not None: | |
return self.cached_hash | |
elif self.is_mutable or self.is_memory: | |
return None | |
elif self.ds.inspect_data and self.ds.inspect_data.get(self.name): | |
self.cached_hash = self.ds.inspect_data[self.name]["hash"] | |
return self.cached_hash | |
else: | |
p = Path(self.path) | |
self.cached_hash = inspect_hash(p) | |
return self.cached_hash | |
@property | |
def size(self): | |
if self.cached_size is not None: | |
return self.cached_size | |
elif self.is_memory: | |
return 0 | |
elif self.is_mutable: | |
return Path(self.path).stat().st_size | |
elif self.ds.inspect_data and self.ds.inspect_data.get(self.name): | |
self.cached_size = self.ds.inspect_data[self.name]["size"] | |
return self.cached_size | |
else: | |
self.cached_size = Path(self.path).stat().st_size | |
return self.cached_size | |
async def table_counts(self, limit=10): | |
if not self.is_mutable and self.cached_table_counts is not None: | |
return self.cached_table_counts | |
        # Try to get counts for each table, applying the limit= ms timeout to each count
counts = {} | |
for table in await self.table_names(): | |
try: | |
table_count = ( | |
await self.execute( | |
f"select count(*) from (select * from [{table}] limit {self.count_limit + 1})", | |
custom_time_limit=limit, | |
) | |
).rows[0][0] | |
counts[table] = table_count | |
# In some cases I saw "SQL Logic Error" here in addition to | |
# QueryInterrupted - so we catch that too: | |
except (QueryInterrupted, sqlite3.OperationalError, sqlite3.DatabaseError): | |
counts[table] = None | |
if not self.is_mutable: | |
self._cached_table_counts = counts | |
return counts | |
@property | |
def mtime_ns(self): | |
if self.is_memory: | |
return None | |
return Path(self.path).stat().st_mtime_ns | |
async def attached_databases(self): | |
# This used to be: | |
# select seq, name, file from pragma_database_list() where seq > 0 | |
# But SQLite prior to 3.16.0 doesn't support pragma functions | |