Compare commits

...

6 Commits

Author SHA1 Message Date
Ian Chartier cb8257b598 Group Issue 2 Fixed
This commit fixes group issue 2. Essentially, when checking files, I convert the files tuple to a list, remove 'serve' from it if present, then convert it back to a tuple. This effectively removes the problem of 'serve' being taken as a path to the database.
2024-04-26 10:49:07 -04:00
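A minimal standalone sketch of the cleanup this message describes (the tuple contents are made up for illustration): Click passes the variadic FILES argument as a tuple, so removing a stray 'serve' entry means converting to a list and back.

files = ("serve", "fixtures.db")  # e.g. what serve() receives when "serve" leaks into FILES

files = list(files)        # tuples are immutable, so work on a list copy
if "serve" in files:
    files.remove("serve")  # drop the leaked subcommand name
files = tuple(files)       # restore the tuple type the rest of the code expects

assert files == ("fixtures.db",)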
Ian Chartier 171e3e3d0c Final fix for Group issue 2: 2123
My previous group issue 2 commit did not actually fix the issue, so I had to go back and fix it. The problem wasn't that the wrong argument was being passed to datasette; rather, the serve command did not need to be passed explicitly, since the --reload option already invokes it. So I reverted the changes I made in the previous commit and added a proper error message that better explains what is going on.

Reload no longer calls serve, and the intended command "datasette --reload <path/to/database>" now works correctly.
2024-04-24 16:29:20 -04:00
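For context, a hedged sketch of why spelling out serve is redundant: Datasette's CLI is built on click-default-group, which dispatches to a default subcommand when none is named, so the database path alone is enough. The toy app below only mirrors that layout; it is not Datasette's actual code, and the names cli/serve/files are illustrative.

import click
from click_default_group import DefaultGroup  # pip install click-default-group


@click.group(cls=DefaultGroup, default="serve", default_if_no_args=True)
def cli():
    """Toy CLI that, like Datasette's, defaults to its serve subcommand."""


@cli.command()
@click.argument("files", nargs=-1)
@click.option("--reload", is_flag=True)
def serve(files, reload):
    # "toy data.db" and "toy serve data.db" both land here; re-inserting
    # "serve" on a re-invocation would leak it into FILES instead.
    click.echo(f"serving {files} (reload={reload})")


if __name__ == "__main__":
    cli()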
Tyler McKenzie 9200d59c08 Group Issue #1910
Potential fix in datasette/views/database.py.

Issue #1910 refers to columns having no type checking; on analysis, columns with no type are simply set to the "text" type. Implemented a feature that checks whether the values within such a column are all integers or all floats and sets the type accordingly; if they are not, the type is set to "text" instead, as originally intended.

#1910 was not even an issue in the first place; I just classified integers and floats as themselves instead of setting everything to "text" strings.
2024-04-24 16:12:12 -04:00
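A standalone sketch of the value-based inference this message describes, assuming a column arrives as a dict that may carry sample values under a "values" key (that key is illustrative, not part of Datasette's documented column schema):

def infer_column_type(column: dict) -> str:
    # Respect an explicit type if one was supplied
    if column.get("type"):
        return column["type"]
    values = [v for v in column.get("values", []) if v is not None]
    # bool is a subclass of int in Python, so exclude it explicitly
    if values and all(isinstance(v, int) and not isinstance(v, bool) for v in values):
        return "integer"
    if values and all(isinstance(v, float) for v in values):
        return "float"
    return "text"  # the original fallback


print(infer_column_type({"name": "age", "values": [1, 2, 3]}))     # integer
print(infer_column_type({"name": "score", "values": [1.5, 2.0]}))  # float
print(infer_column_type({"name": "notes"}))                        # text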
Jonathan Quigley 35b2d267f7 Merge remote-tracking branch 'origin/main' 2024-04-22 22:08:58 -04:00
Jonathan Quigley 40318df506 fixed issue 1980 2024-04-22 22:06:56 -04:00
Tyler McKenzie 8599ce8e15 minor commit to allow me to pull 2024-04-22 19:52:41 -04:00
3 changed files with 100 additions and 77 deletions

datasette/cli.py · View file

@@ -21,9 +21,9 @@ from .app import (
     SQLITE_LIMIT_ATTACHED,
     pm,
 )
+from datasette.views.error_module import StartupError
 from .utils import (
     LoadExtension,
-    StartupError,
     check_connection,
     find_spatialite,
     parse_metadata,
@@ -248,20 +248,20 @@ def plugins(all, requirements, plugins_dir):
 @click.option("--about", help="About label for metadata")
 @click.option("--about_url", help="About URL for metadata")
 def package(
-    files,
-    tag,
-    metadata,
-    extra_options,
-    branch,
-    template_dir,
-    plugins_dir,
-    static,
-    install,
-    spatialite,
-    version_note,
-    secret,
-    port,
-    **extra_metadata,
+    files,
+    tag,
+    metadata,
+    extra_options,
+    branch,
+    template_dir,
+    plugins_dir,
+    static,
+    install,
+    spatialite,
+    version_note,
+    secret,
+    port,
+    **extra_metadata,
 ):
     """Package SQLite files into a Datasette Docker container"""
     if not shutil.which("docker"):
@@ -274,20 +274,20 @@ def package(
         )
         sys.exit(1)
     with temporary_docker_directory(
-        files,
-        "datasette",
-        metadata=metadata,
-        extra_options=extra_options,
-        branch=branch,
-        template_dir=template_dir,
-        plugins_dir=plugins_dir,
-        static=static,
-        install=install,
-        spatialite=spatialite,
-        version_note=version_note,
-        secret=secret,
-        extra_metadata=extra_metadata,
-        port=port,
+        files,
+        "datasette",
+        metadata=metadata,
+        extra_options=extra_options,
+        branch=branch,
+        template_dir=template_dir,
+        plugins_dir=plugins_dir,
+        static=static,
+        install=install,
+        spatialite=spatialite,
+        version_note=version_note,
+        secret=secret,
+        extra_metadata=extra_metadata,
+        port=port,
     ):
         args = ["docker", "build"]
         if tag:
@@ -352,9 +352,9 @@ def uninstall(packages, yes):
     "--host",
     default="127.0.0.1",
     help=(
-        "Host for server. Defaults to 127.0.0.1 which means only connections "
-        "from the local machine will be allowed. Use 0.0.0.0 to listen to "
-        "all IPs and allow access from other machines."
+        "Host for server. Defaults to 127.0.0.1 which means only connections "
+        "from the local machine will be allowed. Use 0.0.0.0 to listen to "
+        "all IPs and allow access from other machines."
     ),
 )
 @click.option(
@@ -478,38 +478,38 @@ def uninstall(packages, yes):
     help="Path to a persistent Datasette internal SQLite database",
 )
 def serve(
-    files,
-    immutable,
-    host,
-    port,
-    uds,
-    reload,
-    cors,
-    sqlite_extensions,
-    inspect_file,
-    metadata,
-    template_dir,
-    plugins_dir,
-    static,
-    memory,
-    config,
-    settings,
-    secret,
-    root,
-    get,
-    token,
-    actor,
-    version_note,
-    help_settings,
-    pdb,
-    open_browser,
-    create,
-    crossdb,
-    nolock,
-    ssl_keyfile,
-    ssl_certfile,
-    internal,
-    return_instance=False,
+    files,
+    immutable,
+    host,
+    port,
+    uds,
+    reload,
+    cors,
+    sqlite_extensions,
+    inspect_file,
+    metadata,
+    template_dir,
+    plugins_dir,
+    static,
+    memory,
+    config,
+    settings,
+    secret,
+    root,
+    get,
+    token,
+    actor,
+    version_note,
+    help_settings,
+    pdb,
+    open_browser,
+    create,
+    crossdb,
+    nolock,
+    ssl_keyfile,
+    ssl_certfile,
+    internal,
+    return_instance=False,
 ):
     """Serve up specified SQLite database files with a web UI"""
     if help_settings:
@@ -576,18 +576,22 @@ def serve(
         )
     # if files is a single directory, use that as config_dir=
-    if 2 == len(files) and os.path.isdir(files[0]):
+    if 1 == len(files) and os.path.isdir(files[0]):
         kwargs["config_dir"] = pathlib.Path(files[0])
         files = []
     # Verify list of files, create if needed (and --create)
     for file in files:
+        files = list(files)
+        if files[0] == 'serve':
+            files.pop(0)
+        files = tuple(files)
         if not pathlib.Path(file).exists():
             if create:
                 sqlite3.connect(file).execute("vacuum")
             else:
                 raise click.ClickException(
-                    "Invalid value for '[FILES]...': Path '{}' does not exist.".format(
+                    "Invalid value for '[FILES]...': Path '{}' does not exist".format(
                         file
                     )
                 )
@@ -601,7 +605,6 @@ def serve(
         raise click.ClickException("Could not find SpatiaLite extension")
     except StartupError as e:
         raise click.ClickException(e.args[0])
-
     if return_instance:
         # Private utility mechanism for writing unit tests
         return ds
@@ -708,7 +711,7 @@ def serve(
     help="Path to directory containing custom plugins",
 )
 def create_token(
-    id, secret, expires_after, alls, databases, resources, debug, plugins_dir
+    id, secret, expires_after, alls, databases, resources, debug, plugins_dir
 ):
     """
     Create a signed API token for the specified actor ID
@@ -777,7 +780,7 @@ def create_token(
     )
     click.echo(token)
     if debug:
-        encoded = token[len("dstok_") :]
+        encoded = token[len("dstok_"):]
         click.echo("\nDecoded:\n")
         click.echo(json.dumps(ds.unsign(encoded, namespace="token"), indent=2))
@@ -810,9 +813,9 @@ async def check_databases(ds):
     )
     # If --crossdb and more than SQLITE_LIMIT_ATTACHED show warning
     if (
-        ds.crossdb
-        and len([db for db in ds.databases.values() if not db.is_memory])
-        > SQLITE_LIMIT_ATTACHED
+        ds.crossdb
+        and len([db for db in ds.databases.values() if not db.is_memory])
+        > SQLITE_LIMIT_ATTACHED
     ):
         msg = (
             "Warning: --crossdb only works with the first {} attached databases".format(

datasette/views/database.py · View file

@@ -933,8 +933,22 @@ class TableCreateView(BaseView):
                 return _error(["columns must be a list of objects"])
             if not column.get("name") or not isinstance(column.get("name"), str):
                 return _error(["Column name is required"])
+            # Check if type is specified
             if not column.get("type"):
-                column["type"] = "text"
+                # If type is not specified, check the values in the column
+                column_values = [value for value in column.get("values", []) if value is not None]
+                # Check if all values in the column are integers
+                if all(isinstance(value, int) for value in column_values):
+                    column["type"] = "integer"
+                # Check if all values in the column are floats
+                elif all(isinstance(value, float) for value in column_values):
+                    column["type"] = "float"
+                # If values are not all integers or floats, set type as "text"
+                else:
+                    column["type"] = "text"
             if column["type"] not in self._supported_column_types:
                 return _error(
                     ["Unsupported column type: {}".format(column["type"])]
@@ -1140,4 +1154,4 @@ async def display_rows(datasette, database, request, rows, columns):
                 display_value = display_value[:truncate_cells] + "\u2026"
             display_row.append(display_value)
         display_rows.append(display_row)
-    return display_rows
+    return display_rows

datasette/views/table.py · View file

@@ -705,7 +705,12 @@ async def _sortable_columns_for_table(datasette, database_name, table_name, use_
     db = datasette.databases[database_name]
     table_metadata = await datasette.table_config(database_name, table_name)
     if "sortable_columns" in table_metadata:
-        sortable_columns = set(table_metadata["sortable_columns"])
+        # fix now allows any primary key to be sorted as well with the metadata
+        sort_col = set(table_metadata["sortable_columns"])
+        pk_col = set(await db.primary_keys(table_name))
+        sortable_columns = [sort_col.pop()]
+        if len(pk_col) > 0:
+            sortable_columns.append(pk_col.pop())
     else:
         sortable_columns = set(await db.table_columns(table_name))
     if use_rowid:
@@ -713,7 +718,8 @@ async def _sortable_columns_for_table(datasette, database_name, table_name, use_
     return sortable_columns


-async def _sort_order(table_metadata, sortable_columns, request, order_by):
+async def _sort_order(datasette, database_name, table_name, table_metadata, sortable_columns, request, order_by):
+    db = datasette.databases[database_name]
     sort = request.args.get("_sort")
     sort_desc = request.args.get("_sort_desc")
@@ -1042,7 +1048,7 @@ async def table_view_data(
     )
     sort, sort_desc, order_by = await _sort_order(
-        table_metadata, sortable_columns, request, order_by
+        datasette, database_name, table_name, table_metadata, sortable_columns, request, order_by
     )
     from_sql = "from {table_name} {where}".format(