diff --git a/.github/workflows/automated-testing.yaml b/.github/workflows/automated-testing.yaml index cd5abfe10..d792fb208 100644 --- a/.github/workflows/automated-testing.yaml +++ b/.github/workflows/automated-testing.yaml @@ -10,7 +10,7 @@ on: jobs: run-tests: - runs-on: ubuntu-22.04 + runs-on: ubuntu-latest env: # LD_PRELOAD: /lib/x86_64-linux-gnu/libSegFault.so # SEGFAULT_SIGNALS: all @@ -55,8 +55,8 @@ jobs: - name: Install linux dependencies run: | sudo apt-get update -y - sudo apt-get install -y postgresql-client --no-install-recommends ca-certificates || apt-get install -y --no-install-recommends ca-certificates - sudo update-ca-certificates || update-ca-certificates + sudo apt-get install -y postgresql-client ca-certificates + sudo update-ca-certificates - name: Set up JDK 11 uses: actions/setup-java@v4 with: diff --git a/api/__init__.py b/api/__init__.py index b423b8d9f..bf8cab862 100644 --- a/api/__init__.py +++ b/api/__init__.py @@ -1,4 +1,4 @@ -""" +__license__ = """ SPDX-FileCopyrightText: 2025 Christian Winger © Öko-Institut e.V. SPDX-FileCopyrightText: 2025 Martin Glauer © Otto-von-Guericke-Universität Magdeburg SPDX-FileCopyrightText: 2025 Martin Glauer © Otto-von-Guericke-Universität Magdeburg diff --git a/api/actions.py b/api/actions.py index a8dffa195..32c0eb31b 100644 --- a/api/actions.py +++ b/api/actions.py @@ -1,5 +1,6 @@ -"""Api actions. +"""Api actions.""" +__license__ = """ SPDX-FileCopyrightText: 2025 Pierre Francois © Reiner Lemoine Institut SPDX-FileCopyrightText: 2025 Christian Winger © Öko-Institut e.V. 
SPDX-FileCopyrightText: 2025 Eike Broda @@ -48,7 +49,7 @@ ) from sqlalchemy import types as sa_types from sqlalchemy.exc import NoSuchTableError -from sqlalchemy.sql.expression import Select +from sqlalchemy.sql.expression import Executable, Select import dataedit.metadata from api.error import APIError @@ -154,7 +155,7 @@ def assert_add_tag_permission(user, table_obj: Table, permission: int) -> None: def translate_fetched_cell(cell): if isinstance(cell, geoalchemy2.WKBElement): - return new_engine_execute(cell.ST_AsText()).scalar() + return _execute(_get_engine(), cell.ST_AsText()).scalar() elif isinstance(cell, memoryview): return wkb.dumps(wkb.loads(cell.tobytes()), hex=True) else: @@ -322,13 +323,9 @@ def describe_columns(table_obj: Table): table=table_obj.name, schema=table_obj.oedb_schema ) ) - response = session_execute(session, query) - - # Note: cast is only for type checking, - # should disappear once we migrate to sqlalchemy >= 1.4 - response = cast(ResultProxy, response) - + response = _execute(session, query) session.close() + return { column.column_name: { "ordinal_position": column.ordinal_position, @@ -372,7 +369,7 @@ def describe_indexes(table_obj: Table): table=table_obj.name, schema=table_obj.oedb_schema ) ) - response = session_execute(session, query) + response = _execute(session, query) session.close() # Use a single-value dictionary to allow future extension with downward @@ -403,7 +400,7 @@ def describe_constraints(table_obj: Table): query = "select constraint_name, constraint_type, is_deferrable, initially_deferred, pg_get_constraintdef(c.oid) as definition from information_schema.table_constraints JOIN pg_constraint AS c ON c.conname=constraint_name where table_name='{table}' AND constraint_schema='{schema}';".format( # noqa table=table_obj.name, schema=table_obj.oedb_schema ) - response = session_execute(session, query) + response = _execute(session, query) session.close() return { column.constraint_name: { @@ -433,7 +430,7 @@ def 
perform_sql(sql_statement, parameter: dict | None = None) -> dict: return get_response_dict(success=True) try: - result = session_execute_parameter(session, sql_statement, parameter) + result = _execute(session, sql_statement, parameter) except Exception as e: logger.error("SQL Action failed. \n Error:\n" + str(e)) session.rollback() @@ -662,7 +659,7 @@ def get_column_changes(reviewed=None, changed=None, table_obj: Table | None = No sql = "".join(query) - response = session_execute(session, sql) + response = _execute(session, sql) session.close() return [ @@ -715,7 +712,7 @@ def get_constraints_changes( sql = "".join(query) - response = session_execute(session, sql) + response = _execute(session, sql) session.close() return [ @@ -744,7 +741,6 @@ def get_column(d): def column_alter(query, table_obj: Table, column): - if column == "id": raise APIError("You cannot alter the id column") alter_preamble = 'ALTER TABLE "{schema}"."{table}" ALTER COLUMN "{column}" '.format( @@ -979,7 +975,6 @@ def __internal_select(query, context): def __change_rows( table_obj: Table, request, context, target_sa_table: "SATable", setter, fields=None ) -> dict: - query: dict = { "from": { "type": "table", @@ -1044,7 +1039,7 @@ def _drop_not_null_constraints_from_delete_meta_table( AND table_schema = '{meta_schema}' AND is_nullable = 'NO' """ - resp = new_engine_execute(query).fetchall() + resp = _execute(_get_engine(), query).fetchall() column_names = [x[0] for x in resp] if resp else [] # filter meta columns and id column_names = [ @@ -1058,7 +1053,7 @@ def _drop_not_null_constraints_from_delete_meta_table( # drop not null from these columns col_drop = ", ".join(f'ALTER "{c}" DROP NOT NULL' for c in column_names) query = f'ALTER TABLE "{meta_schema}"."{meta_table_delete}" {col_drop};' - new_engine_execute(query) + _execute(_get_engine(), query) def data_insert_check(table_obj: Table, values, context): @@ -1076,7 +1071,7 @@ def data_insert_check(table_obj: Table, values, context): 
table=table_obj.name, schema=table_obj.oedb_schema ) ) - response = session_execute(session, query) + response = _execute(session, query) session.close() for constraint in response: @@ -1091,7 +1086,7 @@ def data_insert_check(table_obj: Table, values, context): # Use joins instead to avoid piping your results through # python. if isinstance(values, Select): - values = new_engine_execute(values) + values = _execute(_get_engine(), values) for row in values: # TODO: This is horribly inefficient! query = { @@ -1178,7 +1173,8 @@ def execute_sqla(query, cursor: AbstractCursor | Session) -> None: params[key] = json.dumps(value) else: params[key] = dialect._json_serializer(value) - cursor_execute_parameter(cursor, str(compiled), params) + query = str(compiled) + _execute(cursor, query, params) except (psycopg2.DataError, exc.IdentifierError, psycopg2.IntegrityError) as e: raise APIError(str(e)) except psycopg2.InternalError as e: @@ -1208,7 +1204,8 @@ def execute_sqla(query, cursor: AbstractCursor | Session) -> None: def analyze_columns(table_obj: Table): - result = new_engine_execute( + result = _execute( + _get_engine(), "select column_name as id, data_type as type from information_schema.columns where table_name = '{table}' and table_schema='{schema}';".format( # noqa schema=table_obj.oedb_schema, table=table_obj.name ), @@ -1254,7 +1251,7 @@ def move_publish(table_obj: Table, topic, embargo_period): if not license_check: raise APIError( - "A issue with the license from the metadata was found: " f"{license_error}" + f"A issue with the license from the metadata was found: {license_error}" ) if embargo_period in ["6_months", "1_year"]: @@ -1319,29 +1316,6 @@ def fetchmany(request: dict, context): return cursor.fetchmany(int(request["size"])) -def getValue(table_obj: Table, column, id): - sql = 'SELECT {column} FROM "{schema}"."{table}" WHERE id={id}'.format( - column=column, schema=table_obj.oedb_schema, table=table_obj.name, id=id - ) - - session = _create_oedb_session() 
- - try: - result = session_execute(session, sql) - - returnValue = None - for row in result: - returnValue = row[column] - - return returnValue - except Exception as e: - logger.error("SQL Action failed. \n Error:\n" + str(e)) - session.rollback() - finally: - session.close() - return None - - def apply_changes(table_obj: Table, cursor: AbstractCursor | None = None): """Apply changes from the meta tables to the actual table. @@ -1366,7 +1340,6 @@ def add_type(d, type): cursor = cast(AbstractCursor, connection.cursor()) # type:ignore TODO try: - columns = list(describe_columns(table_obj).keys()) extended_columns = columns + ["_submitted", "_id"] @@ -1375,11 +1348,9 @@ def add_type(d, type): oedb_table = table_obj.get_oedb_table_proxy(user=None) insert_sa_table = oedb_table._insert_table.get_sa_table() - cursor_execute( + _execute( cursor, - "select * " - 'from "{schema}"."{table}" ' - "where _applied = FALSE;".format( + 'select * from "{schema}"."{table}" where _applied = FALSE;'.format( schema=insert_sa_table.schema, table=insert_sa_table.name ), ) @@ -1396,11 +1367,9 @@ def add_type(d, type): ] update_sa_table = oedb_table._edit_table.get_sa_table() - cursor_execute( + _execute( cursor, - "select * " - 'from "{schema}"."{table}" ' - "where _applied = FALSE;".format( + 'select * from "{schema}"."{table}" where _applied = FALSE;'.format( schema=update_sa_table.schema, table=update_sa_table.name ), ) @@ -1417,11 +1386,9 @@ def add_type(d, type): ] delete_sa_table = oedb_table._delete_table.get_sa_table() - cursor_execute( + _execute( cursor, - "select * " - 'from "{schema}"."{table}" ' - "where _applied = FALSE;".format( + 'select * from "{schema}"."{table}" where _applied = FALSE;'.format( schema=delete_sa_table.schema, table=delete_sa_table.name ), ) @@ -1478,7 +1445,6 @@ def _apply_stack(cursor: AbstractCursor, sa_table: "SATable", changes, change_ty def set_applied( session: AbstractCursor | Session, sa_table: "SATable", rids, mode: int ): - # TODO:permission 
check is still done outside of this function, # so we pass user=None oedb_table = Table.objects.get(name=sa_table.name).get_oedb_table_proxy(user=None) @@ -1498,7 +1464,9 @@ def set_applied( .values(_applied=True) .compile() ) - session_execute_parameter(session, str(update_query), update_query.params) + + query = str(update_query) + _execute(session, query, update_query.params) def apply_insert(session: AbstractCursor | Session, sa_table: "SATable", rows, rids): @@ -1618,7 +1586,7 @@ def get_single_table_size(table_obj: Table) -> dict | None: sess = _create_oedb_session() try: - res = session_execute_parameter( + res = _execute( sess, sql, {"schema": table_obj.oedb_schema, "table": table_obj.name} ) row = res.fetchone() @@ -1655,7 +1623,7 @@ def list_table_sizes() -> list[dict]: sess = _create_oedb_session() try: - res = session_execute(sess, sql) + res = _execute(sess, sql) rows = res.fetchall() or [] out = [] for r in rows: @@ -1680,7 +1648,7 @@ def table_has_row_with_id(table: Table, id: int | str, id_col: str = "id") -> bo engine = _get_engine() with engine.connect() as conn: - resp = connection_execute(conn, query, id=id) + resp = _execute(conn, query, id=id) row = resp.fetchone() row_count = row[0] if row else 0 @@ -1698,7 +1666,7 @@ def table_get_row_count(table: Table) -> int: engine = _get_engine() with engine.connect() as conn: - resp = connection_execute(conn, query) + resp = _execute(conn, query) row = resp.fetchone() row_count = row[0] if row else 0 @@ -1731,7 +1699,7 @@ def table_get_approx_row_count(table: Table, precise_below: int = 0) -> int: ) with engine.connect() as conn: - resp = connection_execute(conn, query) + resp = _execute(conn, query) row = resp.fetchone() row_count = row[0] if row else 0 @@ -1763,11 +1731,6 @@ def get_table_names( return [t.name for t in Table.objects.all()] -def data_info(request: dict, context: dict | None = None) -> dict: - # TODO: can we remove this endpoint? 
- return request - - def has_schema(request: dict, context: dict | None = None) -> bool: # TODO can we remove this endpoint return request.get("schema") in get_schema_names() @@ -1798,7 +1761,6 @@ def data_search(request: dict, context: dict | None = None) -> dict: def data_insert(request: dict, context: dict) -> dict: - cursor = load_cursor_from_context(context) # If the insert request is not for a meta table, change the request to do so table_obj = table_or_404_from_dict(request) @@ -1944,7 +1906,7 @@ def get_columns(request: dict, context: dict | None = None) -> dict: bindparams=[sql.bindparam("table_oid", type_=sa_types.Integer)], typemap={"attname": sa_types.Unicode, "default": sa_types.Unicode}, ) - c = connection_execute(connection, s, table_oid=table_oid) + c = _execute(connection, s, table_oid=table_oid) rows = c.fetchall() or [] domains = engine.dialect._load_domains(connection) @@ -2006,8 +1968,6 @@ def get_foreign_keys(request: dict, context: dict | None = None) -> dict: def get_indexes(request: dict, context: dict | None = None) -> dict: - # TODO can we remove this endpoint - table_obj = table_or_404_from_dict(request) engine = _get_engine() @@ -2152,47 +2112,14 @@ def fetchone(request: dict, context: dict) -> list | None: # ------------------------------------------------------------------------------------- -def session_execute(session: Session, sql) -> ResultProxy: - response = session.execute(sql) - # Note: cast is only for type checking, - # should disappear once we migrate to sqlalchemy >= 1.4 - response = cast(ResultProxy, response) - return response - - -def session_execute_parameter( - session: AbstractCursor | Session, sql, parameter +def _execute( + con: Session | Engine | Connection | AbstractCursor, + sql: Executable | str, + *args, + **kwargs, ) -> ResultProxy: - response = session.execute(sql, parameter) - # Note: cast is only for type checking, - # should disappear once we migrate to sqlalchemy >= 1.4 - response = cast(ResultProxy, 
response) - return response - - -def new_engine_execute(sql) -> ResultProxy: - return engine_execute(_get_engine(), sql) - - -def engine_execute(engine: Engine, sql) -> ResultProxy: - response = engine.execute(sql) - # Note: cast is only for type checking, - # should disappear once we migrate to sqlalchemy >= 1.4 - response = cast(ResultProxy, response) - return response - - -def connection_execute(connection: Connection, sql, **kwargs) -> ResultProxy: - response = connection.execute(sql, **kwargs) + response = con.execute(sql, *args, **kwargs) # type:ignore # Note: cast is only for type checking, # should disappear once we migrate to sqlalchemy >= 1.4 response = cast(ResultProxy, response) return response - - -def cursor_execute(cursor: AbstractCursor, sql) -> None: - cursor.execute(sql) - - -def cursor_execute_parameter(cursor: AbstractCursor | Session, sql, parameter) -> None: - cursor.execute(sql, parameter) diff --git a/api/apps.py b/api/apps.py index d32b9baaf..527566568 100644 --- a/api/apps.py +++ b/api/apps.py @@ -1,4 +1,4 @@ -""" +__license__ = """ SPDX-FileCopyrightText: 2025 Martin Glauer © Otto-von-Guericke-Universität Magdeburg SPDX-FileCopyrightText: 2025 Martin Glauer © Otto-von-Guericke-Universität Magdeburg SPDX-License-Identifier: AGPL-3.0-or-later diff --git a/api/encode.py b/api/encode.py index af2352517..7e00eb661 100644 --- a/api/encode.py +++ b/api/encode.py @@ -1,4 +1,4 @@ -""" +__license__ = """ SPDX-FileCopyrightText: 2025 Christian Winger © Öko-Institut e.V. 
SPDX-FileCopyrightText: 2025 Martin Glauer © Otto-von-Guericke-Universität Magdeburg SPDX-License-Identifier: AGPL-3.0-or-later diff --git a/api/error.py b/api/error.py index 0a02232f7..138f83314 100644 --- a/api/error.py +++ b/api/error.py @@ -1,4 +1,4 @@ -""" +__license__ = """ SPDX-FileCopyrightText: 2025 Martin Glauer © Otto-von-Guericke-Universität Magdeburg SPDX-FileCopyrightText: 2025 Martin Glauer © Otto-von-Guericke-Universität Magdeburg SPDX-License-Identifier: AGPL-3.0-or-later diff --git a/api/helper.py b/api/helper.py index c767e2aaf..a1195aec4 100644 --- a/api/helper.py +++ b/api/helper.py @@ -1,4 +1,4 @@ -""" +__license__ = """ SPDX-FileCopyrightText: 2025 Adel Memariani © Otto-von-Guericke-Universität Magdeburg SPDX-FileCopyrightText: 2025 Adel Memariani © Otto-von-Guericke-Universität Magdeburg SPDX-FileCopyrightText: 2025 Christian Winger © Öko-Institut e.V. @@ -27,10 +27,11 @@ from decimal import Decimal from typing import Callable, Union -import geoalchemy2  # noqa: Although this import seems unused is has to be here +import geoalchemy2  # noqa:F401 Although this import seems unused it has to be here import psycopg2 from django.core.exceptions import ObjectDoesNotExist from django.http import HttpRequest, JsonResponse, StreamingHttpResponse +from django.http.response import Http404 from django.utils import timezone from rest_framework import status from rest_framework.request import Request @@ -226,15 +227,13 @@ def wrapper(*args, **kwargs): return f(*args, **kwargs) except APIError as e: return JsonResponse({"reason": e.message}, status=e.status) - except Table.DoesNotExist: + except (Table.DoesNotExist, Http404): return JsonResponse({"reason": "table does not exist"}, status=404) - - # TODO: why cant' we handle all other errors here? 
(tests failing) - # except Exception as exc: - # # All other Errors: dont accidently return sensitive data from error - # # but return generic error message - # logger.error(str(exc)) - # return JsonResponse({"reason": "Invalid request."}, status=400) + except Exception as exc: + # All other Errors: don't accidentally return sensitive data from error + # but return generic error message + logger.error(str(exc)) + return JsonResponse({"reason": "Invalid request"}, status=400) return wrapper diff --git a/api/parser.py b/api/parser.py index afdebd853..e08722e54 100644 --- a/api/parser.py +++ b/api/parser.py @@ -1,4 +1,4 @@ -""" +__license__ = """ SPDX-FileCopyrightText: 2025 Christian Winger © Öko-Institut e.V. SPDX-FileCopyrightText: 2025 Johann Wagner © Otto-von-Guericke-Universität Magdeburg SPDX-FileCopyrightText: 2025 Jonas Huber © Reiner Lemoine Institut @@ -107,15 +107,9 @@ def get_column_definition_query(d: dict) -> Column: d["autoincrement"] = True for fk in d.get("foreign_key", []): - table_obj = table_or_404_from_dict(fk) - - # NOTE: previously, this used a sqlalchemy MetaData without bind=engine. - # could this be a problem? 
fk_sa_table = table_obj.get_oedb_table_proxy()._main_table.get_sa_table() - fkcolumn = Column(get_or_403(fk, "column")) - fkcolumn.table = fk_sa_table args.append(ForeignKey(fkcolumn)) @@ -285,7 +279,6 @@ def set_meta_info(method, user, message=None) -> dict: def parse_insert( sa_table_insert: "SATable", d: dict, context: dict, message=None, mapper=None ): - field_strings = [] for field in d.get("fields", []): if not ( @@ -426,7 +419,6 @@ def parse_select(d: dict): elif type.lower() == "except": query.except_(subquery) if "order_by" in d: - for ob in d["order_by"]: expr = parse_expression(ob) @@ -890,8 +882,9 @@ def _parse_sqla_operator(raw_key, *operands): return x.distance_centroid(y) if key in ["getitem"]: if isinstance(y, Slice): - ystart, ystop = _parse_single(y.start, int), _parse_single( - y.stop, int + ystart, ystop = ( + _parse_single(y.start, int), + _parse_single(y.stop, int), ) return x[ystart:ystop] else: diff --git a/api/sessions.py b/api/sessions.py index 64a3778ff..672013149 100644 --- a/api/sessions.py +++ b/api/sessions.py @@ -1,5 +1,6 @@ -"""This module handles all relevant features that belong to specific sessions. +"""This module handles all relevant features that belong to specific sessions.""" +__license__ = """ SPDX-FileCopyrightText: 2025 Christian Winger © Öko-Institut e.V. SPDX-FileCopyrightText: 2025 Martin Glauer © Otto-von-Guericke-Universität Magdeburg SPDX-License-Identifier: AGPL-3.0-or-later diff --git a/api/urls.py b/api/urls.py index ed2dd9eaf..3d3a74d11 100644 --- a/api/urls.py +++ b/api/urls.py @@ -1,4 +1,4 @@ -""" +__license__ = """ SPDX-FileCopyrightText: 2025 Adel Memariani © Otto-von-Guericke-Universität Magdeburg SPDX-FileCopyrightText: 2025 Christian Winger © Öko-Institut e.V. 
SPDX-FileCopyrightText: 2025 Johann Wagner © Otto-von-Guericke-Universität Magdeburg @@ -67,7 +67,6 @@ usrprop_api_view, ) -# from api.views import TableFieldsAPIView, AdvancedInfoAPIView app_name = "api" pgsql_qualifier = r"[\w\d_]+" @@ -102,11 +101,6 @@ TableColumnAPIView.as_view(), name="table-columns", ), - # re_path( - # r"^(?P[\w\d_\s]+)/id/(?P[\d]+)/column/(?P[\w\d_\s]+)/$", # noqa - # TableFieldsAPIView.as_view(), - # name="table-fields", - # ), re_path( r"^(?P
[\w\d_\s]+)/rows/(?P[\d]+)?$", # noqa TableRowsAPIView.as_view(), @@ -135,7 +129,6 @@ ), re_path(r"^delete", AdvancedDeleteAPIView, name="advanced-delete"), re_path(r"^update", AdvancedUpdateAPIView, name="advanced-update"), - # re_path(r"^info", AdvancedInfoAPIView, name="advanced-info"), re_path(r"^has_schema", AdvancedHasSchemaAPIView, name="advanced-has-schema"), re_path(r"^has_table", AdvancedHasTableAPIView, name="advanced-has-table"), re_path( @@ -249,7 +242,7 @@ re_path( r"^cursor/fetch_many", AdvancedFetchAPIView.as_view(), - dict(fetchtype="all"), # TODO: shouldn't this be "many"? + dict(fetchtype="many"), name="advanced-cursor-fetch-many", ), re_path( diff --git a/api/utils.py b/api/utils.py index 0254ebcc2..7f483a237 100644 --- a/api/utils.py +++ b/api/utils.py @@ -1,13 +1,16 @@ """ Collection of utility functions for the API used to define various action like processing steps. +""" +__license__ = """ SPDX-FileCopyrightText: 2025 Jonas Huber © Reiner Lemoine Institut SPDX-License-Identifier: AGPL-3.0-or-later """ # noqa: 501 from typing import TYPE_CHECKING, Mapping, Union, cast +from urllib.parse import urlsplit, urlunsplit from rest_framework.request import Request @@ -71,3 +74,8 @@ def table_or_404(table: str) -> "Table": def table_or_404_from_dict(dct: Mapping) -> "Table": return table_or_404(get_or_403(dct, "table")) + + +def strip_query(url): + parts = urlsplit(url) + return urlunsplit((parts.scheme, parts.netloc, parts.path, "", "")).rstrip("/") diff --git a/api/views.py b/api/views.py index 785323b94..ceeb96e0c 100644 --- a/api/views.py +++ b/api/views.py @@ -40,7 +40,7 @@ import json import re -import geoalchemy2 # noqa: Although this import seems unused is has to be here +import geoalchemy2 # noqa:F401 Although this import seems unused is has to be here import requests import zipstream from django.contrib.auth.mixins import LoginRequiredMixin @@ -68,7 +68,6 @@ column_alter, commit_raw_connection, data_delete, - data_info, data_insert, 
data_search, data_update, @@ -98,7 +97,6 @@ get_unique_constraints, get_view_definition, get_view_names, - getValue, has_schema, has_table, list_table_sizes, @@ -156,7 +154,13 @@ apply_embargo, parse_embargo_payload, ) -from api.utils import get_dataset_configs, get_or_403, request_data_dict, table_or_404 +from api.utils import ( + get_dataset_configs, + get_or_403, + request_data_dict, + strip_query, + table_or_404, +) from api.validators.column import validate_column_names from api.validators.identifier import assert_valid_table_name from dataedit.models import Table @@ -177,6 +181,10 @@ USE_ONTOP, ) +DBPEDIA_LOOKUP_SPARQL_ENDPOINT_URL_WO_QUERY = strip_query( + DBPEDIA_LOOKUP_SPARQL_ENDPOINT_URL +) + class TableMetadataAPIView(APIView): @api_exception @@ -416,29 +424,6 @@ def put(self, request: Request, table: str, column: str) -> JsonLikeResponse: return JsonResponse({}, status=201) -class TableFieldsAPIView(APIView): - # TODO: is this really used? - @api_exception - @method_decorator(never_cache) - def get( - self, - request: Request, - table: str, - row_id: int, - column: str | None = None, - ) -> JsonLikeResponse: - table_obj = table_or_404(table=table) - - if not is_pg_qual(table) or not is_pg_qual(row_id) or not is_pg_qual(column): - return ModJsonResponse({"error": "Bad Request", "http_status": 400}) - - returnValue = getValue(table_obj, column, row_id) - if returnValue is None: - return JsonResponse({}, status=404) - else: - return JsonResponse(returnValue, status=200) - - class TableMovePublishAPIView(APIView): @api_exception @require_admin_permission @@ -985,9 +970,14 @@ def oeo_search_api_view(request: Request) -> JsonLikeResponse: query = request.GET["query"] # call local search service # "http://loep/lookup-application/api/search?query={query}" - url = f"{DBPEDIA_LOOKUP_SPARQL_ENDPOINT_URL}{query}" + + # NOTE: to pass snyk security review, user data (request.GET["query"]) + # put into request.get() is dangerous and needs to be secured by + # clearly 
separating the base url + url = f"{DBPEDIA_LOOKUP_SPARQL_ENDPOINT_URL_WO_QUERY}?query={query}" res = requests.get(url).json() - # res: something like [{"label": "testlabel", "resource": "testresource"}] + # res: something like + # {"docs": [{"label": "testlabel", "resource": "testresource"}]} # send back to client else: raise APIError( @@ -1187,7 +1177,6 @@ def get(self, request: Request) -> JsonLikeResponse: AdvancedDeleteAPIView = create_ajax_handler(data_delete, requires_cursor=True) AdvancedUpdateAPIView = create_ajax_handler(data_update, requires_cursor=True) -AdvancedInfoAPIView = create_ajax_handler(data_info) AdvancedHasSchemaAPIView = create_ajax_handler(has_schema) AdvancedHasTableAPIView = create_ajax_handler(has_table) AdvancedGetSchemaNamesAPIView = create_ajax_handler(get_schema_names) diff --git a/base/management/commands/check_links.py b/base/management/commands/check_links.py index 242275550..f5eb76230 100644 --- a/base/management/commands/check_links.py +++ b/base/management/commands/check_links.py @@ -36,7 +36,7 @@ def iter_links(url, parent_url=None, root_url=None, no_external=False): res = requests.get( url, stream=True, # stream because sometimes we dont actually load all the content - verify=False, # sometimes ssl certs fail + verify=False, # snyk:ignore this is ok here, because we just run it in development to test if urls still exist # noqa headers={"User-Agent": "Mozilla/5.0"}, ) cache[url] = res.status_code @@ -78,6 +78,14 @@ def filter_tag(t: Tag) -> bool: else: ref = str(tag.get("src") or tag.get("href") or "") + # dont check vite develop urls + if "@vite" in ref or ref.endswith(".jsx") or ref.endswith(".js"): + continue + + # dont follow all the OEO classes + if "/oeo/" in ref: + continue + if not ref: continue diff --git a/dataedit/templates/dataedit/meta_edit.html b/dataedit/templates/dataedit/meta_edit.html index c2c0a7d38..95f142fea 100644 --- a/dataedit/templates/dataedit/meta_edit.html +++ 
b/dataedit/templates/dataedit/meta_edit.html @@ -135,7 +135,7 @@