diff --git a/.env.example b/.env.example index f002e016f1..7e14bd67f4 100644 --- a/.env.example +++ b/.env.example @@ -1,4 +1,2 @@ ALLOWED_HOSTS='.localhost, 127.0.0.1, [::1]' SECRET_KEY=2gr6ud88x=(p855_5nbj_+7^bw-iz&n7ldqv%94mjaecl+b9=4 -## Uncomment the setting below to put Mathesar in 'demo mode' -# DJANGO_SETTINGS_MODULE=demo.settings diff --git a/.github/workflows/run-e2e-integ-tests.yml.disabled b/.github/workflows/run-e2e-integ-tests.yml.disabled deleted file mode 100644 index 3184153f4e..0000000000 --- a/.github/workflows/run-e2e-integ-tests.yml.disabled +++ /dev/null @@ -1,59 +0,0 @@ -name: Run E2E integration tests -on: [push, pull_request] - -jobs: - build: - runs-on: ubuntu-latest - # We only want to run on external PRs, since internal PRs are covered by "push" - # This prevents this from running twice on internal PRs - if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository - steps: - - uses: actions/checkout@v2 - - # In this step, this action saves a list of existing images, - # the cache is created without them in the post run. - # It also restores the cache if it exists. - - uses: satackey/action-docker-layer-caching@v0.0.11 - # Ignore the failure of a step and avoid terminating the job. - continue-on-error: true - with: - key: mathesar-docker-cache-integ-tests-{hash} - restore-keys: | - mathesar-docker-cache-e2e-tests- - - - name: Copy env file - run: cp .env.example .env - - - name: Use integ test dockerfile - run: rm Dockerfile && mv Dockerfile.integ-tests Dockerfile - - # The code is checked out under uid 1001 - reset this to 1000 for the - # container to run tests successfully - - name: Fix permissions - run: sudo chown -R 1000:1000 . - - - name: Build the stack - run: docker-compose up --build -d - - - name: Sleep for 60 seconds - run: sleep 60s - shell: bash - - # TODO: This needs to be handled inside the tests - - name: Run migrations - run: docker exec mathesar_service python manage.py migrate - - - name: Run type installation - run: docker exec mathesar_service python install.py --skip-confirm - - - name: Build front end - run: docker exec -w /code/mathesar_ui mathesar_service npx vite build - - - name: Run integ tests with pytest - run: docker exec mathesar_service pytest --browser chromium --browser webkit --browser firefox --video="on" --output="./videos/" mathesar/tests/integration/ -n0 - - - uses: actions/upload-artifact@v2 - if: ${{ failure() || success() }} - with: - name: Recordings - path: ${{ github.workspace }}/videos/ diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md index bbd533d188..5c71fdcdc7 100644 --- a/DEVELOPER_GUIDE.md +++ b/DEVELOPER_GUIDE.md @@ -49,7 +49,26 @@ Before getting started with your code changes, read our [Contributor guide](./CO ## Loading sample data -For sample table data, you can create a new table in the UI using the `patents.csv` file found in `/mathesar/tests/data`. +- Using a CSV File (limited visibility of features): + + For sample table data, you can create a new table in the UI using the `patents.csv` file found in `/mathesar/tests/data`. + +- Using Mathesar Data Playground (recommended): + + 1. Clone the `mathesar-data-playground` repo: + ``` + git clone https://github.com/mathesar-foundation/mathesar-data-playground.git + ``` + + 2. 
Load the data from SQL by running:
+     ```
+     sudo docker exec -i mathesar_dev_db bash -c 'psql -U mathesar' < /path/to/your/cloned/repo/mathesar-data-playground/realistic_library_simulation/simulation_runs/simulation_run_20230106_00.sql
+     ```
+     ```
+     sudo docker exec -i mathesar_dev_db bash -c 'psql -U mathesar' < /path/to/your/cloned/repo/mathesar-data-playground/realistic_library_simulation/simulation_runs/simulation_run_20230106_00_checkouts.sql
+     ```
+  3. [Sync](https://docs.mathesar.org/user-guide/syncing-db/) these changes from the UI.
+
@@ -216,12 +235,6 @@ If you'd like to manually push or pull translations, follow the instructions in
 
 1. Commit and push the changes to our repo.
 
-## Demo mode
-
-Mathesar can be run in "demo mode" to meet the specific needs of our [live demo site](https://demo.mathesar.org).
-
-See our [Live demo mode](./demo/README.md) guide for more information on enabling live demo mode locally
-
 ## Opening a shell in the container
 
@@ -236,37 +249,6 @@ See our [Live demo mode](./demo/README.md) guide for more information on enablin
 ```
 docker exec -it mathesar_dev_db psql -U mathesar
 ```
-
-
-## Building Debian package
-
-- On a Debian machine, install the following dependencies
-
-  ```
-  sudo apt install debhelper-compat dh-virtualenv libsystemd-dev libpq-dev libicu-dev pkg-config lsb-release python3-dev python3 python3-setuptools python3-pip python3-venv tar
-  ```
-- Setup Mathesar build environment.
-  This step is useful only when testing locally is needed for building static files and for collecting them. We won't have a need for this step while using the build service as it will be using the source code from release assets which will contain these static files
-
-
-- Install Python and Nodejs preferably on a Linux machine
-- Run the following commands to set up the environment
-
-  ```
-  python3 -m venv ./mathesar-venv
-  source ./mathesar-venv/bin/activate
-  pip install -r requirements.txt
-  sudo npm install -g npm-force-resolutions
-  cd mathesar_ui && npm install --unsafe-perm && npm run build
-  cd ..
- python manage.py collectstatic - ``` - -- From the mathesar directory, run the build script to generate the debian package - - ``` - cd release-scripts && source build-debian.sh - ``` ## Troubleshooting diff --git a/Dockerfile b/Dockerfile index 49f667b82a..3e836d05d0 100644 --- a/Dockerfile +++ b/Dockerfile @@ -87,9 +87,9 @@ EXPOSE 8000 3000 6006 ENTRYPOINT ["./dev-run.sh"] -#=========== STAGE: COMMON ===================================================# +#=========== STAGE: PRODUCTION ===============================================# -from base as common +from base as production # Install prod requirements RUN pip install --no-cache-dir -r requirements-prod.txt @@ -105,23 +105,6 @@ RUN rm -rf ./mathesar_ui RUN rm -rf ./mathesar/tests ./db/tests RUN rm -rf ./docs - -#=========== STAGE: DEMO =====================================================# - -FROM common AS demo - -# Install prod requirements -RUN pip install --no-cache-dir -r requirements-demo.txt - -EXPOSE 8000 - -ENTRYPOINT ["./run.sh"] - - -#=========== STAGE: PRODUCTION ===============================================# - -FROM common AS production - EXPOSE 8000 ENTRYPOINT ["./run.sh"] diff --git a/Dockerfile.integ-tests b/Dockerfile.integ-tests deleted file mode 100644 index ad04a97416..0000000000 --- a/Dockerfile.integ-tests +++ /dev/null @@ -1,141 +0,0 @@ -FROM buildpack-deps:focal - -ARG DEBIAN_FRONTEND=noninteractive - -# Install python -# Some of this is ported from docker file for python-3.9: -# https://github.com/docker-library/python/blob/33751272d8171cece37c59180c049ab77cf9c837/3.9/buster/Dockerfile - -# ensure local python is preferred over distribution python -ENV PATH /usr/local/bin:$PATH - -# http://bugs.python.org/issue19846 -# > At the moment, setting "LANG=C" on a Linux system *fundamentally breaks Python 3*, and that's not OK. 
-ENV LANG C.UTF-8 - -# Download node source -ENV NODE_MAJOR 18 - -RUN apt-get update -RUN apt-get install -y ca-certificates curl gnupg -RUN mkdir -p /etc/apt/keyrings -RUN curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg -RUN echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_$NODE_MAJOR.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list - -RUN apt-get update -RUN apt-get install nodejs -y - -# extra dependencies (over what buildpack-deps already includes) -RUN apt-get update && apt-get install -y --no-install-recommends \ - libbluetooth-dev \ - tk-dev \ - uuid-dev \ - sudo \ - nodejs \ - && rm -rf /var/lib/apt/lists/* - -ENV GPG_KEY E3FF2839C048B25C084DEBE9B26995E310250568 -ENV PYTHON_VERSION 3.9.8 - -RUN set -ex \ - \ - && wget -O python.tar.xz "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz" \ - && wget -O python.tar.xz.asc "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz.asc" \ - && export GNUPGHOME="$(mktemp -d)" \ - && gpg --batch --keyserver hkps://keys.openpgp.org --recv-keys "$GPG_KEY" \ - && gpg --batch --verify python.tar.xz.asc python.tar.xz \ - && { command -v gpgconf > /dev/null && gpgconf --kill all || :; } \ - && rm -rf "$GNUPGHOME" python.tar.xz.asc \ - && mkdir -p /usr/src/python \ - && tar -xJC /usr/src/python --strip-components=1 -f python.tar.xz \ - && rm python.tar.xz \ - \ - && cd /usr/src/python \ - && gnuArch="$(dpkg-architecture --query DEB_BUILD_GNU_TYPE)" \ - && ./configure \ - --build="$gnuArch" \ - --enable-loadable-sqlite-extensions \ - --enable-optimizations \ - --enable-option-checking=fatal \ - --enable-shared \ - --with-system-expat \ - --with-system-ffi \ - --without-ensurepip \ - && make -j "$(nproc)" \ - && make install \ - && rm -rf /usr/src/python \ - \ - && find /usr/local -depth \ - \( \ - \( -type d -a \( -name test -o -name tests -o -name idle_test \) \) \ - -o \( -type f -a \( -name '*.pyc' -o -name '*.pyo' -o -name '*.a' \) \) \ - \) -exec rm -rf '{}' + \ - \ - && ldconfig \ - \ - && python3 --version - -# make some useful symlinks that are expected to exist -RUN cd /usr/local/bin \ - && ln -s idle3 idle \ - && ln -s pydoc3 pydoc \ - && ln -s python3 python \ - && ln -s python3-config python-config - -# if this is called "PIP_VERSION", pip explodes with "ValueError: invalid truth value ''" -ENV PYTHON_PIP_VERSION 21.2.4 -# https://github.com/docker-library/python/issues/365 -ENV PYTHON_SETUPTOOLS_VERSION 57.5.0 -# https://github.com/pypa/get-pip -ENV PYTHON_GET_PIP_URL https://github.com/pypa/get-pip/raw/d781367b97acf0ece7e9e304bf281e99b618bf10/public/get-pip.py -ENV PYTHON_GET_PIP_SHA256 01249aa3e58ffb3e1686b7141b4e9aac4d398ef4ac3012ed9dff8dd9f685ffe0 - -RUN set -ex; \ - \ - wget -O get-pip.py "$PYTHON_GET_PIP_URL"; \ - echo "$PYTHON_GET_PIP_SHA256 *get-pip.py" | sha256sum --check --strict -; \ - \ - python get-pip.py \ - --disable-pip-version-check \ - --no-cache-dir \ - "pip==$PYTHON_PIP_VERSION" \ - "setuptools==$PYTHON_SETUPTOOLS_VERSION" \ - ; \ - pip --version; \ - \ - find /usr/local -depth \ - \( \ - \( -type d -a \( -name test -o -name tests -o -name idle_test \) \) \ - -o \ - \( -type f -a \( -name '*.pyc' -o -name '*.pyo' \) \) \ - \) -exec rm -rf '{}' +; \ - rm -f get-pip.py - -ENV PYTHONUNBUFFERED=1 -ENV DOCKERIZE_VERSION v0.6.1 - -# Install dockerize -RUN wget 
https://github.com/jwilder/dockerize/releases/download/$DOCKERIZE_VERSION/dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \ - && tar -C /usr/local/bin -xzvf dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \ - && rm dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz - -RUN pip install playwright==1.18.2 -RUN pip install pytest-playwright==0.2.3 -RUN playwright install -RUN playwright install-deps - -# Change work directory -WORKDIR /code/ - -COPY requirements.txt . -COPY requirements-dev.txt . -COPY requirements-demo.txt . - -RUN pip install -r requirements.txt -RUN pip install -r requirements-dev.txt -RUN pip install -r requirements-demo.txt -COPY . . - -RUN cd mathesar_ui && npm ci && npm run build - -EXPOSE 8000 3000 6006 diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index cf8c9939f0..0000000000 --- a/MANIFEST.in +++ /dev/null @@ -1 +0,0 @@ -recursive-include static * \ No newline at end of file diff --git a/config/context_processors.py b/config/context_processors.py index 85d6fa80a7..39dc562e9b 100644 --- a/config/context_processors.py +++ b/config/context_processors.py @@ -14,9 +14,6 @@ def frontend_settings(request): frontend_settings = { 'development_mode': development_mode, 'manifest_data': manifest_data, - 'live_demo_mode': getattr(settings, 'MATHESAR_LIVE_DEMO', False), - 'live_demo_username': getattr(settings, 'MATHESAR_LIVE_DEMO_USERNAME', None), - 'live_demo_password': getattr(settings, 'MATHESAR_LIVE_DEMO_PASSWORD', None), 'display_language': display_language, 'include_i18n_fallback': display_language != fallback_language, } diff --git a/config/settings/common_settings.py b/config/settings/common_settings.py index 930482cbfa..0aeab18afc 100644 --- a/config/settings/common_settings.py +++ b/config/settings/common_settings.py @@ -42,7 +42,6 @@ def pipe_delim(pipe_string): "rest_framework", "django_filters", "django_property_filter", - "drf_spectacular", "modernrpc", "mathesar", ] @@ -66,7 +65,27 @@ def pipe_delim(pipe_string): ROOT_URLCONF = "config.urls" MODERNRPC_METHODS_MODULES = [ - 'mathesar.rpc.connections' + 'mathesar.rpc.collaborators', + 'mathesar.rpc.columns', + 'mathesar.rpc.columns.metadata', + 'mathesar.rpc.connections', + 'mathesar.rpc.constraints', + 'mathesar.rpc.data_modeling', + 'mathesar.rpc.databases', + 'mathesar.rpc.databases.configured', + 'mathesar.rpc.databases.privileges', + 'mathesar.rpc.databases.setup', + 'mathesar.rpc.explorations', + 'mathesar.rpc.records', + 'mathesar.rpc.roles', + 'mathesar.rpc.roles.configured', + 'mathesar.rpc.schemas', + 'mathesar.rpc.schemas.privileges', + 'mathesar.rpc.servers.configured', + 'mathesar.rpc.tables', + 'mathesar.rpc.tables.metadata', + 'mathesar.rpc.tables.privileges', + 'mathesar.rpc.types', ] TEMPLATES = [ @@ -207,18 +226,6 @@ def pipe_delim(pipe_string): 'TEST_REQUEST_DEFAULT_FORMAT': 'json', 'EXCEPTION_HANDLER': 'mathesar.exception_handlers.mathesar_exception_handler', - 'DEFAULT_SCHEMA_CLASS': 'drf_spectacular.openapi.AutoSchema' -} -SPECTACULAR_SETTINGS = { - 'TITLE': 'Mathesar API', - 'DESCRIPTION': '', - 'VERSION': '1.0.0', - 'SERVE_INCLUDE_SCHEMA': False, - 'PREPROCESSING_HOOKS': ['config.settings.openapi.custom_preprocessing_hook'], - 'POSTPROCESSING_HOOKS': [ - 'config.settings.openapi.remove_url_prefix_hook', - ], - # OTHER SETTINGS } FRIENDLY_ERRORS = { 'FIELD_ERRORS': { @@ -257,6 +264,8 @@ def pipe_delim(pipe_string): MATHESAR_CAPTURE_UNHANDLED_EXCEPTION = decouple_config('CAPTURE_UNHANDLED_EXCEPTION', default=False) MATHESAR_STATIC_NON_CODE_FILES_LOCATION = os.path.join(BASE_DIR, 
'mathesar/static/non-code/') +DEFAULT_AUTO_FIELD = 'django.db.models.AutoField' + # UI source files have to be served by Django in order for static assets to be included during dev mode # https://vitejs.dev/guide/assets.html # https://vitejs.dev/guide/backend-integration.html diff --git a/config/settings/openapi.py b/config/settings/openapi.py deleted file mode 100644 index 85a1cd0ca5..0000000000 --- a/config/settings/openapi.py +++ /dev/null @@ -1,28 +0,0 @@ -def custom_preprocessing_hook(endpoints): - prefixes = [ - "/api/db/v0/databases/", - "/api/db/v0/data_files/", - "/api/db/v0/schemas/", - "/api/db/v0/tables/", - "/api/db/v0/links/", - "/api/db/v0/queries/", - "/api/ui/v0/databases/", - "/api/ui/v0/users/", - "/api/ui/v0/database_roles/" - ] - filtered = [(path, path_regex, method, callback) for path, path_regex, method, callback in endpoints if any(path.startswith(prefix) for prefix in prefixes)] - return filtered - - -def remove_url_prefix_hook(result, **kwargs): - # Remove namespace and version URL prefix from the operation Id of the generated API schema - for path, path_info in result['paths'].items(): - for method, operation in path_info.items(): - operation_id = operation.get('operationId') - if operation_id: - if path.startswith('/api/db/v0/'): - operation['operationId'] = operation_id.replace('db_v0_', '') - elif path.startswith('/api/ui/v0/'): - operation['operationId'] = operation_id.replace('ui_v0_', '') - - return result diff --git a/conftest.py b/conftest.py index 41589a1e07..a452c313e2 100644 --- a/conftest.py +++ b/conftest.py @@ -2,6 +2,7 @@ import random import string import os +import psycopg # These imports come from the mathesar namespace, because our DB setup logic depends on it. from django.db import connection as dj_connection @@ -11,10 +12,9 @@ from sqlalchemy_utils import database_exists, create_database, drop_database from db.engine import add_custom_types_to_ischema_names, create_engine as sa_create_engine -from db.types import install from db.sql import install as sql_install -from db.schemas.operations.drop import drop_schema as drop_sa_schema -from db.schemas.operations.create import create_schema as create_sa_schema +from db.schemas.operations.drop import drop_schema_via_name as drop_sa_schema +from db.schemas.operations.create import create_schema_if_not_exists_via_sql_alchemy from db.schemas.utils import get_schema_oid_from_name, get_schema_name_from_oid from fixtures.utils import create_scoped_fixtures @@ -74,8 +74,8 @@ def __create_db(db_name): create_database(engine.url) created_dbs.add(db_name) # Our default testing database has our types and functions preinstalled. 
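The conftest hunk below collapses the old two-step installer (`sql_install.install(engine)` plus `install.install_mathesar_on_database(engine)`) into a single psycopg call. A minimal sketch of the new convention; the DSN is an illustrative assumption, not part of this diff:

```python
import psycopg

from db.sql import install as sql_install

# Illustrative DSN; in the fixture, the string comes from the SQLAlchemy
# engine URL of the test database that was just created.
conn_str = "postgresql://mathesar:mathesar@localhost:5432/mathesar_test"

with psycopg.connect(conn_str) as conn:
    # Installs the msar schemas and SQL functions into the target database.
    sql_install.install(conn)
```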
- sql_install.install(engine) - install.install_mathesar_on_database(engine) + with psycopg.connect(str(engine.url)) as conn: + sql_install.install(conn) engine.dispose() return db_name yield __create_db @@ -210,7 +210,7 @@ def _create_schema(schema_name, engine, schema_mustnt_exist=True): if schema_mustnt_exist: assert schema_name not in created_schemas logger.debug(f'creating {schema_name}') - create_sa_schema(schema_name, engine, if_not_exists=True) + create_schema_if_not_exists_via_sql_alchemy(schema_name, engine) schema_oid = get_schema_oid_from_name(schema_name, engine) db_name = engine.url.database created_schemas_in_this_engine = created_schemas.setdefault(db_name, {}) @@ -225,7 +225,7 @@ def _create_schema(schema_name, engine, schema_mustnt_exist=True): # Handle schemas being renamed during test schema_name = get_schema_name_from_oid(schema_oid, engine) if schema_name: - drop_sa_schema(schema_name, engine, cascade=True, if_exists=True) + drop_sa_schema(engine, schema_name, cascade=True) logger.debug(f'dropping {schema_name}') except OperationalError as e: logger.debug(f'ignoring operational error: {e}') diff --git a/db/columns/operations/alter.py b/db/columns/operations/alter.py index 04d089162d..bb0740c767 100644 --- a/db/columns/operations/alter.py +++ b/db/columns/operations/alter.py @@ -29,7 +29,7 @@ def alter_column(engine, table_oid, column_attnum, column_data, connection=None) "description": } """ - column_alter_def = _process_column_alter_dict(column_data, column_attnum) + column_alter_def = _process_column_alter_dict_dep(column_data, column_attnum) requested_type = column_alter_def.get("type", {}).get("name") if connection is None: try: @@ -154,7 +154,7 @@ def batch_update_columns(table_oid, engine, column_data_list): """ Alter the given columns of the table. - For details on the column_data_list format, see _process_column_alter_dict. + For details on the column_data_list format, see _process_column_alter_dict_dep. Args: table_oid: the OID of the table whose columns we'll alter. @@ -167,7 +167,7 @@ def batch_update_columns(table_oid, engine, column_data_list): engine, 'alter_columns', table_oid, json.dumps( - [_process_column_alter_dict(column) for column in column_data_list] + [_process_column_alter_dict_dep(column) for column in column_data_list] ) ) except InvalidParameterValue: @@ -180,7 +180,81 @@ def batch_update_columns(table_oid, engine, column_data_list): raise InvalidTypeOptionError -def _process_column_alter_dict(column_data, column_attnum=None): +def alter_columns_in_table(table_oid, column_data_list, conn): + """ + Alter columns of the given table in bulk. + + For a description of column_data_list, see _transform_column_alter_dict + + Args: + table_oid: The OID of the table whose columns we'll alter. + column_data_list: a list of dicts describing the alterations to make. + """ + transformed_column_data = [ + _transform_column_alter_dict(column) for column in column_data_list + ] + db_conn.exec_msar_func( + conn, 'alter_columns', table_oid, json.dumps(transformed_column_data) + ) + return len(column_data_list) + + +# TODO This function wouldn't be needed if we had the same form in the DB +# as the RPC API function. +def _transform_column_alter_dict(data): + """ + Transform the data dict into the form needed for the DB functions. 
+ + Input data form: + { + "id": , + "name": , + "type": , + "type_options": , + "nullable": , + "default": {"value": } + "description": + } + + Output form: + { + "attnum": , + "type": {"name": , "options": }, + "name": , + "not_null": , + "default": , + "description": + } + + Note that keys with empty values will be dropped, except "default" + and "description". Explicitly setting these to None requests dropping + the associated property of the underlying column. + """ + type_ = {"name": data.get('type'), "options": data.get('type_options')} + new_type = {k: v for k, v in type_.items() if v} or None + nullable = data.get(NULLABLE) + not_null = not nullable if nullable is not None else None + column_name = (data.get(NAME) or '').strip() or None + raw_alter_def = { + "attnum": data["id"], + "type": new_type, + "not_null": not_null, + "name": column_name, + "description": data.get("description") + } + alter_def = {k: v for k, v in raw_alter_def.items() if v is not None} + + default_dict = data.get("default", {}) + if default_dict is None: + alter_def.update(default=None) + elif "value" in default_dict: + alter_def.update(default=default_dict["value"]) + + return alter_def + + +# TODO This function is deprecated. Remove it when possible. +def _process_column_alter_dict_dep(column_data, column_attnum=None): """ Transform the column_data dict into the form needed for the DB functions. @@ -221,7 +295,7 @@ def _process_column_alter_dict(column_data, column_attnum=None): column_not_null = not column_nullable if column_nullable is not None else None column_name = (column_data.get(NAME) or '').strip() or None raw_col_alter_def = { - "attnum": column_attnum or column_data.get("attnum"), + "attnum": column_attnum or column_data.get("attnum") or column_data.get("id"), "type": new_type, "not_null": column_not_null, "name": column_name, diff --git a/db/columns/operations/create.py b/db/columns/operations/create.py index 7361276015..ddaf29d4b3 100644 --- a/db/columns/operations/create.py +++ b/db/columns/operations/create.py @@ -5,39 +5,18 @@ from alembic.operations import Operations from psycopg.errors import InvalidTextRepresentation, InvalidParameterValue -from db.columns.defaults import DEFAULT, NAME, NULLABLE, TYPE, DESCRIPTION +from db import connection as db_conn +from db.columns.defaults import DEFAULT, NAME, NULLABLE, DESCRIPTION from db.columns.exceptions import InvalidDefaultError, InvalidTypeOptionError -from db.connection import execute_msar_func_with_engine from db.tables.operations.select import reflect_table_from_oid from db.types.base import PostgresType from db.metadata import get_empty_metadata def create_column(engine, table_oid, column_data): - column_name = (column_data.get(NAME) or '').strip() or None - column_type_id = ( - column_data.get( - # TYPE = 'sa_type'. This is coming straight from the API. 
- # TODO Determine whether we actually need 'sa_type' and 'type' - TYPE, column_data.get("type") - ) - or PostgresType.CHARACTER_VARYING.id - ) - column_type_options = column_data.get("type_options", {}) - column_nullable = column_data.get(NULLABLE, True) - default_value = column_data.get(DEFAULT, {}).get('value') - column_description = column_data.get(DESCRIPTION) - col_create_def = [ - { - "name": column_name, - "type": {"name": column_type_id, "options": column_type_options}, - "not_null": not column_nullable, - "default": default_value, - "description": column_description, - } - ] + col_create_def = [_transform_column_create_dict(column_data)] try: - curr = execute_msar_func_with_engine( + curr = db_conn.execute_msar_func_with_engine( engine, 'add_columns', table_oid, json.dumps(col_create_def) @@ -49,6 +28,64 @@ def create_column(engine, table_oid, column_data): return curr.fetchone()[0] +def add_columns_to_table(table_oid, column_data_list, conn): + """ + Add columns to the given table. + + For a description of the members of column_data_list, see + _transform_column_create_dict + + Args: + table_oid: The OID of the table whose columns we'll alter. + column_data_list: A list of dicts describing columns to add. + conn: A psycopg connection. + """ + transformed_column_data = [ + _transform_column_create_dict(col) for col in column_data_list + ] + result = db_conn.exec_msar_func( + conn, 'add_columns', table_oid, json.dumps(transformed_column_data) + ).fetchone()[0] + return result + + +# TODO This function wouldn't be needed if we had the same form in the DB +# as the RPC API function. +def _transform_column_create_dict(data): + """ + Transform the data dict into the form needed for the DB functions. + + Input data form: + { + "name": , + "type": , + "type_options": , + "nullable": , + "default": {"value": } + "description": + } + + Output form: + { + "type": {"name": , "options": }, + "name": , + "not_null": , + "default": , + "description": + } + """ + return { + "name": (data.get(NAME) or '').strip() or None, + "type": { + "name": data.get("type") or PostgresType.CHARACTER_VARYING.id, + "options": data.get("type_options", {}) + }, + "not_null": not data.get(NULLABLE, True), + "default": data.get(DEFAULT, {}).get('value'), + "description": data.get(DESCRIPTION), + } + + def bulk_create_mathesar_column(engine, table_oid, columns, schema): # TODO reuse metadata table = reflect_table_from_oid(table_oid, engine, metadata=get_empty_metadata()) @@ -67,7 +104,7 @@ def duplicate_column( copy_data=True, copy_constraints=True ): - curr = execute_msar_func_with_engine( + curr = db_conn.execute_msar_func_with_engine( engine, 'copy_column', table_oid, diff --git a/db/columns/operations/drop.py b/db/columns/operations/drop.py index 8e2910bb45..a9227af358 100644 --- a/db/columns/operations/drop.py +++ b/db/columns/operations/drop.py @@ -1,14 +1,14 @@ -"""The function in this module wraps SQL functions that drop columns.""" +"""The functions in this module wrap SQL functions that drop columns.""" from db import connection as db_conn def drop_column(table_oid, column_attnum, engine): """ - Drop the given columns from the given table. + Drop the given column from the given table. Args: - table_oid: OID of the table whose columns we'll drop. - column_attnum: The attnums of the columns to drop. + table_oid: OID of the table whose column we'll drop. + column_attnum: The attnum of the column to drop. engine: SQLAlchemy engine object for connecting. 
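The bulk psycopg wrappers added in `create.py` above and in this file mirror one another. A hedged sketch of composing them; the DSN, table OID, and column definition are illustrative assumptions:

```python
import psycopg

from db.columns.operations.create import add_columns_to_table
from db.columns.operations.drop import drop_columns_from_table

conn = psycopg.connect("postgresql://mathesar@localhost/mathesar")  # assumed DSN
table_oid = 12345  # assumed OID of an existing table

# Missing keys fall back to defaults in _transform_column_create_dict,
# e.g. an omitted "type" becomes character varying.
result = add_columns_to_table(
    table_oid,
    [{"name": "rating", "type": "numeric", "type_options": {"precision": 3}}],
    conn,
)

# msar.add_columns is assumed here to return the new attnums; if so, they
# can be passed straight back to drop_columns_from_table to undo the add.
drop_columns_from_table(table_oid, result, conn)
```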
Returns: @@ -17,3 +17,17 @@ def drop_column(table_oid, column_attnum, engine): return db_conn.execute_msar_func_with_engine( engine, 'drop_columns', table_oid, column_attnum ).fetchone()[0] + + +def drop_columns_from_table(table_oid, column_attnums, conn): + """ + Drop the given columns from the given table. + + Args: + table_oid: OID of the table whose columns we'll drop. + column_attnums: The attnums of the columns to drop. + conn: A psycopg connection to the relevant database. + """ + return db_conn.exec_msar_func( + conn, 'drop_columns', table_oid, *column_attnums + ).fetchone()[0] diff --git a/db/columns/operations/select.py b/db/columns/operations/select.py index d1b22aba6d..5ef5f1f508 100644 --- a/db/columns/operations/select.py +++ b/db/columns/operations/select.py @@ -3,11 +3,49 @@ from sqlalchemy import and_, asc, cast, select, text, exists, Identity from db.columns.exceptions import DynamicDefaultWarning -from db.connection import execute_msar_func_with_engine +from db.connection import execute_msar_func_with_engine, exec_msar_func from db.tables.operations.select import reflect_table_from_oid from db.utils import execute_statement, get_pg_catalog_table +def get_column_info_for_table(table, conn): + """ + Return a list of dictionaries describing the columns of the table. + + The `table` can be given as either a "qualified name", or an OID. + The OID is the preferred identifier, since it's much more robust. + + The returned list contains dictionaries of the following form: + + { + "id": , + "name": , + "type": , + "type_options": { + "precision": , + "scale": , + "fields": , + "length": , + "item_type": , + }, + "nullable": , + "primary_key": , + "valid_target_types": [, , ..., ] + "default": {"value": , "is_dynamic": }, + "has_dependents": , + "current_role_priv": [, , ...], + "description": + } + + The fields of the "type_options" dictionary are all optional, + depending on the "type" value. + + Args: + table: The table for which we want column info. + """ + return exec_msar_func(conn, 'get_column_info', table).fetchone()[0] + + def get_column_description(oid, attnum, engine): cursor = execute_msar_func_with_engine(engine, 'col_description', oid, attnum) row = cursor.fetchone() diff --git a/db/connection.py b/db/connection.py index 2b546256d3..1430e5b902 100644 --- a/db/connection.py +++ b/db/connection.py @@ -1,5 +1,6 @@ from sqlalchemy import text import psycopg +from psycopg.rows import dict_row def execute_msar_func_with_engine(engine, func_name, *args): @@ -17,7 +18,7 @@ def execute_msar_func_with_engine(engine, func_name, *args): with psycopg.connect(conn_str) as conn: # Returns a cursor return conn.execute( - f"SELECT msar.{func_name}({','.join(['%s']*len(args))})", + f"SELECT msar.{func_name}({','.join(['%s'] * len(args))})", args ) @@ -40,8 +41,37 @@ def execute_msar_func_with_psycopg2_conn(conn, func_name, *args): return conn.execute(stmt) -def load_file_with_engine(engine, file_handle): +def exec_msar_func(conn, func_name, *args): + """ + Execute an msar function using a psycopg (3) connection. + + Args: + conn: a psycopg connection + func_name: The unqualified msar_function name (danger; not sanitized) + *args: The list of parameters to pass + """ + # Returns a cursor + return conn.execute( + f"SELECT msar.{func_name}({','.join(['%s'] * len(args))})", args + ) + + +def select_from_msar_func(conn, func_name, *args): + """ + Select all records from an msar function using a psycopg (3) connection. 
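For orientation, a short sketch of consuming `get_column_info_for_table` from `db/columns/operations/select.py` above; the connection and table OID are assumed, and each returned entry follows the dictionary shape documented in its docstring:

```python
import psycopg

from db.columns.operations.select import get_column_info_for_table

conn = psycopg.connect("postgresql://mathesar@localhost/mathesar")  # assumed DSN

for col in get_column_info_for_table(12345, conn):  # assumed table OID
    # "type_options" keys are optional and depend on the column's "type".
    print(col["id"], col["name"], col["type"], col.get("type_options"))
```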
+ + Args: + conn: a psycopg connection + func_name: The unqualified msar_function name (danger; not sanitized) + *args: The list of parameters to pass + """ + cursor = conn.execute( + f"SELECT * FROM msar.{func_name}({','.join(['%s'] * len(args))})", args + ) + cursor.row_factory = dict_row + return cursor.fetchall() + + +def load_file_with_conn(conn, file_handle): """Run an SQL script from a file, using psycopg.""" - conn_str = str(engine.url) - with psycopg.connect(conn_str) as conn: - conn.execute(file_handle.read()) + conn.execute(file_handle.read()) diff --git a/db/constants.py b/db/constants.py index efc7a2504d..76f7d52c6d 100644 --- a/db/constants.py +++ b/db/constants.py @@ -1,8 +1,18 @@ -MATHESAR_PREFIX = "mathesar_" ID = "id" ID_ORIGINAL = "id_original" -INFERENCE_SCHEMA = f"{MATHESAR_PREFIX}inference_schema" COLUMN_NAME_TEMPLATE = 'Column ' # auto generated column name 'Column 1' (no undescore) -MSAR_PUBLIC = 'msar' -MSAR_PRIVAT = f"__{MSAR_PUBLIC}" -MSAR_VIEWS = f"{MSAR_PUBLIC}_views" + +MATHESAR_PREFIX = "mathesar_" +MSAR_PUBLIC_SCHEMA = 'msar' +MSAR_PRIVATE_SCHEMA = f"__{MSAR_PUBLIC_SCHEMA}" +TYPES_SCHEMA = f"{MATHESAR_PREFIX}types" +INFERENCE_SCHEMA = f"{MATHESAR_PREFIX}inference_schema" +VIEWS_SCHEMA = f"{MSAR_PUBLIC_SCHEMA}_views" + +INTERNAL_SCHEMAS = { + TYPES_SCHEMA, + MSAR_PUBLIC_SCHEMA, + MSAR_PRIVATE_SCHEMA, + VIEWS_SCHEMA, + INFERENCE_SCHEMA +} diff --git a/db/constraints/operations/create.py b/db/constraints/operations/create.py index d12d8591a5..45b9b4600f 100644 --- a/db/constraints/operations/create.py +++ b/db/constraints/operations/create.py @@ -1,13 +1,14 @@ -from db.connection import execute_msar_func_with_engine +import json +from db.connection import execute_msar_func_with_engine, exec_msar_func -def add_constraint(constraint_obj, engine): + +def add_constraint_via_sql_alchemy(constraint_obj, engine): """ Add a constraint. Args: - constraint_obj: A constraint object instantiatated with appropriate - params. + constraint_obj: (See __msar.process_con_def_jsonb for details) engine: SQLAlchemy engine object for connecting. Returns: @@ -19,3 +20,17 @@ def add_constraint(constraint_obj, engine): constraint_obj.table_oid, constraint_obj.get_constraint_def_json() ).fetchone()[0] + + +def create_constraint(table_oid, constraint_obj_list, conn): + """ + Create a constraint using a psycopg connection. + + Args: + constraint_obj_list: (See __msar.process_con_def_jsonb for details) + conn: a psycopg connection + + Returns: + Returns a list of oid(s) of constraints for a given table. + """ + return exec_msar_func(conn, 'add_constraints', table_oid, json.dumps(constraint_obj_list)).fetchone()[0] diff --git a/db/constraints/operations/drop.py b/db/constraints/operations/drop.py index c2f4807098..5faeb1bba9 100644 --- a/db/constraints/operations/drop.py +++ b/db/constraints/operations/drop.py @@ -1,4 +1,4 @@ -from db.connection import execute_msar_func_with_engine +from db.connection import execute_msar_func_with_engine, exec_msar_func def drop_constraint(table_name, schema_name, engine, constraint_name): @@ -17,3 +17,19 @@ def drop_constraint(table_name, schema_name, engine, constraint_name): return execute_msar_func_with_engine( engine, 'drop_constraint', schema_name, table_name, constraint_name ).fetchone()[0] + + +def drop_constraint_via_oid(table_oid, constraint_oid, conn): + """ + Drop a constraint. + + Args: + table_oid: Identity of the table to delete constraint for. + constraint_oid: The OID of the constraint to delete. 
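A hedged round-trip sketch for the two psycopg constraint wrappers in this directory; the DSN and OIDs are assumptions, and the constraint-definition dict follows the `__msar.process_con_def_jsonb` shape referenced in the docstrings (here, a single-column unique constraint):

```python
import psycopg

from db.constraints.operations.create import create_constraint
from db.constraints.operations.drop import drop_constraint_via_oid

conn = psycopg.connect("postgresql://mathesar@localhost/mathesar")  # assumed DSN
table_oid = 12345  # assumed OID

# "u" is the PostgreSQL contype letter for a unique constraint, and
# "columns" holds attnums; this dict shape is assumed from the SQL layer.
constraint_oids = create_constraint(table_oid, [{"type": "u", "columns": [2]}], conn)

# Per the docstring, create_constraint returns the new constraint OID(s).
drop_constraint_via_oid(table_oid, constraint_oids[0], conn)
```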
+ + Returns: + The name of the dropped constraint. + """ + return exec_msar_func( + conn, 'drop_constraint', table_oid, constraint_oid + ).fetchone()[0] diff --git a/db/constraints/operations/select.py b/db/constraints/operations/select.py index df0e73fc9d..de47bacb1d 100644 --- a/db/constraints/operations/select.py +++ b/db/constraints/operations/select.py @@ -1,7 +1,12 @@ +from sqlalchemy import select, and_ + +from db.connection import select_from_msar_func from db.utils import get_pg_catalog_table from db.metadata import get_empty_metadata -from sqlalchemy import select, and_ + +def get_constraints_for_table(table_oid, conn): + return select_from_msar_func(conn, 'get_constraints_for_table', table_oid) def get_constraints_with_oids(engine, table_oid=None): diff --git a/demo/install/__init__.py b/db/databases/__init__.py similarity index 100% rename from demo/install/__init__.py rename to db/databases/__init__.py diff --git a/demo/management/__init__.py b/db/databases/operations/__init__.py similarity index 100% rename from demo/management/__init__.py rename to db/databases/operations/__init__.py diff --git a/db/databases/operations/drop.py b/db/databases/operations/drop.py new file mode 100644 index 0000000000..1d58aa4e4d --- /dev/null +++ b/db/databases/operations/drop.py @@ -0,0 +1,14 @@ +from db.connection import exec_msar_func +from psycopg import sql + + +def drop_database(database_oid, conn): + cursor = conn.cursor() + conn.autocommit = True + drop_database_query = exec_msar_func( + conn, + 'drop_database_query', + database_oid + ).fetchone()[0] + cursor.execute(sql.SQL(drop_database_query)) + cursor.close() diff --git a/db/databases/operations/select.py b/db/databases/operations/select.py new file mode 100644 index 0000000000..d95b1e8f75 --- /dev/null +++ b/db/databases/operations/select.py @@ -0,0 +1,5 @@ +from db.connection import exec_msar_func + + +def get_database(conn): + return exec_msar_func(conn, 'get_current_database_info').fetchone()[0] diff --git a/db/install.py b/db/install.py index fbeede2466..2fd67ef57e 100644 --- a/db/install.py +++ b/db/install.py @@ -1,10 +1,8 @@ -from psycopg.errors import InsufficientPrivilege -from sqlalchemy import text -from sqlalchemy.exc import OperationalError, ProgrammingError +import psycopg +from psycopg.errors import OperationalError, InsufficientPrivilege +from psycopg import sql -from db import engine from db.sql import install as sql_install -from db.types import install as types_install def install_mathesar( @@ -18,19 +16,22 @@ def install_mathesar( root_db='postgres' ): """Create database and install Mathesar on it.""" - user_db_engine = engine.create_future_engine( - username, password, hostname, database_name, port, - connect_args={"connect_timeout": 10} - ) + try: - user_db_engine.connect() + conn = psycopg.connect( + host=hostname, + port=port, + dbname=database_name, + user=username, + password=password, + ) print( "Installing Mathesar on preexisting PostgreSQL database" f" {database_name} at host {hostname}..." 
) - sql_install.install(user_db_engine) - types_install.install_mathesar_on_database(user_db_engine) - user_db_engine.dispose() + with conn: + sql_install.install(conn) + except OperationalError as e: if create_db: database_created = _create_database( @@ -45,13 +46,19 @@ def install_mathesar( else: database_created = False if database_created: + conn = psycopg.connect( + host=hostname, + port=port, + dbname=database_name, + user=username, + password=password, + ) print( "Installing Mathesar on PostgreSQL database" f" {database_name} at host {hostname}..." ) - sql_install.install(user_db_engine) - types_install.install_mathesar_on_database(user_db_engine) - user_db_engine.dispose() + with conn: + sql_install.install(conn) else: print(f"Skipping installing on DB with key {database_name}.") raise e @@ -71,21 +78,24 @@ def _create_database( # Database. So we use the default database `postgres` that comes with # postgres. # TODO Throw correct error when the root database does not exist. - root_db_engine = engine.create_future_engine( - username, password, hostname, root_database, port, - connect_args={"connect_timeout": 10} + root_db_conn = psycopg.connect( + host=hostname, + port=port, + dbname=root_database, + user=username, + password=password, ) try: - with root_db_engine.connect() as conn: - conn.execution_options(isolation_level="AUTOCOMMIT") - conn.execute(text(f'CREATE DATABASE "{db_name}"')) - root_db_engine.dispose() + with root_db_conn as conn: + cursor = conn.cursor() + conn.autocommit = True + cursor.execute(sql.SQL(f'CREATE DATABASE "{db_name}"')) + cursor.close() print(f"Created DB is {db_name}.") return True - except ProgrammingError as e: - if isinstance(e.orig, InsufficientPrivilege): - print(f"Database {db_name} could not be created due to Insufficient Privilege") - return False + except InsufficientPrivilege: + print(f"Database {db_name} could not be created due to Insufficient Privilege") + return False except Exception: print(f"Database {db_name} could not be created!") return False diff --git a/db/links/operations/create.py b/db/links/operations/create.py index d179745be6..1ffc89a738 100644 --- a/db/links/operations/create.py +++ b/db/links/operations/create.py @@ -1,4 +1,6 @@ -from db.connection import execute_msar_func_with_engine +import json + +from db.connection import execute_msar_func_with_engine, exec_msar_func def create_foreign_key_link( @@ -25,14 +27,43 @@ def create_foreign_key_link( """ return execute_msar_func_with_engine( engine, - 'create_many_to_one_link', - referent_table_oid, - referrer_table_oid, + 'add_foreign_key_column', referrer_column_name, + referrer_table_oid, + referent_table_oid, unique_link ).fetchone()[0] +def add_foreign_key_column( + conn, + column_name, + referrer_table_oid, + referent_table_oid, + unique_link=False +): + """ + Creates a Many-to-One or One-to-One link. + + Args: + conn: psycopg3 connection object. + column_name: Name of the new column to be created in the referrer + table. + referrer_table_oid: The OID of the referrer table. + referent_table_oid: The OID of the referent table. + unique_link: Whether to make the link one-to-one + instead of many-to-one. + """ + exec_msar_func( + conn, + 'add_foreign_key_column', + column_name, + referrer_table_oid, + referent_table_oid, + unique_link + ) + + def create_many_to_many_link(engine, schema_oid, map_table_name, referents_dict): """ Creates a Many-to-Many link. 
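Both hunks in this file track SQL-level renames (`create_many_to_one_link` to `add_foreign_key_column`, and `create_many_to_many_link` to `add_mapping_table`) and reorder arguments to match. A usage sketch of the psycopg helper added above; the DSN and OIDs are illustrative assumptions:

```python
import psycopg

from db.links.operations.create import add_foreign_key_column

conn = psycopg.connect("postgresql://mathesar@localhost/mathesar")  # assumed DSN

# Adds a "checkout_id" foreign key column on the referrer table pointing
# at the referent table; unique_link=True would make the link one-to-one.
add_foreign_key_column(
    conn,
    "checkout_id",
    referrer_table_oid=12345,  # assumed OID
    referent_table_oid=67890,  # assumed OID
    unique_link=False,
)
```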
@@ -51,9 +82,44 @@
     """
     return execute_msar_func_with_engine(
         engine,
-        'create_many_to_many_link',
+        'add_mapping_table',
         schema_oid,
         map_table_name,
-        referents_dict['referent_table_oids'],
-        referents_dict['column_names']
+        json.dumps(
+            [
+                {"column_name": c, "referent_table_oid": i}
+                for c, i in zip(
+                    referents_dict['column_names'],
+                    referents_dict['referent_table_oids'],
+                )
+            ]
+        )
     ).fetchone()[0]
+
+
+def add_mapping_table(
+    conn,
+    schema_oid,
+    table_name,
+    mapping_columns,
+):
+    """
+    Add a mapping table to give a many-to-many link between referents.
+
+    Args:
+        conn: psycopg3 connection object.
+        schema_oid: The OID of the schema for the mapping table.
+        table_name: The name for the new mapping table.
+        mapping_columns: A list of dictionaries giving the foreign key
+            columns to create in the mapping table.
+
+    The elements of the mapping_columns list must have the form
+    {"column_name": <str>, "referent_table_oid": <int>}
+    """
+    exec_msar_func(
+        conn,
+        'add_mapping_table',
+        schema_oid,
+        table_name,
+        json.dumps(mapping_columns)
+    )
diff --git a/db/records/operations/delete.py b/db/records/operations/delete.py
index 02454da56e..ff1f656514 100644
--- a/db/records/operations/delete.py
+++ b/db/records/operations/delete.py
@@ -1,8 +1,28 @@
+import json
 from sqlalchemy import delete
 
+from db import connection as db_conn
 from db.tables.utils import get_primary_key_column
 
 
+def delete_records_from_table(conn, record_ids, table_oid):
+    """
+    Delete records from a table by id.
+
+    Args:
+        table_oid: The OID of the table whose records we'll delete.
+        record_ids: A list of primary key values.
+
+    The table must have a single primary key column.
+    """
+    return db_conn.exec_msar_func(
+        conn,
+        'delete_records_from_table',
+        table_oid,
+        json.dumps(record_ids),
+    ).fetchone()[0]
+
+
 def delete_record(table, engine, id_value):
     primary_key_column = get_primary_key_column(table)
     query = delete(table).where(primary_key_column == id_value)
diff --git a/db/records/operations/insert.py b/db/records/operations/insert.py
index c61dacfebb..bec20df1e8 100644
--- a/db/records/operations/insert.py
+++ b/db/records/operations/insert.py
@@ -5,6 +5,7 @@
 from psycopg2 import sql
 from sqlalchemy.exc import IntegrityError, ProgrammingError
 from psycopg2.errors import NotNullViolation, ForeignKeyViolation, DatatypeMismatch, UniqueViolation, ExclusionViolation
+from db import connection as db_conn
 from db.columns.exceptions import NotNullError, ForeignKeyError, TypeMismatchError, UniqueValueError, ExclusionError
 from db.columns.base import MathesarColumn
 from db.constants import ID, ID_ORIGINAL
@@ -15,6 +16,18 @@
 READ_SIZE = 20000
 
 
+def add_record_to_table(conn, record_def, table_oid, return_record_summaries=False):
+    """Add a record to a table."""
+    result = db_conn.exec_msar_func(
+        conn,
+        'add_record_to_table',
+        table_oid,
+        json.dumps(record_def),
+        return_record_summaries
+    ).fetchone()[0]
+    return result
+
+
 def insert_record_or_records(table, engine, record_data):
     """
     record_data can be a dictionary, tuple, or list of dictionaries or tuples.
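The records package gains symmetric psycopg wrappers: `add_record_to_table` above and `patch_record_in_table` in `update.py` later in this diff. A sketch of the pair; the DSN, table OID, attnum-keyed record definition, and primary key value are all illustrative assumptions:

```python
import psycopg

from db.records.operations.insert import add_record_to_table
from db.records.operations.update import patch_record_in_table

conn = psycopg.connect("postgresql://mathesar@localhost/mathesar")  # assumed DSN
table_oid = 12345  # assumed OID

# Record definitions are JSON objects keyed by column attnum (assumed
# convention); the SQL layer defines the exact response shape.
add_record_to_table(conn, {"2": "A Wrinkle in Time"}, table_oid)

# Patch an existing record, identified by its primary key value.
patch_record_in_table(conn, {"2": "A Wind in the Door"}, 1, table_oid)
```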
diff --git a/db/records/operations/relevance.py b/db/records/operations/relevance.py
index 056f65eaac..f20daa3c23 100644
--- a/db/records/operations/relevance.py
+++ b/db/records/operations/relevance.py
@@ -1,5 +1,6 @@
 from sqlalchemy import case, select, desc
 from db.types import categories
+from db.types.base import MathesarCustomType
 from db.types.operations.convert import get_db_type_enum_from_class
 
 WEIGHT_4 = 4
@@ -37,8 +38,8 @@ def _get_scored_selectable(relation, parameters_dict):
 
 def _get_col_score_expr(col, param_val):
     col_type = get_db_type_enum_from_class(col.type.__class__)
-
-    if col_type in categories.STRING_LIKE_TYPES:
+    searchable_string_types = categories.STRING_LIKE_TYPES | frozenset([MathesarCustomType.URI, MathesarCustomType.EMAIL])
+    if col_type in searchable_string_types:
         score_expr = case(
             (col.ilike(param_val), WEIGHT_4),
             (col.ilike(param_val + '%'), WEIGHT_3),
diff --git a/db/records/operations/select.py b/db/records/operations/select.py
index 4da11f90f5..9ef568d79e 100644
--- a/db/records/operations/select.py
+++ b/db/records/operations/select.py
@@ -1,6 +1,8 @@
+import json
 from sqlalchemy import select
 from sqlalchemy.sql.functions import count
 
+from db import connection as db_conn
 from db.columns.base import MathesarColumn
 from db.tables.utils import get_primary_key_column
 from db.types.operations.cast import get_column_cast_expression
@@ -9,6 +11,100 @@
 from db.transforms.operations.apply import apply_transformations_deprecated
 
 
+def list_records_from_table(
+    conn,
+    table_oid,
+    limit=None,
+    offset=None,
+    order=None,
+    filter=None,
+    group=None,
+    return_record_summaries=False
+):
+    """
+    Get records from a table.
+
+    The order definition objects should have the form
+    {"attnum": <int>, "direction": <direction>}
+
+    Only data from which the user is granted `SELECT` is returned.
+
+    Args:
+        table_oid: The OID of the table whose records we'll get.
+        limit: The maximum number of rows we'll return.
+        offset: The number of rows to skip before returning records from
+            following rows.
+        order: An array of ordering definition objects.
+        filter: An array of filter definition objects.
+        group: An array of group definition objects.
+    """
+    result = db_conn.exec_msar_func(
+        conn,
+        'list_records_from_table',
+        table_oid,
+        limit,
+        offset,
+        json.dumps(order) if order is not None else None,
+        json.dumps(filter) if filter is not None else None,
+        json.dumps(group) if group is not None else None,
+        return_record_summaries
+    ).fetchone()[0]
+    return result
+
+
+def get_record_from_table(
+    conn,
+    record_id,
+    table_oid,
+    return_record_summaries=False
+):
+    """
+    Get a single record from a table by its primary key.
+
+    Only data from which the user is granted `SELECT` is returned.
+
+    Args:
+        record_id: The primary key value of the record.
+        table_oid: The OID of the table whose record we'll get.
+    """
+    result = db_conn.exec_msar_func(
+        conn,
+        'get_record_from_table',
+        table_oid,
+        record_id,
+        return_record_summaries,
+    ).fetchone()[0]
+    return result
+
+
+def search_records_from_table(
+    conn,
+    table_oid,
+    search=None,
+    limit=10,
+    return_record_summaries=False,
+):
+    """
+    Get records from a table according to a search specification.
+
+    Only data from which the user is granted `SELECT` is returned.
+
+    Args:
+        table_oid: The OID of the table whose records we'll get.
+        search: A list of dictionaries defining a search.
+        limit: The maximum number of rows we'll return.
+ + The search definition objects should have the form + {"attnum": , "literal": } + """ + search = search or [] + result = db_conn.exec_msar_func( + conn, 'search_records_from_table', + table_oid, json.dumps(search), limit, return_record_summaries + ).fetchone()[0] + return result + + def get_record(table, engine, id_value): primary_key_column = get_primary_key_column(table) pg_query = select(table).where(primary_key_column == id_value) diff --git a/db/records/operations/update.py b/db/records/operations/update.py index 1c206669dd..1535251966 100644 --- a/db/records/operations/update.py +++ b/db/records/operations/update.py @@ -1,3 +1,5 @@ +import json +from db import connection as db_conn from db.records.operations.select import get_record from db.tables.utils import get_primary_key_column from sqlalchemy.exc import DataError @@ -5,6 +7,19 @@ from db.records.exceptions import InvalidDate, InvalidDateFormat +def patch_record_in_table(conn, record_def, record_id, table_oid, return_record_summaries=False): + """Update a record in a table.""" + result = db_conn.exec_msar_func( + conn, + 'patch_record_in_table', + table_oid, + record_id, + json.dumps(record_def), + return_record_summaries + ).fetchone()[0] + return result + + def update_record(table, engine, id_value, record_data): primary_key_column = get_primary_key_column(table) with engine.begin() as connection: diff --git a/demo/management/commands/__init__.py b/db/roles/__init__.py similarity index 100% rename from demo/management/commands/__init__.py rename to db/roles/__init__.py diff --git a/release-scripts/__init__.py b/db/roles/operations/__init__.py similarity index 100% rename from release-scripts/__init__.py rename to db/roles/operations/__init__.py diff --git a/db/roles/operations/create.py b/db/roles/operations/create.py new file mode 100644 index 0000000000..afb1460186 --- /dev/null +++ b/db/roles/operations/create.py @@ -0,0 +1,5 @@ +from db.connection import exec_msar_func + + +def create_role(rolename, password, login, conn): + return exec_msar_func(conn, 'create_role', rolename, password, login).fetchone()[0] diff --git a/db/roles/operations/drop.py b/db/roles/operations/drop.py new file mode 100644 index 0000000000..6c426bcbba --- /dev/null +++ b/db/roles/operations/drop.py @@ -0,0 +1,5 @@ +from db.connection import exec_msar_func + + +def drop_role(role_oid, conn): + exec_msar_func(conn, 'drop_role', role_oid) diff --git a/db/roles/operations/membership.py b/db/roles/operations/membership.py new file mode 100644 index 0000000000..4b963653b9 --- /dev/null +++ b/db/roles/operations/membership.py @@ -0,0 +1,5 @@ +from db.connection import exec_msar_func + + +def set_members_to_role(parent_role_oid, members, conn): + return exec_msar_func(conn, 'set_members_to_role', parent_role_oid, members).fetchone()[0] diff --git a/db/roles/operations/ownership.py b/db/roles/operations/ownership.py new file mode 100644 index 0000000000..bdde2248ba --- /dev/null +++ b/db/roles/operations/ownership.py @@ -0,0 +1,13 @@ +from db.connection import exec_msar_func + + +def transfer_database_ownership(new_owner_oid, conn): + return exec_msar_func(conn, 'transfer_database_ownership', new_owner_oid).fetchone()[0] + + +def transfer_schema_ownership(schema_oid, new_owner_oid, conn): + return exec_msar_func(conn, 'transfer_schema_ownership', schema_oid, new_owner_oid).fetchone()[0] + + +def transfer_table_ownership(table_oid, new_owner_oid, conn): + return exec_msar_func(conn, 'transfer_table_ownership', table_oid, new_owner_oid).fetchone()[0] diff 
--git a/db/roles/operations/select.py b/db/roles/operations/select.py new file mode 100644 index 0000000000..daffc31550 --- /dev/null +++ b/db/roles/operations/select.py @@ -0,0 +1,21 @@ +from db.connection import exec_msar_func + + +def list_roles(conn): + return exec_msar_func(conn, 'list_roles').fetchone()[0] + + +def get_current_role_from_db(conn): + return exec_msar_func(conn, 'get_current_role').fetchone()[0] + + +def list_db_priv(conn): + return exec_msar_func(conn, 'list_db_priv').fetchone()[0] + + +def list_schema_privileges(schema_oid, conn): + return exec_msar_func(conn, 'list_schema_privileges', schema_oid).fetchone()[0] + + +def list_table_privileges(table_oid, conn): + return exec_msar_func(conn, 'list_table_privileges', table_oid).fetchone()[0] diff --git a/db/roles/operations/update.py b/db/roles/operations/update.py new file mode 100644 index 0000000000..900d1aca97 --- /dev/null +++ b/db/roles/operations/update.py @@ -0,0 +1,22 @@ +import json +from db.connection import exec_msar_func + + +def replace_database_privileges_for_roles(conn, privileges): + return exec_msar_func( + conn, 'replace_database_privileges_for_roles', json.dumps(privileges) + ).fetchone()[0] + + +def replace_schema_privileges_for_roles(conn, schema_oid, privileges): + return exec_msar_func( + conn, 'replace_schema_privileges_for_roles', + schema_oid, json.dumps(privileges) + ).fetchone()[0] + + +def replace_table_privileges_for_roles(conn, table_oid, privileges): + return exec_msar_func( + conn, 'replace_table_privileges_for_roles', + table_oid, json.dumps(privileges) + ).fetchone()[0] diff --git a/db/schemas/operations/alter.py b/db/schemas/operations/alter.py index e6b345720f..03f933b491 100644 --- a/db/schemas/operations/alter.py +++ b/db/schemas/operations/alter.py @@ -1,48 +1,34 @@ -from db.connection import execute_msar_func_with_engine +import json -SUPPORTED_SCHEMA_ALTER_ARGS = {'name', 'description'} +from db.connection import execute_msar_func_with_engine, exec_msar_func -def rename_schema(schema_name, engine, rename_to): +def patch_schema_via_sql_alchemy(schema_name, engine, patch): """ - Rename an existing schema. + Patch a schema using a SQLAlchemy engine. Args: schema_name: Name of the schema to change. engine: SQLAlchemy engine object for connecting. - rename_to: New schema name. - - Returns: - Returns a string giving the command that was run. + patch: A dict mapping the following fields to new values: + - 'name' (optional): New name for the schema. + - 'description' (optional): New description for the schema. """ - if rename_to == schema_name: - return - return execute_msar_func_with_engine( - engine, 'rename_schema', schema_name, rename_to - ).fetchone()[0] + execute_msar_func_with_engine(engine, "patch_schema", schema_name, json.dumps(patch)) -def comment_on_schema(schema_name, engine, comment): +def patch_schema(schema_oid, conn, patch): """ - Change description of a schema. + Patch a schema using a psycopg connection. Args: - schema_name: The name of the schema whose comment we will - change. - comment: The new comment. Any quotes or special characters must - be escaped. - engine: SQLAlchemy engine object for connecting. + schema_oid: The OID of the schema to change. + conn: a psycopg connection + patch: A dict mapping the following fields to new values: + - 'name' (optional): New name for the schema. + - 'description' (optional): New description for the schema. Returns: - Returns a string giving the command that was run. 
+ The SchemaInfo describing the user-defined schema in the database. """ - return execute_msar_func_with_engine( - engine, 'comment_on_schema', schema_name, comment - ).fetchone()[0] - - -def alter_schema(name, engine, update_data): - if "description" in update_data: - comment_on_schema(name, engine, update_data["description"]) - if "name" in update_data: - rename_schema(name, engine, update_data["name"]) + return exec_msar_func(conn, "patch_schema", schema_oid, json.dumps(patch)).fetchone()[0] diff --git a/db/schemas/operations/create.py b/db/schemas/operations/create.py index a079ec2cd3..07e6a3f28a 100644 --- a/db/schemas/operations/create.py +++ b/db/schemas/operations/create.py @@ -1,26 +1,54 @@ -from db.schemas.operations.alter import comment_on_schema -from db.connection import execute_msar_func_with_engine +from db.connection import execute_msar_func_with_engine, exec_msar_func -def create_schema(schema_name, engine, comment=None, if_not_exists=False): +def create_schema_via_sql_alchemy(schema_name, engine, description=None): """ - Creates a schema. + Creates a schema using a SQLAlchemy engine. Args: schema_name: Name of the schema to create. engine: SQLAlchemy engine object for connecting. - comment: The new comment. Any quotes or special characters must - be escaped. - if_not_exists: Whether to ignore an error if the schema does - exist. + description: A new description to set on the schema. + + If a schema already exists with the given name, this function will raise an error. Returns: - Returns a string giving the command that was run. + The integer oid of the newly created schema. """ - result = execute_msar_func_with_engine( - engine, 'create_schema', schema_name, if_not_exists + return execute_msar_func_with_engine( + engine, 'create_schema', schema_name, None, description ).fetchone()[0] - if comment: - comment_on_schema(schema_name, engine, comment) - return result + +def create_schema_if_not_exists_via_sql_alchemy(schema_name, engine): + """ + Ensure that a schema exists using a SQLAlchemy engine. + + Args: + schema_name: Name of the schema to create. + engine: SQLAlchemy engine object for connecting. + + Returns: + The integer oid of the newly created schema. + """ + return execute_msar_func_with_engine( + engine, 'create_schema_if_not_exists', schema_name + ).fetchone()[0] + + +def create_schema(schema_name, conn, owner_oid, description=None): + """ + Create a schema using a psycopg connection. + + Args: + schema_name: Name of the schema to create. + conn: a psycopg connection + owner_oid: The OID of the role who will own the new schema.(optional) + description: A new description to set on the schema.(optional) + + If a schema already exists with the given name, this function will raise an error. + + Returns: + The SchemaInfo describing the user-defined schema in the database. + """ + return exec_msar_func(conn, 'create_schema', schema_name, owner_oid, description).fetchone()[0] diff --git a/db/schemas/operations/drop.py b/db/schemas/operations/drop.py index 2b78e60a49..919ce352bb 100644 --- a/db/schemas/operations/drop.py +++ b/db/schemas/operations/drop.py @@ -1,20 +1,33 @@ -from db.connection import execute_msar_func_with_engine +from db.connection import execute_msar_func_with_engine, exec_msar_func -def drop_schema(schema_name, engine, cascade=False, if_exists=False): +def drop_schema_via_name(engine, name, cascade=False): """ - Drop a schema. + Drop a schema by its name. + + If no schema exists with the given name, an exception will be raised. 
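Combining the psycopg helpers from `create.py` above with this file, a hedged create-then-drop sketch; the DSN and owner OID are assumptions, and the returned SchemaInfo is assumed to carry the new schema's `oid`:

```python
import psycopg

from db.schemas.operations.create import create_schema
from db.schemas.operations.drop import drop_schema_via_oid

conn = psycopg.connect("postgresql://mathesar@localhost/mathesar")  # assumed DSN

# owner_oid is assumed to be the OID of an existing role.
schema_info = create_schema("library", conn, owner_oid=10, description="Sample schema")

# Drop by OID; cascade=True would also drop contained objects.
drop_schema_via_oid(conn, schema_info["oid"], cascade=False)
```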
+ + Deprecated: + Use drop_schema_via_oid instead. This function is deprecated because we + are phasing out name-based operations in favor of OID-based operations + and we are phasing out SQLAlchemy in favor of psycopg. Args: - schema_name: Name of the schema to drop. - engine: SQLAlchemy engine object for connecting. - cascade: Whether to drop the dependent objects. - if_exists: Whether to ignore an error if the schema doesn't - exist. + engine: SQLAlchemy engine object for connecting. + name: Name of the schema to drop. + cascade: Whether to drop the dependent objects. + """ + execute_msar_func_with_engine(engine, 'drop_schema', name, cascade).fetchone() + - Returns: - Returns a string giving the command that was run. +def drop_schema_via_oid(conn, id, cascade=False): + """ + Drop a schema by its OID. + + If no schema exists with the given oid, an exception will be raised. + + Args: + conn: A psycopg connection. + id: The OID of the schema to drop. + cascade: Whether to drop the dependent objects. """ - return execute_msar_func_with_engine( - engine, 'drop_schema', schema_name, cascade, if_exists - ).fetchone()[0] + exec_msar_func(conn, 'drop_schema', id, cascade).fetchone() diff --git a/db/schemas/operations/select.py b/db/schemas/operations/select.py index 2a40e8666c..973876909a 100644 --- a/db/schemas/operations/select.py +++ b/db/schemas/operations/select.py @@ -1,23 +1,17 @@ from sqlalchemy import select, and_, not_, or_, func -from db import constants -from db import types +from db.constants import INTERNAL_SCHEMAS from db.utils import get_pg_catalog_table from db.metadata import get_empty_metadata +from db.connection import exec_msar_func -TYPES_SCHEMA = types.base.SCHEMA -TEMP_INFER_SCHEMA = constants.INFERENCE_SCHEMA -MSAR_PUBLIC = constants.MSAR_PUBLIC -MSAR_PRIVAT = constants.MSAR_PRIVAT -MSAR_VIEWS = constants.MSAR_VIEWS -EXCLUDED_SCHEMATA = [ - "information_schema", - MSAR_PRIVAT, - MSAR_PUBLIC, - MSAR_VIEWS, - TEMP_INFER_SCHEMA, - TYPES_SCHEMA, -] + +def list_schemas(conn): + return exec_msar_func(conn, 'list_schemas').fetchone()[0] + + +def get_schema(schema_oid, conn): + return exec_msar_func(conn, 'get_schema', schema_oid).fetchone()[0] def reflect_schema(engine, name=None, oid=None, metadata=None): @@ -46,7 +40,8 @@ def get_mathesar_schemas_with_oids(engine): select(pg_namespace.c.nspname.label('schema'), pg_namespace.c.oid) .where( and_( - *[pg_namespace.c.nspname != schema for schema in EXCLUDED_SCHEMATA], + *[pg_namespace.c.nspname != schema for schema in INTERNAL_SCHEMAS], + pg_namespace.c.nspname != "information_schema", not_(pg_namespace.c.nspname.like("pg_%")) ) ) diff --git a/db/sql/00_msar.sql b/db/sql/00_msar.sql new file mode 100644 index 0000000000..dea812a9ba --- /dev/null +++ b/db/sql/00_msar.sql @@ -0,0 +1,5160 @@ +CREATE SCHEMA IF NOT EXISTS __msar; +CREATE SCHEMA IF NOT EXISTS msar; + +---------------------------------------------------------------------------------------------------- +---------------------------------------------------------------------------------------------------- +-- GENERAL DDL FUNCTIONS +-- +-- Functions in this section are quite general, and are the basis of the others. +---------------------------------------------------------------------------------------------------- +---------------------------------------------------------------------------------------------------- + + +CREATE OR REPLACE FUNCTION +__msar.exec_ddl(command text) RETURNS text AS $$/* +Execute the given command, returning the command executed.
+ +Not useful for SELECTing from tables. Most useful when you're performing DDL. + +Args: + command: Raw string that will be executed as a command. +*/ +BEGIN + EXECUTE command; + RETURN command; +END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +__msar.exec_ddl(command_template text, arguments variadic anyarray) RETURNS text AS $$/* +Execute a templated command, returning the command executed. + +The template is given in the first argument, and all further arguments are used to fill in the +template. Not useful for SELECTing from tables. Most useful when you're performing DDL. + +Args: + command_template: Raw string that will be executed as a command. + arguments: arguments that will be used to fill in the template. +*/ +DECLARE formatted_command TEXT; +BEGIN + formatted_command := format(command_template, VARIADIC arguments); + RETURN __msar.exec_ddl(formatted_command); +END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +__msar.build_text_tuple(text[]) RETURNS text AS $$ +SELECT '(' || string_agg(col, ', ') || ')' FROM unnest($1) x(col); +$$ LANGUAGE sql RETURNS NULL ON NULL INPUT; + + +---------------------------------------------------------------------------------------------------- +---------------------------------------------------------------------------------------------------- +-- GENERAL DQL FUNCTIONS +-- +-- Functions in this section are quite general, and are the basis of the others. +---------------------------------------------------------------------------------------------------- +---------------------------------------------------------------------------------------------------- + + +CREATE OR REPLACE FUNCTION +__msar.exec_dql(command text) RETURNS jsonb AS $$/* +Execute the given command, returning a JSON object describing the records in the following form: +[ + {"id": 1, "col1_name": "value1", "col2_name": "value2"}, + {"id": 2, "col1_name": "value1", "col2_name": "value2"}, + {"id": 3, "col1_name": "value1", "col2_name": "value2"}, + ... +] + +Useful for SELECTing from tables. Most useful when you're performing DQL. + +Note that you must include the primary key column (`id` in the case of a Mathesar table) in the +command if you want the returned records to be uniquely identifiable. + +Args: + command: Raw string that will be executed as a command. +*/ +DECLARE + records jsonb; +BEGIN + EXECUTE 'WITH cte AS (' || command || ') + SELECT jsonb_agg(row_to_json(cte.*)) FROM cte' INTO records; + RETURN records; +END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +__msar.exec_dql(command_template text, arguments variadic anyarray) RETURNS jsonb AS $$/* +Execute a templated command, returning a JSON object describing the records in the following form: +[ + {"id": 1, "col1_name": "value1", "col2_name": "value2"}, + {"id": 2, "col1_name": "value1", "col2_name": "value2"}, + {"id": 3, "col1_name": "value1", "col2_name": "value2"}, + ... +] + +The template is given in the first argument, and all further arguments are used to fill in the +template. Useful for SELECTing from tables. Most useful when you're performing DQL. + +Note that you must include the primary key column (`id` in the case of a Mathesar table) in the +command_template if you want the returned records to be uniquely identifiable. + +Args: + command_template: Raw string that will be executed as a command. + arguments: arguments that will be used to fill in the template.
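+ +Example (a hypothetical call; the schema and table names are illustrative): + + SELECT __msar.exec_dql('SELECT id, title FROM %1$I.%2$I ORDER BY id', 'library', 'books');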
+*/ +DECLARE formatted_command TEXT; +BEGIN + formatted_command := format(command_template, VARIADIC arguments); + RETURN __msar.exec_dql(formatted_command); +END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +---------------------------------------------------------------------------------------------------- +---------------------------------------------------------------------------------------------------- +-- INFO FUNCTIONS +-- +-- Functions in this section get information about a given schema, table or column. +---------------------------------------------------------------------------------------------------- +---------------------------------------------------------------------------------------------------- + +CREATE OR REPLACE FUNCTION msar.col_description(tab_id oid, col_id integer) RETURNS text AS $$/* +Transparent wrapper for col_description. Putting it in the `msar` namespace helps route all DB calls +from Python through a single Python module. +*/ + BEGIN + RETURN col_description(tab_id, col_id); + END +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION msar.obj_description(obj_id oid, catalog_name text) RETURNS text AS $$/* +Transparent wrapper for obj_description. Putting it in the `msar` namespace helps route all DB calls +from Python through a single Python module. +*/ + BEGIN + RETURN obj_description(obj_id, catalog_name); + END +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION __msar.jsonb_key_exists(data jsonb, key text) RETURNS boolean AS $$/* +Wraps the `?` jsonb operator for improved readability. +*/ + BEGIN + RETURN data ? key; + END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION msar.schema_exists(schema_name text) RETURNS boolean AS $$/* +Return true if the schema exists, false otherwise. + +Args: + schema_name: The name of the schema, UNQUOTED. +*/ +SELECT EXISTS (SELECT 1 FROM pg_namespace WHERE nspname=schema_name); +$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION msar.get_schema_oid(sch_name text) RETURNS oid AS $$/* +Return the OID of a schema, or NULL if the schema does not exist. + +Args: + sch_name: The name of the schema, UNQUOTED. +*/ +SELECT oid FROM pg_namespace WHERE nspname=sch_name; +$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION msar.get_schema_name(sch_id oid) RETURNS TEXT AS $$/* +Return the UNQUOTED name for a given schema. + +Raises an exception if the schema is not found. + +Args: + sch_id: The OID of the schema. +*/ +DECLARE sch_name text; +BEGIN + SELECT nspname INTO sch_name FROM pg_namespace WHERE oid=sch_id; + + IF sch_name IS NULL THEN + RAISE EXCEPTION 'No schema with OID % exists.', sch_id + USING ERRCODE = '3F000'; -- invalid_schema_name + END IF; + + RETURN sch_name; +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION +__msar.build_qualified_name_sql(sch_name text, obj_name text) RETURNS text AS $$/* +Return the fully-qualified, properly quoted, name for a given database object (e.g., table). + +Args: + sch_name: The schema of the object, unquoted. + obj_name: The name of the object, unqualified and unquoted. +*/ +BEGIN + RETURN format('%I.%I', sch_name, obj_name); +END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +__msar.get_qualified_relation_name(rel_id oid) RETURNS text AS $$/* +Return the name for a given relation (e.g., table), qualified or quoted as appropriate. + +In cases where the relation is already included in the search path, the returned name will not be +fully-qualified.
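+ +For example (illustrative): if "public" is on the search path, a table public.books is returned as +just books, while a table in a schema that is not on the search path is returned fully-qualified and +quoted as needed, e.g. "My Schema".books.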
+ +The relation *must* be in the pg_class table to use this function. + +Args: + rel_id: The OID of the relation. +*/ +BEGIN + RETURN rel_id::regclass::text; +END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +__msar.get_qualified_relation_name_or_null(rel_id oid) RETURNS text AS $$/* +Return the name for a given relation (e.g., table), qualified or quoted as appropriate. + +In cases where the relation is already included in the search path, the returned name will not be +fully-qualified. + +The relation *must* be in the pg_class table to use this function. This function will return NULL if +no corresponding relation can be found. + +Args: + rel_id: The OID of the relation. +*/ +SELECT CASE + WHEN EXISTS (SELECT oid FROM pg_catalog.pg_class WHERE oid=rel_id) THEN rel_id::regclass::text +END +$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION msar.get_relation_name(rel_oid oid) RETURNS TEXT AS $$/* +Return the UNQUOTED name of a given relation (e.g., table). + +If the relation does not exist, an exception will be raised. + +Args: + rel_oid: The OID of the relation. +*/ +DECLARE rel_name text; +BEGIN + SELECT relname INTO rel_name FROM pg_class WHERE oid=rel_oid; + + IF rel_name IS NULL THEN + RAISE EXCEPTION 'Relation with OID % does not exist', rel_oid + USING ERRCODE = '42P01'; -- undefined_table + END IF; + + RETURN rel_name; +END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION msar.get_relation_schema_name(rel_oid oid) RETURNS TEXT AS $$/* +Return the UNQUOTED name of the schema which contains a given relation (e.g., table). + +If the relation does not exist, an exception will be raised. + +Args: + rel_oid: The OID of the relation. +*/ +DECLARE sch_name text; +BEGIN + SELECT n.nspname INTO sch_name + FROM pg_catalog.pg_class c + JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace + WHERE c.oid = rel_oid; + + IF sch_name IS NULL THEN + RAISE EXCEPTION 'Relation with OID % does not exist', rel_oid + USING ERRCODE = '42P01'; -- undefined_table + END IF; + + RETURN sch_name; +END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +DROP FUNCTION IF EXISTS msar.get_relation_oid(text, text) CASCADE; +CREATE OR REPLACE FUNCTION +msar.get_relation_oid(sch_name text, rel_name text) RETURNS oid AS $$/* +Return the OID for a given relation (e.g., table). + +The relation *must* be in the pg_class table to use this function. + +Args: + sch_name: The schema of the relation, unquoted. + rel_name: The name of the relation, unqualified and unquoted. +*/ +BEGIN + RETURN __msar.build_qualified_name_sql(sch_name, rel_name)::regclass::oid; +END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION msar.get_relation_namespace_oid(rel_id oid) RETURNS oid AS $$/* +Get the OID of the namespace containing the given relation. + +Most useful for getting the OID of the schema of a given table. + +Args: + rel_id: The OID of the relation whose namespace we want to find. +*/ +SELECT relnamespace FROM pg_class WHERE oid=rel_id; +$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; + + + +CREATE OR REPLACE FUNCTION +msar.get_column_name(rel_id oid, col_id integer) RETURNS text AS $$/* +Return the UNQUOTED name for a given column in a given relation (e.g., table). + +More precisely, this function returns the name of attributes of any relation appearing in the +pg_class catalog table (so you could find attributes of indices with this function). + +Args: + rel_id: The OID of the relation. 
+ col_id: The attnum of the column in the relation. +*/ +SELECT attname::text FROM pg_attribute WHERE attrelid=rel_id AND attnum=col_id; +$$ LANGUAGE sql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.get_column_name(rel_id oid, col_name text) RETURNS text AS $$/* +Return the UNQUOTED name for a given column in a given relation (e.g., table). + +More precisely, this function returns the unquoted name of attributes of any relation appearing in the +pg_class catalog table (so you could find attributes of indices with this function). If the given +col_name is not in the relation, we return null. + +This has the effect of validating that the given col_name actually exists in the relation. + +Args: + rel_id: The OID of the relation. + col_name: The unquoted name of the column in the relation. +*/ +SELECT attname::text FROM pg_attribute WHERE attrelid=rel_id AND attname=col_name; +$$ LANGUAGE sql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +__msar.get_column_names(rel_id oid, columns jsonb) RETURNS text[] AS $$/* +Return the QUOTED names for given columns in a given relation (e.g., table). + +- If the rel_id is given as 0, the assumption is that this is a new table, so we just apply normal +quoting rules to a column without validating anything further. +- If the rel_id is given as nonzero, and a column is given as text, then we validate that + the column name exists in the table, and use that. +- If the rel_id is given as nonzero, and the column is given as a number, then we look the column up + by attnum and use that name. + +The columns jsonb can have a mix of numerical IDs and column names. The reason for this is that we +may be adding a column algorithmically, and this saves having to modify the column adding logic +based on the IDs passed by the user for given columns. + +Args: + rel_id: The OID of the relation. + columns: A JSONB array of the unquoted names or IDs (can be mixed) of the columns. +*/ +SELECT array_agg( + CASE + WHEN rel_id=0 THEN quote_ident(col #>> '{}') + WHEN jsonb_typeof(col)='number' THEN quote_ident(msar.get_column_name(rel_id, col::integer)) + WHEN jsonb_typeof(col)='string' THEN quote_ident(msar.get_column_name(rel_id, col #>> '{}')) + END +) +FROM jsonb_array_elements(columns) AS x(col); +$$ LANGUAGE sql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION msar.get_attnum(rel_id oid, att_name text) RETURNS smallint AS $$/* +Get the attnum for a given attribute in the relation. Returns null if no such attribute exists. + +Usually, this will be used to get the attnum for a column of a table. + +Args: + rel_id: The relation where we'll look for the attribute. + att_name: The name of the attribute, unquoted. +*/ +SELECT attnum FROM pg_attribute WHERE attrelid=rel_id AND attname=att_name; +$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.is_pkey_col(rel_id oid, col_id integer) RETURNS boolean AS $$/* +Return whether the given column is in the primary key of the given relation (e.g., table). + +Args: + rel_id: The OID of the relation. + col_id: The attnum of the column in the relation. +*/ +SELECT EXISTS ( + SELECT 1 FROM pg_constraint WHERE + ARRAY[col_id::smallint] <@ conkey AND conrelid=rel_id AND contype='p' +); +$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.get_selectable_pkey_attnum(rel_id regclass) RETURNS smallint AS $$/* +Get the attnum of the single-column primary key for a relation if it has one. If not, return null.
+ +The attnum will only be returned if the current user has SELECT on that column. + +Args: + rel_id: The OID of the relation. +*/ +SELECT conkey[1] FROM pg_constraint +WHERE + conrelid = rel_id + AND cardinality(conkey) = 1 + AND contype='p' + AND has_column_privilege(rel_id, conkey[1], 'SELECT'); +$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.is_default_possibly_dynamic(tab_id oid, col_id integer) RETURNS boolean AS $$/* +Determine whether the default value for the given column is an expression or constant. + +If the column default is an expression, then we return 'True', since that could be dynamic. If the +column default is a simple constant, we return 'False'. The check is not very sophisticated, and +errs on the side of returning 'True'. We simply pull apart the pg_node_tree representation of the +expression, and check whether the root node is a known function call type. Note that we do *not* +search any deeper in the tree than the root node. This means we won't notice that some expressions +are actually constant (or at least static), if they have a function call or operator as their root +node. + +For example, the following would return 'True', even though they're not dynamic: + 3 + 5 + mathesar_types.cast_to_integer('8') + +Args: + tab_id: The OID of the table with the column. + col_id: The attnum of the column in the table. +*/ +SELECT + -- This is a typical dynamic default like NOW() or CURRENT_DATE + (split_part(substring(adbin, 2), ' ', 1) IN (('SQLVALUEFUNCTION'), ('FUNCEXPR'))) + OR + -- This is an identity column `GENERATED {ALWAYS | DEFAULT} AS IDENTITY` + (attidentity <> '') + OR + -- Other generated columns show up here. + (attgenerated <> '') +FROM pg_attribute LEFT JOIN pg_attrdef ON attrelid=adrelid AND attnum=adnum +WHERE attrelid=tab_id AND attnum=col_id; +$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.is_mathesar_id_column(tab_id oid, col_id integer) RETURNS boolean AS $$/* +Determine whether the given column is our default Mathesar ID column. + +The column in question is always attnum 1, and is created with the string + + id integer PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY + +Args: + tab_id: The OID of the table whose column we'll check + col_id: The attnum of the column in question +*/ +SELECT col_id=1 AND attname='id' AND atttypid='integer'::regtype::oid AND attidentity <> '' +FROM pg_attribute WHERE attrelid=tab_id AND attnum=col_id; +$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.get_cast_function_name(target_type regtype) RETURNS text AS $$/* +Return a string giving the appropriate name of the casting function for the target_type. + +Currently set up to duplicate the logic in our python casting function builder. This will be +changed. Given a qualified, potentially capitalized type name, we +- Remove the namespace (schema), +- Replace any white space in the type name with underscores, +- Replace double quotes in the type name (e.g., the "char" type) with '_double_quote_' +- Use the prepped type name in the name `mathesar_types.cast_to_%s`. + +Args: + target_type: This should be a type that exists. +*/ +DECLARE target_type_prepped text; +BEGIN + -- TODO: Come up with a way to build these names that is more robust against collisions. 
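+ -- For example (illustrative of the current scheme): 'double precision' maps to + -- mathesar_types.cast_to_double_precision, and the quoted type '"char"' maps to + -- mathesar_types.cast_to__double_quote_char_double_quote_.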
+ WITH unqualifier AS ( + SELECT x[array_upper(x, 1)] unqualified_type + FROM regexp_split_to_array(target_type::text, '\.') x + ), unspacer AS( + SELECT replace(unqualified_type, ' ', '_') unspaced_type + FROM unqualifier + ) + SELECT replace(unspaced_type, '"', '_double_quote_') + FROM unspacer + INTO target_type_prepped; + RETURN format('mathesar_types.cast_to_%s', target_type_prepped); +END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION msar.get_database_name(dat_id oid) RETURNS TEXT AS $$/* +Return the UNQUOTED name of a given database. + +If the database does not exist, an exception will be raised. + +Args: + dat_id: The OID of the database. +*/ +DECLARE dat_name text; +BEGIN + SELECT datname INTO dat_name FROM pg_catalog.pg_database WHERE oid=dat_id; + + IF dat_name IS NULL THEN + RAISE EXCEPTION 'Database with OID % does not exist', dat_id + USING ERRCODE = '42704'; -- undefined_object + END IF; + + RETURN dat_name; +END; +$$ LANGUAGE plpgsql STABLE RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION msar.get_role_name(rol_oid oid) RETURNS TEXT AS $$/* +Return the UNQUOTED name of a given role. + +If the role does not exist, an exception will be raised. + +Args: + rol_oid: The OID of the role. +*/ +DECLARE rol_name text; +BEGIN + SELECT rolname INTO rol_name FROM pg_catalog.pg_roles WHERE oid=rol_oid; + + IF rol_name IS NULL THEN + RAISE EXCEPTION 'Role with OID % does not exist', rol_oid + USING ERRCODE = '42704'; -- undefined_object + END IF; + + RETURN rol_name; +END; +$$ LANGUAGE plpgsql STABLE RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION msar.get_constraint_type_api_code(contype char) RETURNS TEXT AS $$/* +This function returns a string that represents the constraint type code used to describe +constraints when listing them within the Mathesar API. + +PostgreSQL constraint types are documented by the `contype` field here: +https://www.postgresql.org/docs/current/catalog-pg-constraint.html + +Notably, we don't include 't' (trigger) because triggers are a bit different structurally and we +don't support working with them (yet?) in Mathesar. +*/ +SELECT CASE contype + WHEN 'c' THEN 'check' + WHEN 'f' THEN 'foreignkey' + WHEN 'p' THEN 'primary' + WHEN 'u' THEN 'unique' + WHEN 'x' THEN 'exclude' +END; +$$ LANGUAGE SQL; + + +DROP FUNCTION IF EXISTS msar.get_constraints_for_table(oid); +CREATE OR REPLACE FUNCTION msar.get_constraints_for_table(tab_id oid) RETURNS TABLE +( + oid oid, + name text, + type text, + columns smallint[], + referent_table_oid oid, + referent_columns smallint[] +) +AS $$/* +Return data describing the constraints set on a given table. + +Args: + tab_id: The OID of the table. +*/ +WITH constraints AS ( + SELECT + oid, + conname AS name, + msar.get_constraint_type_api_code(contype::char) AS type, + conkey AS columns, + confrelid AS referent_table_oid, + confkey AS referent_columns + FROM pg_catalog.pg_constraint + WHERE conrelid = tab_id +) +SELECT * +FROM constraints +-- Only return constraints with types that we're able to classify +WHERE type IS NOT NULL +$$ LANGUAGE SQL; + + +CREATE OR REPLACE FUNCTION +msar.get_constraint_name(con_id oid) RETURNS text AS $$/* +Return the UNQUOTED constraint name of the corresponding constraint oid. + +Args: + con_id: The OID of the constraint.
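+ +Example (the OID and result here are hypothetical): + + SELECT msar.get_constraint_name(12345); -- might return 'books_pkey'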
+*/ +BEGIN + RETURN conname::text FROM pg_constraint WHERE pg_constraint.oid = con_id; +END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.get_pk_column(rel_id oid) RETURNS smallint AS $$/* +Return the attnum of the primary key column of a given relation (e.g., table), if its primary key +has exactly one column. Otherwise, return NULL. + +Args: + rel_id: The OID of the relation. +*/ +SELECT CASE WHEN array_length(conkey, 1) = 1 THEN conkey[1] END +FROM pg_constraint +WHERE contype='p' +AND conrelid=rel_id; +$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.get_pk_column(sch_name text, rel_name text) RETURNS smallint AS $$/* +Return the attnum of the primary key column of a given relation (e.g., table), if its primary key +has exactly one column. Otherwise, return NULL. + +Args: + sch_name: The schema of the relation, unquoted. + rel_name: The name of the relation, unqualified and unquoted. +*/ +SELECT CASE WHEN array_length(conkey, 1) = 1 THEN conkey[1] END +FROM pg_constraint +WHERE contype='p' +AND conrelid=msar.get_relation_oid(sch_name, rel_name); +$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.get_column_type(rel_id oid, col_id smallint) RETURNS text AS $$/* +Return the type of a given column in a relation. + +Args: + rel_id: The OID of the relation. + col_id: The attnum of the column in the relation. +*/ +SELECT atttypid::regtype +FROM pg_attribute +WHERE attnum = col_id +AND attrelid = rel_id; +$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.get_column_type(sch_name text, rel_name text, col_name text) RETURNS text AS $$/* +Return the type of a given column in a relation. + +Args: + sch_name: The schema of the relation, unquoted. + rel_name: The name of the relation, unqualified and unquoted. + col_name: The name of the column in the relation, unquoted. +*/ +SELECT atttypid::regtype +FROM pg_attribute +WHERE attname = col_name +AND attrelid = msar.get_relation_oid(sch_name, rel_name); +$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.get_interval_fields(typ_mod integer) RETURNS text AS $$/* +Return the string giving the fields for an interval typmod integer. + +This logic is ported from the relevant PostgreSQL source code, reimplemented in SQL. See the +`intervaltypmodout` function at +https://doxygen.postgresql.org/backend_2utils_2adt_2timestamp_8c.html + +Args: + typ_mod: The atttypmod from the pg_attribute table. Should be valid for the interval type. +*/ +SELECT CASE (typ_mod >> 16 & 32767) + WHEN 1 << 2 THEN 'year' + WHEN 1 << 1 THEN 'month' + WHEN 1 << 3 THEN 'day' + WHEN 1 << 10 THEN 'hour' + WHEN 1 << 11 THEN 'minute' + WHEN 1 << 12 THEN 'second' + WHEN (1 << 2) | (1 << 1) THEN 'year to month' + WHEN (1 << 3) | (1 << 10) THEN 'day to hour' + WHEN (1 << 3) | (1 << 10) | (1 << 11) THEN 'day to minute' + WHEN (1 << 3) | (1 << 10) | (1 << 11) | (1 << 12) THEN 'day to second' + WHEN (1 << 10) | (1 << 11) THEN 'hour to minute' + WHEN (1 << 10) | (1 << 11) | (1 << 12) THEN 'hour to second' + WHEN (1 << 11) | (1 << 12) THEN 'minute to second' +END; +$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.get_type_options(typ_id regtype, typ_mod integer, typ_ndims integer) RETURNS jsonb AS $$/* +Return the type options calculated from a type, typmod pair. + +This function uses a number of hard-coded constants. The form of the returned object is determined +by the input type, but the keys will be a subset of: + precision: the precision of a numeric or interval type.
See PostgreSQL docs for details. + scale: the scale of a numeric type + fields: See PostgreSQL documentation of the `interval` type. + length: Applies to "text" types where the user can specify the length. + item_type: Gives the type of array members for array-types + +Args: + typ_id: an OID, or any string representing a valid type, will work here. + typ_mod: The integer corresponding to the type options; see pg_attribute catalog table. + typ_ndims: Used to determine whether the type is actually an array without an extra join. +*/ +SELECT nullif( + CASE + WHEN typ_id = ANY('{numeric, _numeric}'::regtype[]) THEN + jsonb_build_object( + -- This calculation is modified from the relevant PostgreSQL source code. See the function + -- numeric_typmod_precision(int32) at + -- https://doxygen.postgresql.org/backend_2utils_2adt_2numeric_8c.html + 'precision', ((nullif(typ_mod, -1) - 4) >> 16) & 65535, + -- This calculation is from numeric_typmod_scale(int32) at the same location + 'scale', (((nullif(typ_mod, -1) - 4) & 2047) # 1024) - 1024 + ) + WHEN typ_id = ANY('{interval, _interval}'::regtype[]) THEN + jsonb_build_object( + 'precision', nullif(typ_mod & 65535, 65535), + 'fields', msar.get_interval_fields(typ_mod) + ) + WHEN typ_id = ANY('{bpchar, _bpchar, varchar, _varchar}'::regtype[]) THEN + -- For char and varchar types, the typmod is equal to 4 more than the set length. + jsonb_build_object('length', nullif(typ_mod, -1) - 4) + WHEN typ_id = ANY( + '{bit, varbit, time, timetz, timestamp, timestamptz}'::regtype[] + || '{_bit, _varbit, _time, _timetz, _timestamp, _timestamptz}'::regtype[] + ) THEN + -- For all these types, the typmod is equal to the precision. + jsonb_build_object( + 'precision', nullif(typ_mod, -1) + ) + ELSE jsonb_build_object() + END + || CASE + WHEN typ_ndims>0 THEN + -- This string wrangling is debatably dubious, but avoids a slow join. + jsonb_build_object('item_type', rtrim(typ_id::regtype::text, '[]')) + ELSE '{}' + END, + '{}' +) +$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION msar.get_valid_target_type_strings(typ_id regtype) RETURNS jsonb AS $$/* +Given a source type, return the target types for which Mathesar provides a casting function. + +Args: + typ_id: The type we're casting from. +*/ + +SELECT jsonb_agg(prorettype::regtype::text) +FROM pg_proc +WHERE + pronamespace=msar.get_schema_oid('mathesar_types') + AND proargtypes[0]=typ_id + AND left(proname, 5) = 'cast_'; +$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION msar.has_dependents(rel_id oid, att_id smallint) RETURNS boolean AS $$/* +Return a boolean according to whether the column identified by the given oid, attnum pair is +referenced (i.e., would dropping that column require CASCADE?). + +Args: + rel_id: The relation of the attribute. + att_id: The attnum of the attribute in the relation. +*/ +SELECT EXISTS ( + SELECT 1 FROM pg_depend WHERE refobjid=rel_id AND refobjsubid=att_id AND deptype='n' +); +$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION msar.get_fkey_map_table(tab_id oid) + RETURNS TABLE (target_oid oid, conkey smallint, confkey smallint) +AS $$/* +Generate a table mapping foreign key columns from referrer to referent tables. + +Given an input table (identified by OID), we return a table with each row representing a foreign key +constraint on that table. We return only single-column foreign keys, and only one per foreign key +column. + +Args: + tab_id: The OID of the table containing the foreign key columns to map.
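+ +For illustration (all OIDs here are hypothetical): a foreign key from column 3 of this table to +column 1 of the table with OID 17001 would produce the row (17001, 3, 1).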
+*/ +SELECT DISTINCT ON (conkey) pgc.confrelid AS target_oid, x.conkey AS conkey, y.confkey AS confkey +FROM pg_constraint pgc, LATERAL unnest(conkey) x(conkey), LATERAL unnest(confkey) y(confkey) +WHERE + pgc.conrelid = tab_id + AND pgc.contype='f' + AND cardinality(pgc.confkey) = 1 + AND has_column_privilege(tab_id, x.conkey, 'SELECT') + AND has_column_privilege(pgc.confrelid, y.confkey, 'SELECT') +ORDER BY conkey, target_oid, confkey; +$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.list_column_privileges_for_current_role(tab_id regclass, attnum smallint) RETURNS jsonb AS $$/* +Return a JSONB array of all privileges current_user holds on the given column of the passed table. +*/ +SELECT coalesce(jsonb_agg(privilege), '[]'::jsonb) +FROM + unnest(ARRAY['SELECT', 'INSERT', 'UPDATE', 'REFERENCES']) AS x(privilege), + pg_catalog.has_column_privilege(tab_id, attnum, privilege) as has_privilege +WHERE has_privilege; +$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION msar.get_column_info(tab_id regclass) RETURNS jsonb AS $$/* +Given a table identifier, return an array of objects describing the columns of the table. + +Each returned JSON object in the array will have the form: + { + "id": <int>, + "name": <str>, + "type": <str>, + "type_options": <obj>, + "nullable": <bool>, + "primary_key": <bool>, + "default": {"value": <str>, "is_dynamic": <bool>}, + "has_dependents": <bool>, + "description": <str>, + "current_role_priv": [<str>, <str>, ...], + "valid_target_types": [<str>, <str>, ...] + } + +The `type_options` object is described in the docstring of `msar.get_type_options`. The `default` +object has the keys: + value: A string giving the value (as an SQL expression) of the default. + is_dynamic: A boolean giving whether the default is (likely to be) dynamic. +*/ +SELECT jsonb_agg( + jsonb_build_object( + 'id', attnum, + 'name', attname, + 'type', CASE WHEN attndims>0 THEN '_array' ELSE atttypid::regtype::text END, + 'type_options', msar.get_type_options(atttypid, atttypmod, attndims), + 'nullable', NOT attnotnull, + 'primary_key', COALESCE(pgi.indisprimary, false), + 'default', + nullif( + jsonb_strip_nulls( + jsonb_build_object( + 'value', + CASE + WHEN attidentity='' THEN pg_get_expr(adbin, tab_id) + ELSE 'identity' + END, + 'is_dynamic', msar.is_default_possibly_dynamic(tab_id, attnum) + ) + ), + jsonb_build_object() + ), + 'has_dependents', msar.has_dependents(tab_id, attnum), + 'description', msar.col_description(tab_id, attnum), + 'current_role_priv', msar.list_column_privileges_for_current_role(tab_id, attnum), + 'valid_target_types', msar.get_valid_target_type_strings(atttypid) + ) +) +FROM pg_attribute pga + LEFT JOIN pg_index pgi ON pga.attrelid=pgi.indrelid AND pga.attnum=ANY(pgi.indkey) + LEFT JOIN pg_attrdef pgd ON pga.attrelid=pgd.adrelid AND pga.attnum=pgd.adnum +WHERE pga.attrelid=tab_id AND pga.attnum > 0 and NOT attisdropped; +$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION msar.column_exists(tab_id oid, col_name text) RETURNS boolean AS $$/* +Return true if the given column exists in the table, false otherwise. +*/ +SELECT EXISTS (SELECT 1 FROM pg_attribute WHERE attrelid=tab_id AND attname=col_name); +$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.list_table_privileges_for_current_role(tab_id regclass) RETURNS jsonb AS $$/* +Return a JSONB array of all privileges current_user holds on the passed table.
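+ +Example result (illustrative): ["SELECT", "INSERT", "UPDATE"]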
+*/ +SELECT coalesce(jsonb_agg(privilege), '[]'::jsonb) +FROM + unnest( + ARRAY['SELECT', 'INSERT', 'UPDATE', 'DELETE', 'TRUNCATE', 'REFERENCES', 'TRIGGER'] + ) AS x(privilege), + pg_catalog.has_table_privilege(tab_id, privilege) as has_privilege +WHERE has_privilege; +$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION msar.get_table(tab_id regclass) RETURNS jsonb AS $$/* +Given a table identifier, return a JSON object describing the table. + +Each returned JSON object will have the form: + { + "oid": <int>, + "name": <str>, + "schema": <int>, + "description": <str>, + "owner_oid": <int>, + "current_role_priv": [<str>], + "current_role_owns": <bool> + } + +Args: + tab_id: The OID or name of the table. +*/ +SELECT jsonb_build_object( + 'oid', oid::bigint, + 'name', relname, + 'schema', relnamespace::bigint, + 'description', msar.obj_description(oid, 'pg_class'), + 'owner_oid', relowner::bigint, + 'current_role_priv', msar.list_table_privileges_for_current_role(tab_id), + 'current_role_owns', pg_catalog.pg_has_role(relowner, 'USAGE') +) FROM pg_catalog.pg_class WHERE oid = tab_id; +$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION msar.get_table_info(sch_id regnamespace) RETURNS jsonb AS $$/* +Given a schema identifier, return an array of objects describing the tables of the schema. + +Each returned JSON object in the array will have the form: + { + "oid": <int>, + "name": <str>, + "schema": <int>, + "description": <str>, + "owner_oid": <int>, + "current_role_priv": [<str>], + "current_role_owns": <bool> + } + +Args: + sch_id: The OID or name of the schema. +*/ +SELECT coalesce( + jsonb_agg( + jsonb_build_object( + 'oid', pgc.oid::bigint, + 'name', pgc.relname, + 'schema', pgc.relnamespace::bigint, + 'description', msar.obj_description(pgc.oid, 'pg_class'), + 'owner_oid', pgc.relowner::bigint, + 'current_role_priv', msar.list_table_privileges_for_current_role(pgc.oid), + 'current_role_owns', pg_catalog.pg_has_role(pgc.relowner, 'USAGE') + ) + ), + '[]'::jsonb +) +FROM pg_catalog.pg_class AS pgc + LEFT JOIN pg_catalog.pg_namespace AS pgn ON pgc.relnamespace = pgn.oid +WHERE pgc.relnamespace = sch_id AND pgc.relkind = 'r'; +$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.list_schema_privileges_for_current_role(sch_id regnamespace) RETURNS jsonb AS $$/* +Return a JSONB array of all privileges current_user holds on the passed schema. +*/ +SELECT coalesce(jsonb_agg(privilege), '[]'::jsonb) +FROM + unnest( + ARRAY['USAGE', 'CREATE'] + ) AS x(privilege), + pg_catalog.has_schema_privilege(sch_id, privilege) as has_privilege +WHERE has_privilege; +$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION msar.schema_info_table() RETURNS TABLE +( + oid bigint, -- The OID of the schema. + name name, -- Name of the schema. + description text, -- The description of the schema on the database. + owner_oid bigint, -- The owner of the schema. + current_role_priv jsonb, -- Privileges of the current role on the schema. + current_role_owns boolean, -- Whether the current role owns the schema. + table_count integer -- The number of tables in the schema.
+) AS $$ +SELECT + s.oid::bigint AS oid, + s.nspname AS name, + pg_catalog.obj_description(s.oid) AS description, + s.nspowner::bigint AS owner_oid, + msar.list_schema_privileges_for_current_role(s.oid) AS current_role_priv, + pg_catalog.pg_has_role(s.nspowner, 'USAGE') AS current_role_owns, + COALESCE(count(c.oid), 0) AS table_count +FROM pg_catalog.pg_namespace s +LEFT JOIN pg_catalog.pg_class c ON c.relnamespace = s.oid AND c.relkind = 'r' +GROUP BY + s.oid, + s.nspname, + s.nspowner; +-- Filter on relkind so that we only count tables. This must be done in the ON clause so that +-- we still get a row for schemas with no tables. +$$ LANGUAGE SQL STABLE; + + +CREATE OR REPLACE FUNCTION msar.list_schemas() RETURNS jsonb AS $$/* +Return a json array of objects describing the user-defined schemas in the database. + +PostgreSQL system schemas are ignored. + +Internal Mathesar-specific schemas are INCLUDED. These should be filtered out by the caller. This +behavior is to avoid tight coupling between this function and other SQL files that might need to +define additional Mathesar-specific schemas as our codebase grows. + +Each returned JSON object in the array will have the form: + { + "oid": <int>, + "name": <str>, + "description": <str>, + "owner_oid": <int>, + "current_role_priv": [<str>], + "current_role_owns": <bool>, + "table_count": <int> + } +*/ +SELECT jsonb_agg(schema_data) +FROM msar.schema_info_table() AS schema_data +WHERE schema_data.name <> 'information_schema' +AND schema_data.name NOT LIKE 'pg_%'; +$$ LANGUAGE SQL STABLE; + + +CREATE OR REPLACE FUNCTION msar.get_schema(sch_id regnamespace) RETURNS jsonb AS $$/* +Return a json object describing the user-defined schema in the database. + +Each returned JSON object will have the form: + { + "oid": <int>, + "name": <str>, + "description": <str>, + "owner_oid": <int>, + "current_role_priv": [<str>], + "current_role_owns": <bool>, + "table_count": <int> + } +*/ +SELECT to_jsonb(schema_data) +FROM msar.schema_info_table() AS schema_data +WHERE schema_data.oid = sch_id; +$$ LANGUAGE SQL STABLE; + + +CREATE OR REPLACE FUNCTION msar.list_schema_privileges(sch_id regnamespace) RETURNS jsonb AS $$/* +Given a schema, returns a json array of objects with direct, non-default schema privileges. + +Each returned JSON object in the array has the form: + { + "role_oid": <int>, + "direct": [<str>] + } +*/ +WITH priv_cte AS ( + SELECT + jsonb_build_object( + 'role_oid', pgr.oid::bigint, + 'direct', jsonb_agg(acl.privilege_type) + ) AS p + FROM + pg_catalog.pg_roles AS pgr, + pg_catalog.pg_namespace AS pgn, + aclexplode(COALESCE(pgn.nspacl, acldefault('n', pgn.nspowner))) AS acl + WHERE pgn.oid = sch_id AND pgr.oid = acl.grantee AND pgr.rolname NOT LIKE 'pg_%' + GROUP BY pgr.oid, pgn.oid +) +SELECT COALESCE(jsonb_agg(priv_cte.p), '[]'::jsonb) FROM priv_cte; +$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT; + + +DROP FUNCTION IF EXISTS msar.role_info_table(); +CREATE OR REPLACE FUNCTION msar.role_info_table() RETURNS TABLE +( + oid bigint, -- The OID of the role. + name name, -- Name of the role. + super boolean, -- Whether the role has SUPERUSER status. + inherits boolean, -- Whether the role has INHERIT attribute. + create_role boolean, -- Whether the role has CREATEROLE attribute. + create_db boolean, -- Whether the role has CREATEDB attribute. + login boolean, -- Whether the role has LOGIN attribute. + description text, -- A description of the role + members jsonb -- The member roles that *directly* inherit the role. +) AS $$/* +Returns a table describing all the roles present on the database server.
+*/ +WITH rolemembers as ( + SELECT + pgr.oid AS oid, + jsonb_agg( + jsonb_build_object( + 'oid', pgm.member::bigint, + 'admin', pgm.admin_option + ) + ) AS members + FROM pg_catalog.pg_roles pgr + INNER JOIN pg_catalog.pg_auth_members pgm ON pgr.oid=pgm.roleid + GROUP BY pgr.oid +) +SELECT + r.oid::bigint AS oid, + r.rolname AS name, + r.rolsuper AS super, + r.rolinherit AS inherits, + r.rolcreaterole AS create_role, + r.rolcreatedb AS create_db, + r.rolcanlogin AS login, + pg_catalog.shobj_description(r.oid, 'pg_authid') AS description, + rolemembers.members AS members +FROM pg_catalog.pg_roles r +LEFT OUTER JOIN rolemembers ON r.oid = rolemembers.oid; +$$ LANGUAGE SQL STABLE; + + +CREATE OR REPLACE FUNCTION msar.list_roles() RETURNS jsonb AS $$/* +Return a json array of objects with the list of roles in a database server, +excluding pg system roles. + +Each returned JSON object in the array has the form: + { + "oid": <int>, + "name": <str>, + "super": <bool>, + "inherits": <bool>, + "create_role": <bool>, + "create_db": <bool>, + "login": <bool>, + "description": <str>, + "members": <[ + { "oid": <int>, "admin": <bool> } + ]|null> + } +*/ +SELECT jsonb_agg(role_data) +FROM msar.role_info_table() AS role_data +WHERE role_data.name NOT LIKE 'pg_%'; +$$ LANGUAGE SQL STABLE; + + +CREATE OR REPLACE FUNCTION msar.get_role(rolename text) RETURNS jsonb AS $$/* +Given a rolename, return a JSON object describing the role in a database server. + +The returned JSON object has the form: + { + "oid": <int>, + "name": <str>, + "super": <bool>, + "inherits": <bool>, + "create_role": <bool>, + "create_db": <bool>, + "login": <bool>, + "description": <str>, + "members": <[ + { "oid": <int>, "admin": <bool> } + ]|null> + } +*/ +SELECT to_jsonb(role_data) +FROM msar.role_info_table() AS role_data +WHERE role_data.name = rolename; +$$ LANGUAGE SQL STABLE; + + +CREATE OR REPLACE FUNCTION +msar.build_grant_membership_expr(parent_rol_id regrole, g_roles oid[]) RETURNS TEXT AS $$ +SELECT string_agg( + format( + 'GRANT %1$I TO %2$I', + msar.get_role_name(parent_rol_id), + msar.get_role_name(rol_id) + ), + E';\n' +) || E';\n' +FROM unnest(g_roles) as x(rol_id); +$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.build_revoke_membership_expr(parent_rol_id regrole, r_roles oid[]) RETURNS TEXT AS $$ +SELECT string_agg( + format( + 'REVOKE %1$I FROM %2$I', + msar.get_role_name(parent_rol_id), + msar.get_role_name(rol_id) + ), + E';\n' +) || E';\n' +FROM unnest(r_roles) as x(rol_id); +$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION msar.set_members_to_role(parent_rol_id regrole, members oid[]) RETURNS jsonb AS $$/* +Grant/Revoke direct membership to/from roles. + +Returns a json object describing the updated information of the parent role. + + { + "oid": <int>, + "name": <str>, + "super": <bool>, + "inherits": <bool>, + "create_role": <bool>, + "create_db": <bool>, + "login": <bool>, + "description": <str>, + "members": <[ + { "oid": <int>, "admin": <bool> } + ]|null> + } + +Args: + parent_rol_id: The OID of role whose membership will be granted/revoked to/from other roles. + members: An array of role OID(s) whom we want to grant direct membership of the parent role. + Only the OID(s) present in the array will be granted membership of the parent role; + membership will be revoked for existing members not present in this array. +*/ +DECLARE + parent_role_name text := msar.get_role_name(parent_rol_id); + parent_role_info jsonb := msar.get_role(parent_role_name); + all_members_array bigint[]; + revoke_members_array bigint[]; + set_members_expr text; +BEGIN + -- Get all the members of parent_role.
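+ -- The statements below then compute which current members are absent from the requested + -- member list, revoke membership from that difference, and grant membership to the roles + -- in the requested list.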
+ SELECT array_agg(x.oid) + FROM jsonb_to_recordset( + CASE WHEN parent_role_info ->> 'members' IS NOT NULL + THEN parent_role_info -> 'members' + ELSE NULL END + ) AS x(oid oid, admin boolean) + INTO all_members_array; + -- Find all the roles whose membership we want to revoke. + SELECT ARRAY( + SELECT unnest(all_members_array) + EXCEPT + SELECT unnest(members) + ) INTO revoke_members_array; + -- REVOKE/GRANT membership for parent_role. + set_members_expr := concat_ws( + E'\n', + msar.build_revoke_membership_expr(parent_rol_id, revoke_members_array), + msar.build_grant_membership_expr(parent_rol_id, members) + ); + EXECUTE set_members_expr; + -- Return the updated parent_role info including membership details. + RETURN msar.get_role(parent_role_name); +END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.get_current_role() RETURNS jsonb AS $$/* +Returns a JSON object describing the current_role and the parent role(s) whose +privileges are immediately available to current_role without doing SET ROLE. +*/ +SELECT jsonb_build_object( + 'current_role', msar.get_role(current_role), + 'parent_roles', COALESCE(array_remove( + array_agg( + CASE WHEN pg_has_role(current_role, role_data.name, 'USAGE') + THEN msar.get_role(role_data.name) END + ), NULL + ), ARRAY[]::jsonb[]) +) +FROM msar.role_info_table() AS role_data +WHERE role_data.name NOT LIKE 'pg_%' +AND role_data.name != current_role; +$$ LANGUAGE SQL STABLE; + + +CREATE OR REPLACE FUNCTION msar.list_db_priv() RETURNS jsonb AS $$/* +Returns a json array of objects with direct, non-inherited database privileges for roles on the +current database. + +Each returned JSON object in the array has the form: + { + "role_oid": <int>, + "direct": [<str>] + } +*/ +WITH priv_cte AS ( + SELECT + jsonb_build_object( + 'role_oid', pgr.oid::bigint, + 'direct', jsonb_agg(acl.privilege_type) + ) AS p + FROM + pg_catalog.pg_roles AS pgr, + pg_catalog.pg_database AS pgd, + aclexplode(COALESCE(pgd.datacl, acldefault('d', pgd.datdba))) AS acl + WHERE pgd.datname = pg_catalog.current_database() + AND pgr.oid = acl.grantee AND pgr.rolname NOT LIKE 'pg_%' + GROUP BY pgr.oid, pgd.oid +) +SELECT COALESCE(jsonb_agg(priv_cte.p), '[]'::jsonb) FROM priv_cte; +$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.list_database_privileges_for_current_role(dat_id oid) RETURNS jsonb AS $$/* +Return a JSONB array of all privileges current_user holds on the passed database. +*/ +SELECT coalesce(jsonb_agg(privilege), '[]'::jsonb) +FROM + unnest( + ARRAY['CONNECT', 'CREATE', 'TEMPORARY'] + ) AS x(privilege), + pg_catalog.has_database_privilege(dat_id, privilege) as has_privilege +WHERE has_privilege; +$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION msar.get_current_database_info() RETURNS jsonb AS $$/* +Return information about the current database.
+ +The returned JSON object has the form: + { + "oid": <int>, + "name": <str>, + "owner_oid": <int>, + "current_role_priv": [<str>], + "current_role_owns": <bool> + } +*/ +SELECT jsonb_build_object( + 'oid', pgd.oid::bigint, + 'name', pgd.datname, + 'owner_oid', pgd.datdba::bigint, + 'current_role_priv', msar.list_database_privileges_for_current_role(pgd.oid), + 'current_role_owns', pg_catalog.pg_has_role(pgd.datdba, 'USAGE') +) FROM pg_catalog.pg_database AS pgd +WHERE pgd.datname = pg_catalog.current_database(); +$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION msar.list_table_privileges(tab_id regclass) RETURNS jsonb AS $$/* +Given a table, returns a json array of objects with direct, non-default table privileges. + +Each returned JSON object in the array has the form: + { + "role_oid": <int>, + "direct": [<str>] + } +*/ +WITH priv_cte AS ( + SELECT + jsonb_build_object( + 'role_oid', pgr.oid::bigint, + 'direct', jsonb_agg(acl.privilege_type) + ) AS p + FROM + pg_catalog.pg_roles AS pgr, + pg_catalog.pg_class AS pgc, + aclexplode(COALESCE(pgc.relacl, acldefault('r', pgc.relowner))) AS acl + WHERE pgc.oid = tab_id AND pgr.oid = acl.grantee AND pgr.rolname NOT LIKE 'pg_%' + GROUP BY pgr.oid, pgc.oid +) +SELECT COALESCE(jsonb_agg(priv_cte.p), '[]'::jsonb) FROM priv_cte; +$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT; + + +---------------------------------------------------------------------------------------------------- +---------------------------------------------------------------------------------------------------- +-- ROLE MANIPULATION FUNCTIONS +-- +-- Functions in this section should always involve creating, granting, or revoking privileges or +-- roles +---------------------------------------------------------------------------------------------------- +---------------------------------------------------------------------------------------------------- + + +CREATE OR REPLACE FUNCTION +msar.create_role(rolename text, password_ text, login_ boolean) RETURNS jsonb AS $$/* +Creates a login/non-login role, depending on whether the login_ flag is set. Only the rolename +argument is required; the password_ argument is required only if login_ is set to true. + +Returns a JSON object describing the created role in the form: + { + "oid": <int>, + "name": <str>, + "super": <bool>, + "inherits": <bool>, + "create_role": <bool>, + "create_db": <bool>, + "login": <bool>, + "description": <str>, + "members": <[ + { "oid": <int>, "admin": <bool> } + ]|null> + } + +Args: + rolename: The name of the role to be created, unquoted. + password_: The password for the rolename to set, unquoted. + login_: Specify whether the role to be created can log in. +*/ +BEGIN + CASE WHEN login_ THEN + EXECUTE format('CREATE USER %I WITH PASSWORD %L', rolename, password_); + ELSE + EXECUTE format('CREATE ROLE %I', rolename); + END CASE; + RETURN msar.get_role(rolename); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION +msar.drop_role(rol_id regrole) RETURNS void AS $$/* +Drop a role. + +Note: +- To drop a superuser role, you must be a superuser yourself. +- To drop non-superuser roles, you must have CREATEROLE privilege and have been granted ADMIN OPTION on the role. + +Args: + rol_id: The OID of the role to drop on the database. +*/ +BEGIN + EXECUTE format('DROP ROLE %I', msar.get_role_name(rol_id)); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION +msar.build_database_privilege_replace_expr(rol_id regrole, privileges_ jsonb) RETURNS TEXT AS $$ +SELECT string_agg( + format( + concat( + CASE WHEN privileges_ ?
val THEN 'GRANT' ELSE 'REVOKE' END, + ' %1$s ON DATABASE %2$I ', + CASE WHEN privileges_ ? val THEN 'TO' ELSE 'FROM' END, + ' %3$I' + ), + val, + pg_catalog.current_database(), + msar.get_role_name(rol_id) + ), + E';\n' +) || E';\n' +FROM unnest(ARRAY['CONNECT', 'CREATE', 'TEMPORARY']) as x(val); +$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.replace_database_privileges_for_roles(priv_spec jsonb) RETURNS jsonb AS $$/* +Grant/Revoke privileges for a set of roles on the current database. + +Args: + priv_spec: An array defining the privileges to grant or revoke for each role. + +Each object in the priv_spec should have the form: +{role_oid: <int>, direct: SET<"CONNECT"|"CREATE"|"TEMPORARY">} + +Any privilege that exists in the direct subarray will be granted. Any which is missing will be +revoked. +*/ +BEGIN +EXECUTE string_agg( + msar.build_database_privilege_replace_expr(role_oid, direct), + E';\n' +) || ';' +FROM jsonb_to_recordset(priv_spec) AS x(role_oid regrole, direct jsonb); +RETURN msar.list_db_priv(); +END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.build_schema_privilege_replace_expr(sch_id regnamespace, rol_id regrole, privileges_ jsonb) + RETURNS TEXT AS $$ +SELECT string_agg( + format( + concat( + CASE WHEN privileges_ ? val THEN 'GRANT' ELSE 'REVOKE' END, + ' %1$s ON SCHEMA %2$I ', + CASE WHEN privileges_ ? val THEN 'TO' ELSE 'FROM' END, + ' %3$I' + ), + val, + msar.get_schema_name(sch_id), + msar.get_role_name(rol_id) + ), + E';\n' +) || E';\n' +FROM unnest(ARRAY['USAGE', 'CREATE']) as x(val); +$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.replace_schema_privileges_for_roles(sch_id regnamespace, priv_spec jsonb) RETURNS jsonb AS $$/* +Grant/Revoke privileges for a set of roles on the given schema. + +Args: + sch_id: The OID of the schema for which we're setting privileges for roles. + priv_spec: An array defining the privileges to grant or revoke for each role. + +Each object in the priv_spec should have the form: +{role_oid: <int>, direct: SET<"USAGE"|"CREATE">} + +Any privilege that exists in the direct subarray will be granted. Any which is missing will be +revoked. +*/ +BEGIN +EXECUTE string_agg( + msar.build_schema_privilege_replace_expr(sch_id, role_oid, direct), + E';\n' +) || ';' +FROM jsonb_to_recordset(priv_spec) AS x(role_oid regrole, direct jsonb); +RETURN msar.list_schema_privileges(sch_id); +END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.build_table_privilege_replace_expr(tab_id regclass, rol_id regrole, privileges_ jsonb) + RETURNS TEXT AS $$ +SELECT string_agg( + format( + concat( + CASE WHEN privileges_ ? val THEN 'GRANT' ELSE 'REVOKE' END, + ' %1$s ON TABLE %2$I.%3$I ', + CASE WHEN privileges_ ? val THEN 'TO' ELSE 'FROM' END, + ' %4$I' + ), + val, + msar.get_relation_schema_name(tab_id), + msar.get_relation_name(tab_id), + msar.get_role_name(rol_id) + ), + E';\n' +) || E';\n' +FROM unnest(ARRAY['INSERT', 'SELECT', 'UPDATE', 'DELETE', 'TRUNCATE', 'REFERENCES', 'TRIGGER']) as x(val); +$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.replace_table_privileges_for_roles(tab_id regclass, priv_spec jsonb) RETURNS jsonb AS $$/* +Grant/Revoke privileges for a set of roles on the given table. + +Args: + tab_id: The OID of the table for which we're setting privileges for roles.
+ priv_spec: An array defining the privileges to grant or revoke for each role. + +Each object in the priv_spec should have the form: +{role_oid: <int>, direct: SET<"INSERT"|"SELECT"|"UPDATE"|"DELETE"|"TRUNCATE"|"REFERENCES"|"TRIGGER">} + +Any privilege that exists in the direct subarray will be granted. Any which is missing will be +revoked. +*/ +BEGIN +EXECUTE string_agg( + msar.build_table_privilege_replace_expr(tab_id, role_oid, direct), + E';\n' +) || ';' +FROM jsonb_to_recordset(priv_spec) AS x(role_oid regrole, direct jsonb); +RETURN msar.list_table_privileges(tab_id); +END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +DROP FUNCTION IF EXISTS msar.transfer_database_ownership(regrole); +CREATE OR REPLACE FUNCTION +msar.transfer_database_ownership(new_owner_oid regrole) RETURNS jsonb AS $$/* +Transfers ownership of the current database to a new owner. + +Args: + new_owner_oid: The OID of the role whom we want to be the new owner of the current database. + +NOTE: To successfully transfer ownership of a database to a new owner the current user must: + - Be a Superuser/Owner of the current database. + - Be a `MEMBER` of the new owning role. i.e. The current role should be able to `SET ROLE` + to the new owning role. + - Have `CREATEDB` privilege. +*/ +BEGIN + EXECUTE format( + 'ALTER DATABASE %I OWNER TO %I', + pg_catalog.current_database(), + msar.get_role_name(new_owner_oid) + ); + RETURN msar.get_current_database_info(); +END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.transfer_schema_ownership(sch_id regnamespace, new_owner_oid regrole) RETURNS jsonb AS $$/* +Transfers ownership of a given schema to a new owner. + +Args: + sch_id: The OID of the schema to transfer. + new_owner_oid: The OID of the role whom we want to be the new owner of the schema. + +NOTE: To successfully transfer ownership of a schema to a new owner the current user must: + - Be a Superuser/Owner of the schema. + - Be a `MEMBER` of the new owning role. i.e. The current role should be able to `SET ROLE` + to the new owning role. + - Have `CREATE` privilege for the database. +*/ +BEGIN + EXECUTE format( + 'ALTER SCHEMA %I OWNER TO %I', + msar.get_schema_name(sch_id), + msar.get_role_name(new_owner_oid) + ); + RETURN msar.get_schema(sch_id); +END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.transfer_table_ownership(tab_id regclass, new_owner_oid regrole) RETURNS jsonb AS $$/* +Transfers ownership of a given table to a new owner. + +Args: + tab_id: The OID of the table to transfer. + new_owner_oid: The OID of the role whom we want to be the new owner of the table. + +NOTE: To successfully transfer ownership of a table to a new owner the current user must: + - Be a Superuser/Owner of the table. + - Be a `MEMBER` of the new owning role. i.e. The current role should be able to `SET ROLE` + to the new owning role. + - Have `CREATE` privilege on the table's schema.
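+ +A minimal usage sketch (the table OID and role name here are hypothetical): + + SELECT msar.transfer_table_ownership(17001, 'analyst'::regrole);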
+*/
+BEGIN
+  EXECUTE format(
+    'ALTER TABLE %I.%I OWNER TO %I',
+    msar.get_relation_schema_name(tab_id),
+    msar.get_relation_name(tab_id),
+    msar.get_role_name(new_owner_oid)
+  );
+  RETURN msar.get_table(tab_id);
+END;
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+
+----------------------------------------------------------------------------------------------------
+----------------------------------------------------------------------------------------------------
+-- ALTER SCHEMA FUNCTIONS
+--
+-- Functions in this section should always involve 'ALTER SCHEMA'.
+----------------------------------------------------------------------------------------------------
+----------------------------------------------------------------------------------------------------
+
+DROP FUNCTION IF EXISTS msar.rename_schema(oid, text);
+CREATE OR REPLACE FUNCTION msar.rename_schema(sch_id oid, new_sch_name text) RETURNS void AS $$/*
+Change a schema's name.
+
+Args:
+  sch_id: The OID of the schema to rename
+  new_sch_name: A new name for the schema, UNQUOTED
+*/
+DECLARE
+  old_sch_name text := msar.get_schema_name(sch_id);
+BEGIN
+  IF old_sch_name = new_sch_name THEN
+    -- Return early if the names are the same. This avoids an error from Postgres.
+    RETURN;
+  END IF;
+  EXECUTE format('ALTER SCHEMA %I RENAME TO %I', old_sch_name, new_sch_name);
+END;
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION msar.set_schema_description(
+  sch_id oid,
+  description text
+) RETURNS void AS $$/*
+Set the PostgreSQL description (aka COMMENT) of a schema.
+
+Descriptions are removed by passing an empty string or NULL.
+
+Args:
+  sch_id: The OID of the schema.
+  description: The new description, UNQUOTED
+*/
+BEGIN
+  EXECUTE format('COMMENT ON SCHEMA %I IS %L', msar.get_schema_name(sch_id), description);
+END;
+$$ LANGUAGE plpgsql;
+
+
+DROP FUNCTION IF EXISTS msar.patch_schema(oid, jsonb);
+CREATE OR REPLACE FUNCTION msar.patch_schema(sch_id oid, patch jsonb) RETURNS jsonb AS $$/*
+Modify a schema according to the given patch.
+
+Args:
+  sch_id: The OID of the schema.
+  patch: A JSONB object with the following keys:
+    - name: (optional) The new name of the schema
+    - description: (optional) The new description of the schema. To remove a description, pass an
+      empty string or NULL.
+
+Returns:
+  A json object describing the user-defined schema in the database.
+*/
+BEGIN
+  PERFORM msar.rename_schema(sch_id, patch->>'name');
+  PERFORM CASE WHEN patch ? 'description'
+    THEN msar.set_schema_description(sch_id, patch->>'description') END;
+  RETURN msar.get_schema(sch_id);
+END;
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION msar.patch_schema(sch_name text, patch jsonb) RETURNS void AS $$/*
+Modify a schema according to the given patch.
+
+Args:
+  sch_name: The name of the schema, UNQUOTED
+  patch: A JSONB object as specified by msar.patch_schema(sch_id oid, patch jsonb)
+*/
+BEGIN
+  PERFORM msar.patch_schema(msar.get_schema_oid(sch_name), patch);
+END;
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+
+----------------------------------------------------------------------------------------------------
+----------------------------------------------------------------------------------------------------
+-- CREATE SCHEMA FUNCTIONS
+--
+-- Create a schema.
+----------------------------------------------------------------------------------------------------
+----------------------------------------------------------------------------------------------------
+
+-- This gets rid of `msar.create_schema` as defined in Mathesar 0.1.7. We don't want that old
+-- function definition hanging around because it will get invoked when passing NULL as the second
+-- argument like `msar.create_schema('foo', NULL)`.
+DROP FUNCTION IF EXISTS msar.create_schema(text, boolean);
+
+CREATE OR REPLACE FUNCTION msar.create_schema_if_not_exists(sch_name text) RETURNS oid AS $$/*
+Ensure that a schema exists in the database.
+
+Args:
+  sch_name: the name of the schema to be created, UNQUOTED.
+
+Returns:
+  The integer OID of the schema
+*/
+BEGIN
+  EXECUTE 'CREATE SCHEMA IF NOT EXISTS ' || quote_ident(sch_name);
+  RETURN msar.get_schema_oid(sch_name);
+END;
+$$ LANGUAGE plpgsql;
+
+
+DROP FUNCTION IF EXISTS msar.create_schema(text, text);
+CREATE OR REPLACE FUNCTION msar.create_schema(
+  sch_name text,
+  own_id regrole,
+  description text DEFAULT ''
+) RETURNS jsonb AS $$/*
+Create a schema, possibly with a description.
+
+If a schema with the given name already exists, an exception will be raised.
+
+Args:
+  sch_name: The name of the schema to be created, UNQUOTED.
+  own_id: (optional) The OID of the role who will own the new schema.
+  description: (optional) A description for the schema, UNQUOTED.
+
+Returns:
+  A json object describing the user-defined schema in the database.
+
+Note:
+  - This function does not support IF NOT EXISTS. Supporting both descriptions and if_not_exists
+    in the same function produced too many edge cases and inconsistencies, and we don't have a
+    strong enough use case to justify the complexity.
+  - If own_id is NULL, the current role will be the owner of the new schema.
+*/
+DECLARE schema_oid oid;
+BEGIN
+  EXECUTE 'CREATE SCHEMA ' || quote_ident(sch_name);
+  schema_oid := msar.get_schema_oid(sch_name);
+  PERFORM msar.set_schema_description(schema_oid, description);
+  IF own_id IS NOT NULL THEN
+    PERFORM msar.transfer_schema_ownership(schema_oid, own_id);
+  END IF;
+  RETURN msar.get_schema(schema_oid);
+END;
+$$ LANGUAGE plpgsql;
+
+
+----------------------------------------------------------------------------------------------------
+----------------------------------------------------------------------------------------------------
+-- DROP DATABASE FUNCTIONS
+--
+-- Drop a database.
+----------------------------------------------------------------------------------------------------
+----------------------------------------------------------------------------------------------------
+
+
+CREATE OR REPLACE FUNCTION
+msar.drop_database_query(dat_id oid) RETURNS text AS $$/*
+Return the SQL query to drop a database.
+
+If no database exists with the given oid, an exception will be raised.
+
+Args:
+  dat_id: The OID of the database to drop.
+*/
+BEGIN
+  RETURN format('DROP DATABASE %I', msar.get_database_name(dat_id));
+END;
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION
+msar.drop_database_query(dat_name text) RETURNS text AS $$/*
+Return the SQL query to drop a database.
+
+Args:
+  dat_name: The unquoted name of the database to be dropped.
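+
+Illustrative example (assuming a database named "old_db"):
+  SELECT msar.drop_database_query('old_db');  -- returns: DROP DATABASE "old_db"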
+*/
+BEGIN
+  RETURN format('DROP DATABASE %I', dat_name);
+END;
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+
+----------------------------------------------------------------------------------------------------
+----------------------------------------------------------------------------------------------------
+-- DROP SCHEMA FUNCTIONS
+--
+-- Drop a schema.
+----------------------------------------------------------------------------------------------------
+----------------------------------------------------------------------------------------------------
+
+
+CREATE OR REPLACE FUNCTION
+msar.drop_schema(sch_name text, cascade_ boolean) RETURNS void AS $$/*
+Drop a schema
+
+If no schema exists with the given name, an exception will be raised.
+
+Args:
+  sch_name: The unquoted name of the schema to be dropped
+  cascade_: When true, dependent objects will be dropped automatically
+*/
+DECLARE
+  cascade_sql text = CASE cascade_ WHEN TRUE THEN ' CASCADE' ELSE '' END;
+BEGIN
+  EXECUTE 'DROP SCHEMA ' || quote_ident(sch_name) || cascade_sql;
+END;
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION
+msar.drop_schema(sch_id oid, cascade_ boolean) RETURNS void AS $$/*
+Drop a schema
+
+If no schema exists with the given oid, an exception will be raised.
+
+Args:
+  sch_id: The OID of the schema to drop
+  cascade_: When true, dependent objects will be dropped automatically
+*/
+BEGIN
+  PERFORM msar.drop_schema(msar.get_schema_name(sch_id), cascade_);
+END;
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+
+----------------------------------------------------------------------------------------------------
+----------------------------------------------------------------------------------------------------
+-- ALTER TABLE FUNCTIONS
+--
+-- Functions in this section should always involve 'ALTER TABLE'.
+----------------------------------------------------------------------------------------------------
+----------------------------------------------------------------------------------------------------
+
+
+-- Rename table ------------------------------------------------------------------------------------
+
+DROP FUNCTION IF EXISTS msar.rename_table(text, text, text);
+CREATE OR REPLACE FUNCTION
+msar.rename_table(sch_name text, old_tab_name text, new_tab_name text) RETURNS void AS $$/*
+Change a table's name.
+
+Args:
+  sch_name: unquoted schema name where the table lives
+  old_tab_name: unquoted, unqualified original table name
+  new_tab_name: unquoted, unqualified new table name
+*/
+BEGIN
+  IF old_tab_name = new_tab_name THEN
+    -- Return early if the names are the same. This avoids an error from Postgres.
+    RETURN;
+  END IF;
+  EXECUTE format('ALTER TABLE %I.%I RENAME TO %I', sch_name, old_tab_name, new_tab_name);
+END;
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+
+DROP FUNCTION IF EXISTS msar.rename_table(oid, text);
+CREATE OR REPLACE FUNCTION
+msar.rename_table(tab_id oid, new_tab_name text) RETURNS void AS $$/*
+Change a table's name.
+
+Args:
+  tab_id: the OID of the table whose name we want to change
+  new_tab_name: unquoted, unqualified table name
+*/
+BEGIN
+  PERFORM msar.rename_table(
+    msar.get_relation_schema_name(tab_id),
+    msar.get_relation_name(tab_id),
+    new_tab_name
+  );
+END;
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+
+
+-- Comment on table --------------------------------------------------------------------------------
+
+CREATE OR REPLACE FUNCTION
+__msar.comment_on_table(tab_name text, comment_ text) RETURNS text AS $$/*
+Change the description of a table, returning command executed.
+
+Args:
+  tab_name: The qualified, quoted name of the table whose comment we will change.
+  comment_: The new comment. Any quotes or special characters must be escaped.
+*/
+DECLARE
+  comment_or_null text := COALESCE(comment_, 'NULL');
+BEGIN
+RETURN __msar.exec_ddl('COMMENT ON TABLE %s IS %s', tab_name, comment_or_null);
+END;
+$$ LANGUAGE plpgsql;
+
+
+CREATE OR REPLACE FUNCTION
+msar.comment_on_table(tab_id oid, comment_ text) RETURNS text AS $$/*
+Change the description of a table, returning command executed.
+
+Args:
+  tab_id: The OID of the table whose comment we will change.
+  comment_: The new comment.
+*/
+SELECT __msar.comment_on_table(
+  __msar.get_qualified_relation_name_or_null(tab_id),
+  quote_literal(comment_)
+);
+$$ LANGUAGE SQL;
+
+
+CREATE OR REPLACE FUNCTION
+msar.comment_on_table(sch_name text, tab_name text, comment_ text) RETURNS text AS $$/*
+Change the description of a table, returning command executed.
+
+Args:
+  sch_name: The schema of the table whose comment we will change.
+  tab_name: The name of the table whose comment we will change.
+  comment_: The new comment.
+*/
+SELECT __msar.comment_on_table(
+  __msar.build_qualified_name_sql(sch_name, tab_name),
+  quote_literal(comment_)
+);
+$$ LANGUAGE SQL;
+
+
+-- Alter table -------------------------------------------------------------------------------------
+CREATE OR REPLACE FUNCTION
+msar.alter_table(tab_id oid, tab_alters jsonb) RETURNS text AS $$/*
+Alter the name, description, or columns of a table, returning name of the altered table.
+
+Args:
+  tab_id: The OID of the table whose columns we'll alter.
+  tab_alters: a JSONB describing the alterations to make.
+
+  The tab_alters should have the form:
+  {
+    "name": <str>,
+    "description": <str>,
+    "columns": <col_alters>
+  }
+*/
+DECLARE
+  new_tab_name text;
+  col_alters jsonb;
+BEGIN
+  new_tab_name := tab_alters->>'name';
+  col_alters := tab_alters->'columns';
+  PERFORM msar.rename_table(tab_id, new_tab_name);
+  PERFORM CASE WHEN tab_alters ?
'description' + THEN msar.comment_on_table(tab_id, tab_alters->>'description') END; + PERFORM msar.alter_columns(tab_id, col_alters); + RETURN __msar.get_qualified_relation_name_or_null(tab_id); +END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +---------------------------------------------------------------------------------------------------- +---------------------------------------------------------------------------------------------------- +-- ALTER TABLE FUNCTIONS: Column operations +-- +-- Functions in this section should always involve 'ALTER TABLE', and one or more columns +---------------------------------------------------------------------------------------------------- +---------------------------------------------------------------------------------------------------- + +-- Update table primary key sequence to latest ----------------------------------------------------- + +CREATE OR REPLACE FUNCTION +__msar.update_pk_sequence_to_latest(tab_name text, col_name text) RETURNS text AS $$/* +Update the primary key sequence to the maximum of the primary key column, plus one. + +Args: + tab_name: Fully-qualified, quoted table name + col_name: The column name of the primary key. +*/ +BEGIN + RETURN __msar.exec_ddl( + 'SELECT ' + || 'setval(' + || 'pg_get_serial_sequence(''%1$s'', ''%2$s''), coalesce(max(%2$s) + 1, 1), false' + || ') ' + || 'FROM %1$s', + tab_name, col_name + ); +END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.update_pk_sequence_to_latest(tab_id oid, col_id integer) RETURNS text AS $$/* +Update the primary key sequence to the maximum of the primary key column, plus one. + +Args: + tab_id: The OID of the table whose primary key sequence we'll update. + col_id: The attnum of the primary key column. +*/ +DECLARE tab_name text; +DECLARE col_name text; +BEGIN + tab_name := __msar.get_qualified_relation_name(tab_id); + col_name := quote_ident(msar.get_column_name(tab_id, col_id)); + RETURN __msar.update_pk_sequence_to_latest(tab_name, col_name); +END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.update_pk_sequence_to_latest(sch_name text, tab_name text, col_name text) RETURNS text AS $$/* +Update the primary key sequence to the maximum of the primary key column, plus one. + +Args: + sch_name: The schema where the table whose primary key sequence we'll update lives. + tab_name: The table whose primary key sequence we'll update. + col_name: The name of the primary key column. +*/ +DECLARE qualified_tab_name text; +BEGIN + qualified_tab_name := __msar.build_qualified_name_sql(sch_name, tab_name); + RETURN __msar.update_pk_sequence_to_latest(qualified_tab_name, quote_ident(col_name)); +END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +-- Drop columns from table ------------------------------------------------------------------------- + +CREATE OR REPLACE FUNCTION +__msar.drop_columns(tab_name text, col_names variadic text[]) RETURNS text AS $$/* +Drop the given columns from the given table. + +Args: + tab_name: Fully-qualified, quoted table name. + col_names: The column names to be dropped, quoted. 
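+
+Illustrative example (names are assumed to be pre-quoted by the caller, per the Args above):
+  SELECT __msar.drop_columns('"public"."items"', '"Col A"', '"Col B"');
+  -- executes: ALTER TABLE "public"."items" DROP COLUMN "Col A", DROP COLUMN "Col B"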
+*/ +DECLARE column_drops text; +BEGIN + SELECT string_agg(format('DROP COLUMN %s', col), ', ') + FROM unnest(col_names) AS col + INTO column_drops; + RETURN __msar.exec_ddl('ALTER TABLE %s %s', tab_name, column_drops); +END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.drop_columns(tab_id oid, col_ids variadic integer[]) RETURNS text AS $$/* +Drop the given columns from the given table. + +Args: + tab_id: OID of the table whose columns we'll drop. + col_ids: The attnums of the columns to drop. +*/ +DECLARE col_names text[]; +BEGIN + SELECT array_agg(quote_ident(attname)) + FROM pg_catalog.pg_attribute + WHERE attrelid=tab_id AND NOT attisdropped AND ARRAY[attnum::integer] <@ col_ids + INTO col_names; + PERFORM __msar.drop_columns( + __msar.get_qualified_relation_name_or_null(tab_id), + variadic col_names + ); + RETURN array_length(col_names, 1); +END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.drop_columns(sch_name text, tab_name text, col_names variadic text[]) RETURNS text AS $$/* +Drop the given columns from the given table. + +Args: + sch_name: The schema where the table whose columns we'll drop lives, unquoted. + tab_name: The table whose columns we'll drop, unquoted and unqualified. + col_names: The columns to drop, unquoted. +*/ +DECLARE prepared_col_names text[]; +DECLARE fully_qualified_tab_name text; +BEGIN + SELECT array_agg(quote_ident(col)) FROM unnest(col_names) AS col INTO prepared_col_names; + fully_qualified_tab_name := __msar.build_qualified_name_sql(sch_name, tab_name); + RETURN __msar.drop_columns(fully_qualified_tab_name, variadic prepared_col_names); +END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +-- Column creation definition type ----------------------------------------------------------------- + +DROP TYPE IF EXISTS __msar.col_def CASCADE; +CREATE TYPE __msar.col_def AS ( + name_ text, -- The name of the column to create, quoted. + type_ text, -- The type of the column to create, fully specced with arguments. + not_null boolean, -- A boolean to describe whether the column is nullable or not. + default_ text, -- Text SQL giving the default value for the column. + identity_ boolean, -- A boolean giving whether the column is an identity pkey column. + description text -- A text that will become a comment for the column +); + + +CREATE OR REPLACE FUNCTION +msar.get_fresh_copy_name(tab_id oid, col_id smallint) RETURNS text AS $$/* +This function generates a name to be used for a duplicated column. + +Given an original column name 'abc', the resulting copies will be named 'abc ', where is +minimal (at least 1) subject to the restriction that 'abc ' is not already a column of the table +given. + +Args: + tab_id: the table for which we'll generate a column name. + col_id: the original column whose name we'll use as the prefix in our copied column name. 
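+
+Illustrative example: if the table already has columns 'abc' and 'abc 1', copying 'abc' yields
+the name 'abc 2'.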
+*/ +DECLARE + original_col_name text; + idx integer := 1; +BEGIN + original_col_name := attname FROM pg_attribute WHERE attrelid=tab_id AND attnum=col_id; + WHILE format('%s %s', original_col_name, idx) IN ( + SELECT attname FROM pg_attribute WHERE attrelid=tab_id + ) LOOP + idx = idx + 1; + END LOOP; + RETURN format('%s %s', original_col_name, idx); +END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION __msar.get_duplicate_col_defs( + tab_id oid, + col_ids smallint[], + new_names text[], + copy_defaults boolean +) RETURNS __msar.col_def[] AS $$/* +Get an array of __msar.col_def from given columns in a table. + +Args: + tab_id: The OID of the table containing the column whose definition we want. + col_ids: The attnums of the columns whose definitions we want. + new_names: The desired names of the column defs. Must be in same order as col_ids, and same + length. + copy_defaults: Whether or not we should copy the defaults +*/ +SELECT array_agg( + ( + -- build a name for the duplicate column + quote_ident(COALESCE(new_name, msar.get_fresh_copy_name(tab_id, pg_columns.attnum))), + -- build text specifying the type of the duplicate column + format_type(atttypid, atttypmod), + -- set the duplicate column to be nullable, since it will initially be empty + false, + -- set the default value for the duplicate column if specified + CASE WHEN copy_defaults THEN pg_get_expr(adbin, tab_id) END, + -- We don't set a duplicate column as a primary key, since that would cause an error. + false, + msar.col_description(tab_id, pg_columns.attnum) + )::__msar.col_def +) +FROM pg_attribute AS pg_columns + JOIN unnest(col_ids, new_names) AS columns_to_copy(col_id, new_name) + ON pg_columns.attnum=columns_to_copy.col_id + LEFT JOIN pg_attrdef AS pg_column_defaults + ON pg_column_defaults.adnum=pg_columns.attnum AND pg_columns.attrelid=pg_column_defaults.adrelid +WHERE pg_columns.attrelid=tab_id; +$$ LANGUAGE sql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.build_unique_column_name(tab_id oid, col_name text) RETURNS text AS $$/* +Get a unique column name based on the given name. + +Args: + tab_id: The OID of the table where the column name should be unique. + col_name: The resulting column name will be equal to or at least based on this. + +See the msar.get_fresh_copy_name function for how unique column names are generated. +*/ +DECLARE + col_attnum smallint; +BEGIN + col_attnum := msar.get_attnum(tab_id, col_name); + RETURN CASE + WHEN col_attnum IS NOT NULL THEN msar.get_fresh_copy_name(tab_id, col_attnum) ELSE col_name + END; +END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.build_unique_fkey_column_name(tab_id oid, fk_col_name text, frel_name text) + RETURNS text AS $$/* +Create a unique name for a foreign key column. + +Args: + tab_id: The OID of the table where the column name should be unique. + fk_col_name: The base name for the foreign key column. + frel_name: The name of the referent table. Used for creating fk_col_name if not given. + +Note that frel_name will be used to build the foreign key column name if it's not given. The result +will be of the form: _id. Then, we apply some logic to ensure the result is unique. 
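+
+Illustrative example (assuming no column of the table is already named 'authors_id'):
+  SELECT msar.build_unique_fkey_column_name(tab_id, null, 'authors');  -- returns: authors_id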
+*/
+BEGIN
+  fk_col_name := COALESCE(fk_col_name, format('%s_id', frel_name));
+  RETURN msar.build_unique_column_name(tab_id, fk_col_name);
+END;
+$$ LANGUAGE plpgsql;
+
+
+CREATE OR REPLACE FUNCTION
+msar.get_extracted_col_def_jsonb(tab_id oid, col_ids integer[]) RETURNS jsonb AS $$/*
+Get a JSON array of column definitions from given columns for creation of an extracted table.
+
+See the __msar.process_col_def_jsonb for a description of the JSON.
+
+Args:
+  tab_id: The OID of the table containing the columns whose definitions we want.
+  col_ids: The attnums of the columns whose definitions we want.
+*/
+
+SELECT jsonb_agg(
+  jsonb_build_object(
+    'name', attname,
+    'type', jsonb_build_object('id', atttypid, 'modifier', atttypmod),
+    'not_null', attnotnull,
+    'default',
+    -- We only copy non-dynamic default expressions to new table to avoid double-use of sequences.
+    -- Sequences are owned by a specific column, and can't be reused without error.
+    CASE WHEN NOT msar.is_default_possibly_dynamic(tab_id, col_id) THEN
+      pg_get_expr(adbin, tab_id)
+    END
+  )
+)
+FROM pg_attribute AS pg_columns
+  JOIN unnest(col_ids) AS columns_to_copy(col_id)
+    ON pg_columns.attnum=columns_to_copy.col_id
+  LEFT JOIN pg_attrdef AS pg_column_defaults
+    ON pg_column_defaults.adnum=pg_columns.attnum AND pg_columns.attrelid=pg_column_defaults.adrelid
+WHERE pg_columns.attrelid=tab_id AND NOT msar.is_pkey_col(tab_id, col_id);
+$$ LANGUAGE sql RETURNS NULL ON NULL INPUT;
+
+
+-- Add columns to table ----------------------------------------------------------------------------
+
+CREATE OR REPLACE FUNCTION
+__msar.prepare_fields_arg(fields text) RETURNS text AS $$/*
+Convert the `fields` argument into an integer for use with the intervaltypmodin system function.
+
+Args:
+  fields: A string corresponding to the documented options from
+          https://www.postgresql.org/docs/13/datatype-datetime.html
+
+In order to construct the argument for intervaltypmodin, needed for constructing the typmod value
+for INTERVAL types with arguments, we need to apply a transformation to the correct integer. This
+transformation is quite arcane, and is lifted straight from the PostgreSQL C code. Given a non-null
+fields argument, the steps are:
+- Assign each substring of valid `fields` arguments the correct integer (from the Postgres src).
+- Apply a bitshift mapping each integer to the according power of 2.
+- Sum the results to get an integer signifying the fields argument.
+*/
+SELECT COALESCE(
+  sum(1<<code)::text,
+  -- With no matching fields, fall back to the full range (0x7FFF in the PostgreSQL source).
+  (32767)::text
+)
+FROM (
+  -- These integers come from the PostgreSQL source (see datetime.h).
+  VALUES ('MONTH', 1), ('YEAR', 2), ('DAY', 3), ('HOUR', 10), ('MINUTE', 11), ('SECOND', 12)
+) AS field_codes(field_name, code)
+WHERE fields ILIKE '%' || field_name || '%';
+$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION
+__msar.build_typmodin_arg(typ_options jsonb, timespan_flag boolean) RETURNS cstring[] AS $$/*
+Build the argument array for a typmodin function from the given type options.
+
+Args:
+  typ_options: A jsonb object with type options in the form given in msar.build_type_text.
+  timespan_flag: Whether the type in question is a timespan (e.g., INTERVAL) type.
+*/
+SELECT array_remove(
+  ARRAY[
+    typ_options ->> 'length',
+    CASE WHEN timespan_flag THEN __msar.prepare_fields_arg(typ_options ->> 'fields') END,
+    typ_options ->> 'precision',
+    typ_options ->> 'scale'
+  ],
+  null
+)::cstring[]
+$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION
+__msar.get_formatted_base_type(typ_name text, typ_options jsonb) RETURNS text AS $$ /*
+Build the appropriate type definition string, without Array brackets.
+
+This function uses some PostgreSQL internal functions to do its work. In particular, for any type
+that takes options, this function uses the typmodin (read "type modification input") system
+functions to convert the given options into a typmod integer. The typ_name given is converted into
+the OID of the named type. These two pieces let us call `format_type` to get a canonical string
+representation of the definition of the type, with its options.
+
+Args:
+  typ_name: This should be qualified and quoted as needed.
+  typ_options: These should be in the form described in msar.build_type_text.
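+
+Illustrative example:
+  SELECT __msar.get_formatted_base_type('numeric', '{"precision": 5, "scale": 2}'::jsonb);
+  -- returns: numeric(5,2)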
+*/
+DECLARE
+  typ_id oid;
+  timespan_flag boolean;
+  typmodin_func text;
+  typmod integer;
+BEGIN
+  -- Here we just get the OID of the type.
+  typ_id := typ_name::regtype::oid;
+  -- This is a lookup of the function name for the typmodin function associated with the type, if
+  -- one exists.
+  typmodin_func := typmodin::text FROM pg_type WHERE oid=typ_id AND typmodin<>0;
+  -- This flag is needed since timespan types need special handling when converting the options into
+  -- the form needed to call the typmodin function.
+  timespan_flag := typcategory='T' FROM pg_type WHERE oid=typ_id;
+  IF (
+    jsonb_typeof(typ_options) = 'null'  -- The caller passed no type options
+    OR typ_options IS NULL  -- The caller didn't even pass the type options key
+    OR typ_options='{}'::jsonb  -- The caller passed an empty type options object
+    OR typmodin_func IS NULL  -- The type doesn't actually accept type options
+  ) THEN
+    typmod := NULL;
+  ELSE
+    -- Here, we actually run the typmod function to get the output for use in the format_type call.
+    EXECUTE format(
+      'SELECT %I(%L)',
+      typmodin_func,
+      __msar.build_typmodin_arg(typ_options, timespan_flag)
+    ) INTO typmod;
+  END IF;
+  RETURN format_type(typ_id::integer, typmod::integer);
+END;
+$$ LANGUAGE plpgsql;
+
+
+CREATE OR REPLACE FUNCTION
+msar.build_type_text(typ_jsonb jsonb) RETURNS text AS $$/*
+Turns the given type-describing JSON into a proper string defining a type with arguments
+
+The input JSON should be of the form
+  {
+    "id": <int>,
+    "schema": <str>,
+    "name": <str>,
+    "modifier": <int>,
+    "options": {
+      "length": <int>,
+      "precision": <int>,
+      "scale": <int>,
+      "fields": <str>,
+      "array": <bool>
+    }
+  }
+
+All fields are optional, and a null value as input returns 'text'
+*/
+SELECT COALESCE(
+  -- First choice is the type specified by numeric IDs, since they're most reliable.
+  format_type(
+    (typ_jsonb ->> 'id')::integer,
+    (typ_jsonb ->> 'modifier')::integer
+  ),
+  -- Second choice is the type specified by string IDs.
+  __msar.get_formatted_base_type(
+    COALESCE(
+      __msar.build_qualified_name_sql(typ_jsonb ->> 'schema', typ_jsonb ->> 'name'),
+      typ_jsonb ->> 'name',
+      'text'  -- We fall back to 'text' when input is null or empty.
+    ),
+    typ_jsonb -> 'options'
+  ) || CASE
+    WHEN (typ_jsonb -> 'options' ->> 'array')::boolean THEN
+      '[]'
+    ELSE ''
+  END
+)
+$$ LANGUAGE SQL;
+
+
+CREATE OR REPLACE FUNCTION
+msar.build_type_text_complete(typ_jsonb jsonb, old_type text) RETURNS text AS $$/*
+Build the text name of a type, using the old type as a base if only options are given.
+
+The main use for this is to allow for altering only the options of the type of a column.
+
+Args:
+  typ_jsonb: This is a jsonb denoting the new type.
+  old_type: This is the old type name, with no options.
+
+The typ_jsonb should be in the form:
+{
+  "name": <str> (optional),
+  "options": <obj> (optional)
+}
+
+*/
+SELECT msar.build_type_text(
+  jsonb_strip_nulls(
+    jsonb_build_object(
+      'name', COALESCE(typ_jsonb ->> 'name', old_type),
+      'options', typ_jsonb -> 'options'
+    )
+  )
+);
+$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT;
+
+
+
+CREATE OR REPLACE FUNCTION __msar.build_col_def_text(col __msar.col_def) RETURNS text AS $$/*
+Build appropriate text defining the given column for table creation or alteration.
+*/
+SELECT format(
+  '%s %s %s %s %s',
+  col.name_,
+  col.type_,
+  CASE WHEN col.not_null THEN 'NOT NULL' END,
+  'DEFAULT ' || col.default_,
+  -- This can be used to define our default Mathesar primary key column.
+  -- TODO: We should really consider doing GENERATED *ALWAYS* (rather than BY DEFAULT), but this
+  -- breaks some other assumptions.
+  CASE WHEN col.identity_ THEN 'GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY' END
+);
+$$ LANGUAGE SQL;
+
+
+CREATE OR REPLACE FUNCTION
+__msar.process_col_def_jsonb(
+  tab_id oid,
+  col_defs jsonb,
+  raw_default boolean,
+  create_id boolean DEFAULT false
+) RETURNS __msar.col_def[] AS $$/*
+Create an __msar.col_def from a JSON array of column creation defining JSON blobs.
+
+Args:
+  tab_id: The OID of the table where we'll create the columns
+  col_defs: A jsonb array defining a column creation (must have "type" key; "name",
+            "not_null", and "default" keys optional).
+  raw_default: This boolean tells us whether we should reproduce the default with or without quoting
+               and escaping. True means we don't quote or escape, but just use the raw value.
+  create_id: This boolean defines whether or not we should automatically add a default Mathesar 'id'
+             column to the input.
+
+The col_defs should have the form:
+[
+  {
+    "name": <str> (optional),
+    "type": {
+      "name": <str> (optional),
+      "options": <obj> (optional)
+    },
+    "not_null": <bool> (optional; default false),
+    "default": <any> (optional),
+    "description": <str> (optional)
+  },
+  {
+    ...
+  }
+]
+
+For more info on the type.options object, see the msar.build_type_text function. All pieces are
+optional. If an empty object {} is given, the resulting column will have a default name like
+'Column <n>' and type TEXT. It will allow nulls and have a null default value.
+*/
+WITH attnum_cte AS (
+  SELECT MAX(attnum) AS m_attnum FROM pg_attribute WHERE attrelid=tab_id
+), col_create_cte AS (
+  SELECT (
+    -- build a name for the column
+    COALESCE(
+      quote_ident(col_def_obj ->> 'name'),
+      quote_ident('Column ' || (attnum_cte.m_attnum + ROW_NUMBER() OVER ())),
+      quote_ident('Column ' || (ROW_NUMBER() OVER ()))
+    ),
+    -- build the column type
+    msar.build_type_text(col_def_obj -> 'type'),
+    -- set the not_null value for the column
+    col_def_obj ->> 'not_null',
+    -- set the default value for the column
+    CASE
+      WHEN col_def_obj ->> 'default' IS NULL THEN
+        NULL
+      WHEN raw_default THEN
+        col_def_obj ->> 'default'
+      ELSE
+        format('%L', col_def_obj ->> 'default')
+    END,
+    -- We don't allow setting the primary key column manually
+    false,
+    -- Set the description for the column
+    quote_literal(col_def_obj ->> 'description')
+  )::__msar.col_def AS col_defs
+  FROM attnum_cte, jsonb_array_elements(col_defs) AS col_def_obj
+  WHERE col_def_obj ->> 'name' IS NULL OR col_def_obj ->> 'name' <> 'id'
+)
+SELECT array_cat(
+  CASE
+    WHEN create_id THEN
+      -- The below tuple defines a default 'id' column for Mathesar. It has name id, type integer,
+      -- it's not null, it uses the 'identity' functionality to generate default values, and has
+      -- a default comment.
+      ARRAY[('id', 'integer', true, null, true, 'Mathesar default ID column')]::__msar.col_def[]
+  END,
+  array_agg(col_defs)
+)
+FROM col_create_cte;
+$$ LANGUAGE SQL;
+
+
+CREATE OR REPLACE FUNCTION
+__msar.add_columns(tab_name text, col_defs variadic __msar.col_def[]) RETURNS text AS $$/*
+Add the given columns to the given table.
+
+Args:
+  tab_name: Fully-qualified, quoted table name.
+  col_defs: The columns to be added.
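+
+Illustrative example (adding a single pre-built column definition):
+  SELECT __msar.add_columns(
+    '"public"."items"',
+    ROW('"price"', 'numeric(10,2)', false, null, false, null)::__msar.col_def
+  );
+  -- executes: ALTER TABLE "public"."items" ADD COLUMN "price" numeric(10,2)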
+*/ +WITH ca_cte AS ( + SELECT string_agg( + 'ADD COLUMN ' || __msar.build_col_def_text(col), + ', ' + ) AS col_additions + FROM unnest(col_defs) AS col +) +SELECT __msar.exec_ddl('ALTER TABLE %s %s', tab_name, col_additions) FROM ca_cte; +$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.add_columns(tab_id oid, col_defs jsonb, raw_default boolean DEFAULT false) + RETURNS smallint[] AS $$/* +Add columns to a table. + +Args: + tab_id: The OID of the table to which we'll add columns. + col_defs: a JSONB array defining columns to add. See __msar.process_col_def_jsonb for details. + raw_default: Whether to treat defaults as raw SQL. DANGER! +*/ +DECLARE + col_create_defs __msar.col_def[]; + fq_table_name text := __msar.get_qualified_relation_name(tab_id); +BEGIN + col_create_defs := __msar.process_col_def_jsonb(tab_id, col_defs, raw_default); + PERFORM __msar.add_columns(fq_table_name, variadic col_create_defs); + + PERFORM + __msar.comment_on_column( + fq_table_name, + col_create_def.name_, + col_create_def.description + ) + FROM unnest(col_create_defs) AS col_create_def + WHERE col_create_def.description IS NOT NULL; + + RETURN array_agg(attnum) + FROM (SELECT * FROM pg_attribute WHERE attrelid=tab_id) L + INNER JOIN unnest(col_create_defs) R + ON quote_ident(L.attname) = R.name_; +END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.add_columns(sch_name text, tab_name text, col_defs jsonb, raw_default boolean) + RETURNS smallint[] AS $$/* +Add columns to a table. + +Args: + sch_name: unquoted schema name of the table to which we'll add columns. + tab_name: unquoted, unqualified name of the table to which we'll add columns. + col_defs: a JSONB array defining columns to add. See __msar.process_col_def_jsonb for details. + raw_default: Whether to treat defaults as raw SQL. DANGER! +*/ +SELECT msar.add_columns(msar.get_relation_oid(sch_name, tab_name), col_defs, raw_default); +$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; + + +---------------------------------------------------------------------------------------------------- +---------------------------------------------------------------------------------------------------- +-- MATHESAR ADD CONSTRAINTS FUNCTIONS +-- +-- Add constraints to tables and (for NOT NULL) columns. +---------------------------------------------------------------------------------------------------- +---------------------------------------------------------------------------------------------------- + + +-- Constraint creation definition type ------------------------------------------------------------- + +DROP TYPE IF EXISTS __msar.con_def CASCADE; +CREATE TYPE __msar.con_def AS ( +/* +This should be used in the context of a single ALTER TABLE command. So, no need to reference the +constrained table's OID. +*/ + name_ text, -- The name of the constraint to create, qualified and quoted. + type_ "char", -- The type of constraint to create, as a "char". See pg_constraint.contype + col_names text[], -- The columns for the constraint, quoted. + deferrable_ boolean, -- Whether or not the constraint is deferrable. + fk_rel_name text, -- The foreign table for an fkey, qualified and quoted. + fk_col_names text[], -- The foreign table's columns for an fkey, quoted. + fk_upd_action "char", -- Action taken when fk referent is updated. See pg_constraint.confupdtype. + fk_del_action "char", -- Action taken when fk referent is deleted. See pg_constraint.confdeltype. 
+  fk_match_type "char", -- The match type of the fk constraint. See pg_constraint.confmatchtype.
+  expression text -- Text SQL giving the expression for the constraint (if applicable).
+);
+
+
+CREATE OR REPLACE FUNCTION msar.get_fkey_action_from_char("char") RETURNS text AS $$/*
+Map the "char" from pg_constraint to the update or delete action string.
+*/
+SELECT CASE
+  WHEN $1 = 'a' THEN 'NO ACTION'
+  WHEN $1 = 'r' THEN 'RESTRICT'
+  WHEN $1 = 'c' THEN 'CASCADE'
+  WHEN $1 = 'n' THEN 'SET NULL'
+  WHEN $1 = 'd' THEN 'SET DEFAULT'
+END;
+$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION msar.get_fkey_match_type_from_char("char") RETURNS text AS $$/*
+Convert a char to its proper string describing the match type.
+
+NOTE: Since 'PARTIAL' is not implemented (and throws an error), we don't use it here.
+*/
+SELECT CASE
+  WHEN $1 = 'f' THEN 'FULL'
+  WHEN $1 = 's' THEN 'SIMPLE'
+END;
+$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION __msar.build_con_def_text(con __msar.con_def) RETURNS text AS $$/*
+Build appropriate text defining the given constraint for table creation or alteration.
+
+If the given con.name_ is null, the syntax changes slightly (we don't add 'CONSTRAINT'). The FOREIGN
+KEY constraint has a number of extra strings that may or may not be appended. The best
+documentation for this is the FOREIGN KEY section of the CREATE TABLE docs:
+https://www.postgresql.org/docs/current/sql-createtable.html
+
+One helpful note is that this function makes heavy use of the || operator. This operator returns
+null if either side is null, and thus
+
+  'CONSTRAINT ' || con.name_ || ' '
+
+is 'CONSTRAINT <name> ' when con.name_ is not null, and simply null if con.name_ is null.
+*/
+SELECT CASE
+    WHEN con.type_ = 'u' THEN -- It's a UNIQUE constraint
+      format(
+        '%sUNIQUE %s',
+        'CONSTRAINT ' || con.name_ || ' ',
+        __msar.build_text_tuple(con.col_names)
+      )
+    WHEN con.type_ = 'p' THEN -- It's a PRIMARY KEY constraint
+      format(
+        '%sPRIMARY KEY %s',
+        'CONSTRAINT ' || con.name_ || ' ',
+        __msar.build_text_tuple(con.col_names)
+      )
+    WHEN con.type_ = 'f' THEN -- It's a FOREIGN KEY constraint
+      format(
+        '%sFOREIGN KEY %s REFERENCES %s%s%s%s%s',
+        'CONSTRAINT ' || con.name_ || ' ',
+        __msar.build_text_tuple(con.col_names),
+        con.fk_rel_name,
+        __msar.build_text_tuple(con.fk_col_names),
+        ' MATCH ' || msar.get_fkey_match_type_from_char(con.fk_match_type),
+        ' ON DELETE ' || msar.get_fkey_action_from_char(con.fk_del_action),
+        ' ON UPDATE ' || msar.get_fkey_action_from_char(con.fk_upd_action)
+      )
+    ELSE
+      NULL
+  END
+  || CASE WHEN con.deferrable_ THEN 'DEFERRABLE' ELSE '' END;
+$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION
+__msar.process_con_def_jsonb(tab_id oid, con_create_arr jsonb)
+  RETURNS __msar.con_def[] AS $$/*
+Create an array of __msar.con_def from a JSON array of constraint creation defining JSON.
+
+Args:
+  tab_id: The OID of the table where we'll create the constraints.
+  con_create_arr: A jsonb array defining constraint creations (each object must have a "type" key;
+                  all other keys are optional).
+
+
+The con_create_arr should have the form:
+[
+  {
+    "name": <str> (optional),
+    "type": <str>,
+    "columns": [<int>, <int>, ...],
+    "deferrable": <bool> (optional),
+    "fkey_relation_id": <int> (optional),
+    "fkey_relation_schema": <str> (optional),
+    "fkey_relation_name": <str> (optional),
+    "fkey_columns": [<int>, <int>, ...] (optional),
+    "fkey_update_action": <str> (optional),
+    "fkey_delete_action": <str> (optional),
+    "fkey_match_type": <str> (optional),
+  },
+  {
+    ...
+  }
+]
+If the constraint type is "f", then we require
+- fkey_relation_id or (fkey_relation_schema and fkey_relation_name).
+
+Numeric IDs are preferred over textual ones where both are accepted.
+*/
+SELECT array_agg(
+  (
+    -- build the name for the constraint, properly quoted.
+    quote_ident(con_create_obj ->> 'name'),
+    -- set the constraint type as a single char. See __msar.build_con_def_text for details.
+    con_create_obj ->> 'type',
+    -- Set the column names associated with the constraint.
+    __msar.get_column_names(tab_id, con_create_obj -> 'columns'),
+    -- Set whether the constraint is deferrable or not (boolean).
+    con_create_obj ->> 'deferrable',
+    -- Build the relation name where the constraint will be applied. Prefer numeric ID.
+    COALESCE(
+      __msar.get_qualified_relation_name((con_create_obj -> 'fkey_relation_id')::integer::oid),
+      __msar.build_qualified_name_sql(
+        con_create_obj ->> 'fkey_relation_schema', con_create_obj ->> 'fkey_relation_name'
+      )
+    ),
+    -- Build the array of foreign columns for an fkey constraint.
+    __msar.get_column_names(
+      COALESCE(
+        -- We validate that the given OID (if any) is correct.
+        (con_create_obj -> 'fkey_relation_id')::integer::oid,
+        -- If given a schema, name pair, we get the OID from that (and validate it).
+        msar.get_relation_oid(
+          con_create_obj ->> 'fkey_relation_schema', con_create_obj ->> 'fkey_relation_name'
+        )
+      ),
+      con_create_obj -> 'fkey_columns'
+    ),
+    -- The below are passed directly. They define some parameters for FOREIGN KEY constraints.
+    con_create_obj ->> 'fkey_update_action',
+    con_create_obj ->> 'fkey_delete_action',
+    con_create_obj ->> 'fkey_match_type',
+    null -- not yet implemented
+  )::__msar.con_def
+) FROM jsonb_array_elements(con_create_arr) AS x(con_create_obj);
+$$ LANGUAGE SQL;
+
+
+CREATE OR REPLACE FUNCTION
+__msar.add_constraints(tab_name text, con_defs variadic __msar.con_def[])
+  RETURNS TEXT AS $$/*
+Add the given constraints to the given table.
+
+Args:
+  tab_name: Fully-qualified, quoted table name.
+  con_defs: The constraints to be added.
+*/
+WITH con_cte AS (
+  SELECT string_agg('ADD ' || __msar.build_con_def_text(con), ', ') as con_additions
+  FROM unnest(con_defs) as con
+)
+SELECT __msar.exec_ddl('ALTER TABLE %s %s', tab_name, con_additions) FROM con_cte;
+$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION
+msar.add_constraints(tab_id oid, con_defs jsonb) RETURNS oid[] AS $$/*
+Add constraints to a table.
+
+Args:
+  tab_id: The OID of the table to which we'll add constraints.
+  con_defs: a JSONB array defining constraints to add. See __msar.process_con_def_jsonb for details.
+*/
+DECLARE
+  con_create_defs __msar.con_def[];
+BEGIN
+  con_create_defs := __msar.process_con_def_jsonb(tab_id, con_defs);
+  PERFORM __msar.add_constraints(
+    __msar.get_qualified_relation_name(tab_id),
+    variadic con_create_defs
+  );
+  RETURN array_agg(oid) FROM pg_constraint WHERE conrelid=tab_id;
+END;
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION
+msar.add_constraints(sch_name text, tab_name text, con_defs jsonb)
+  RETURNS oid[] AS $$/*
+Add constraints to a table.
+
+Args:
+  sch_name: unquoted schema name of the table to which we'll add constraints.
+  tab_name: unquoted, unqualified name of the table to which we'll add constraints.
+  con_defs: a JSONB array defining constraints to add. See __msar.process_con_def_jsonb for details.
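+
+Illustrative example (assuming tables "books" and "authors", with "books" column 3 referring to
+"authors" column 1):
+  SELECT msar.add_constraints(
+    'public', 'books',
+    '[{"type": "f", "columns": [3], "fkey_relation_schema": "public",
+       "fkey_relation_name": "authors", "fkey_columns": [1]}]'
+  );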
+*/ +SELECT msar.add_constraints(msar.get_relation_oid(sch_name, tab_name), con_defs); +$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; + + +DROP TYPE IF EXISTS __msar.not_null_def CASCADE; +CREATE TYPE __msar.not_null_def AS ( + col_name text, -- The column to be modified, quoted. + not_null boolean -- The value to set for null or not null. +); + + +CREATE OR REPLACE FUNCTION +__msar.set_not_nulls(tab_name text, not_null_defs __msar.not_null_def[]) RETURNS TEXT AS $$/* +Set or drop not null constraints on columns +*/ +WITH not_null_cte AS ( + SELECT string_agg( + CASE + WHEN not_null_def.not_null=true THEN format('ALTER %s SET NOT NULL', not_null_def.col_name) + WHEN not_null_def.not_null=false THEN format ('ALTER %s DROP NOT NULL', not_null_def.col_name) + END, + ', ' + ) AS not_nulls + FROM unnest(not_null_defs) as not_null_def +) +SELECT __msar.exec_ddl('ALTER TABLE %s %s', tab_name, not_nulls) FROM not_null_cte; +$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.copy_constraint(con_id oid, from_col_id smallint, to_col_id smallint) + RETURNS oid[] AS $$/* +Copy a single constraint associated with a column. + +Given a column with attnum 3 involved in the original constraint, and a column with attnum 4 to be +involved in the constraint copy, and other columns 1 and 2 involved in the constraint, suppose the +original constraint had conkey [1, 2, 3]. The copy constraint should then have conkey [1, 2, 4]. + +For now, this is only implemented for unique constraints. + +Args: + con_id: The oid of the constraint we'll copy. + from_col_id: The column ID to be removed from the original's conkey in the copy. + to_col_id: The column ID to be added to the original's conkey in the copy. +*/ +WITH + con_cte AS (SELECT * FROM pg_constraint WHERE oid=con_id AND contype='u'), + con_def_cte AS ( + SELECT jsonb_agg( + jsonb_build_object( + 'name', null, + 'type', con_cte.contype, + 'columns', array_replace(con_cte.conkey, from_col_id, to_col_id) + ) + ) AS con_def FROM con_cte + ) +SELECT msar.add_constraints(con_cte.conrelid, con_def_cte.con_def) FROM con_cte, con_def_cte; +$$ LANGUAGE sql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.copy_column( + tab_id oid, col_id smallint, copy_name text, copy_data boolean, copy_constraints boolean +) RETURNS smallint AS $$/* +Copy a column of a table +*/ +DECLARE + col_defs __msar.col_def[]; + tab_name text; + col_name text; + created_col_id smallint; +BEGIN + col_defs := __msar.get_duplicate_col_defs( + tab_id, ARRAY[col_id], ARRAY[copy_name], copy_data + ); + tab_name := __msar.get_qualified_relation_name(tab_id); + col_name := quote_ident(msar.get_column_name(tab_id, col_id)); + PERFORM __msar.add_columns(tab_name, VARIADIC col_defs); + created_col_id := attnum + FROM pg_attribute + WHERE attrelid=tab_id AND quote_ident(attname)=col_defs[1].name_; + IF copy_data THEN + PERFORM __msar.exec_ddl( + 'UPDATE %s SET %s=%s', + tab_name, col_defs[1].name_, quote_ident(msar.get_column_name(tab_id, col_id)) + ); + END IF; + IF copy_constraints THEN + PERFORM msar.copy_constraint(oid, col_id, created_col_id) + FROM pg_constraint + WHERE conrelid=tab_id AND ARRAY[col_id] <@ conkey; + PERFORM __msar.set_not_nulls( + tab_name, ARRAY[(col_defs[1].name_, attnotnull)::__msar.not_null_def] + ) + FROM pg_attribute WHERE attrelid=tab_id AND attnum=col_id; + END IF; + RETURN created_col_id; +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION +msar.get_extracted_con_def_jsonb(tab_id oid, col_ids integer[]) RETURNS jsonb AS $$/* 
+Get a JSON array of constraint definitions from given columns for creation of an extracted table. + +See the __msar.process_con_def_jsonb for a description of the JSON. + +Args: + tab_id: The OID of the table containing the constraints whose definitions we want. + col_ids: The attnum of columns with the constraints whose definitions we want. +*/ + +SELECT jsonb_agg( + jsonb_build_object( + 'type', contype, + 'columns', ARRAY[attname], + 'deferrable', condeferrable, + 'fkey_relation_id', confrelid::bigint, + 'fkey_columns', coalesce(confkey, ARRAY[]::smallint[]), + 'fkey_update_action', confupdtype, + 'fkey_delete_action', confdeltype, + 'fkey_match_type', confmatchtype + ) +) +FROM pg_constraint + JOIN unnest(col_ids) AS columns_to_copy(col_id) ON pg_constraint.conkey[1]=columns_to_copy.col_id + JOIN pg_attribute + ON pg_attribute.attnum=columns_to_copy.col_id AND pg_attribute.attrelid=pg_constraint.conrelid +WHERE pg_constraint.conrelid=tab_id AND (pg_constraint.contype='f' OR pg_constraint.contype='u'); +$$ LANGUAGE sql RETURNS NULL ON NULL INPUT; + + +---------------------------------------------------------------------------------------------------- +---------------------------------------------------------------------------------------------------- +-- MATHESAR DROP TABLE FUNCTIONS +-- +-- Drop a table. +---------------------------------------------------------------------------------------------------- +---------------------------------------------------------------------------------------------------- + +-- Drop table -------------------------------------------------------------------------------------- + +CREATE OR REPLACE FUNCTION +__msar.drop_table(tab_name text, cascade_ boolean, if_exists boolean) RETURNS text AS $$/* +Drop a table, returning the command executed. + +Args: + tab_name: The qualified, quoted name of the table we will drop. + cascade_: Whether to add CASCADE. + if_exists_: Whether to ignore an error if the table doesn't exist +*/ +DECLARE + cmd_template TEXT; +BEGIN + IF if_exists + THEN + cmd_template := 'DROP TABLE IF EXISTS %s'; + ELSE + cmd_template := 'DROP TABLE %s'; + END IF; + IF cascade_ + THEN + cmd_template = cmd_template || ' CASCADE'; + END IF; + RETURN __msar.exec_ddl(cmd_template, tab_name); +END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.drop_table(tab_id oid, cascade_ boolean) RETURNS text AS $$/* +Drop a table, returning the fully qualified name of the dropped table. + +Args: + tab_id: The OID of the table to drop + cascade_: Whether to drop dependent objects. +*/ +DECLARE relation_name text; +BEGIN + relation_name := __msar.get_qualified_relation_name_or_null(tab_id); + -- if_exists doesn't work while working with oids because + -- the SQL query gets parameterized with tab_id instead of relation_name + -- since we're unable to find the relation_name for a non existing table. + PERFORM __msar.drop_table(relation_name, cascade_, if_exists => false); + RETURN relation_name; +END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.drop_table(sch_name text, tab_name text, cascade_ boolean, if_exists boolean) + RETURNS text AS $$/* +Drop a table, returning the command executed. + +Args: + sch_name: The schema of the table to drop. + tab_name: The name of the table to drop. + cascade_: Whether to drop dependent objects. 
+ if_exists_: Whether to ignore an error if the table doesn't exist +*/ +DECLARE qualified_tab_name text; +BEGIN + qualified_tab_name := __msar.build_qualified_name_sql(sch_name, tab_name); + RETURN __msar.drop_table(qualified_tab_name, cascade_, if_exists); +END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +---------------------------------------------------------------------------------------------------- +---------------------------------------------------------------------------------------------------- +-- MATHESAR DROP CONSTRAINT FUNCTIONS +-- +-- Drop a constraint. +---------------------------------------------------------------------------------------------------- +---------------------------------------------------------------------------------------------------- + +CREATE OR REPLACE FUNCTION +msar.drop_constraint(sch_name text, tab_name text, con_name text) RETURNS TEXT AS $$/* +Drop a constraint + +Args: + sch_name: The name of the schema where the table with constraint to be dropped resides, unquoted. + tab_name: The name of the table that has the constraint to be dropped, unquoted. + con_name: Name of the constraint to drop, unquoted. +*/ +BEGIN + EXECUTE format('ALTER TABLE %I.%I DROP CONSTRAINT %I', sch_name, tab_name, con_name); + RETURN con_name; +END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.drop_constraint(tab_id oid, con_id oid) RETURNS TEXT AS $$/* +Drop a constraint + +Args: + tab_id: OID of the table that has the constraint to be dropped. + con_id: OID of the constraint to be dropped. +*/ +BEGIN + RETURN msar.drop_constraint( + msar.get_relation_schema_name(tab_id), + msar.get_relation_name(tab_id), + msar.get_constraint_name(con_id) + ); +END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +-- Create Mathesar table function + +CREATE OR REPLACE FUNCTION +__msar.add_table(tab_name text, col_defs __msar.col_def[], con_defs __msar.con_def[]) + RETURNS text AS $$/* +Add a table, returning the command executed. + +Args: + tab_name: A qualified & quoted name for the table to be added. + col_defs: An array of __msar.col_def defining the column set of the new table. + con_defs (optional): An array of __msar.con_def defining the constraints for the new table. + +Note: Even if con_defs is null, there can be some column-level constraints set in col_defs. +*/ +WITH col_cte AS ( + SELECT string_agg(__msar.build_col_def_text(col), ', ') AS table_columns + FROM unnest(col_defs) AS col +), con_cte AS ( + SELECT string_agg(__msar.build_con_def_text(con), ', ') AS table_constraints + FROM unnest(con_defs) as con +) +SELECT __msar.exec_ddl( + 'CREATE TABLE %s (%s)', + tab_name, + concat_ws(', ', table_columns, table_constraints) +) +FROM col_cte, con_cte; +$$ LANGUAGE SQL; + +-- Drop function defined in Mathesar 0.1.7 with different argument names +DROP FUNCTION IF EXISTS msar.add_mathesar_table(oid, text, jsonb, jsonb, text); + +CREATE OR REPLACE FUNCTION +msar.add_mathesar_table(sch_id oid, tab_name text, col_defs jsonb, con_defs jsonb, own_id regrole, comment_ text) + RETURNS jsonb AS $$/* +Add a table, with a default id column, returning the OID & name of the created table. + +Args: + sch_id: The OID of the schema where the table will be created. + tab_name (optional): The unquoted name for the new table. + col_defs (optional): The columns for the new table, in order. + con_defs (optional): The constraints for the new table. + own_id (optional): The OID of the role who will own the new table. 
+  comment_ (optional): The comment for the new table.
+
+Note:
+  - If tab_name is NULL, the table will be created with a name in the format 'Table <n>'.
+  - If col_defs is NULL, the table will still be created with a default 'id' column. Also,
+    if an 'id' column is given in the input, it will be replaced with our default 'id' column. This is
+    the behavior of the current python functions, so we're keeping it for now. In any case, the created
+    table will always have our default 'id' column as its first column.
+  - If own_id is NULL, the current role will be the owner of the new table.
+*/
+DECLARE
+  schema_name text;
+  table_count integer;
+  prefix text;
+  uq_table_name text;
+  fq_table_name text;
+  created_table_id oid;
+  column_defs __msar.col_def[];
+  constraint_defs __msar.con_def[];
+BEGIN
+  schema_name := msar.get_schema_name(sch_id);
+  IF NULLIF(tab_name, '') IS NOT NULL AND NOT EXISTS(
+    SELECT oid FROM pg_catalog.pg_class WHERE relname = tab_name AND relnamespace = sch_id
+  )
+  THEN
+    fq_table_name := format('%I.%I', schema_name, tab_name);
+  ELSE
+    -- determine what prefix to use for table name generation
+    IF NULLIF(tab_name, '') IS NOT NULL THEN
+      prefix := tab_name || ' ';
+    ELSE
+      prefix := 'Table ';
+    END IF;
+    -- generate a table name if one doesn't exist
+    SELECT COUNT(*) + 1 INTO table_count
+    FROM pg_catalog.pg_class
+    WHERE relkind = 'r' AND relnamespace = sch_id;
+    uq_table_name := prefix || table_count;
+    -- avoid name collisions
+    WHILE EXISTS (
+      SELECT oid FROM pg_catalog.pg_class WHERE relname = uq_table_name AND relnamespace = sch_id
+    ) LOOP
+      table_count := table_count + 1;
+      uq_table_name := prefix || table_count;
+    END LOOP;
+    fq_table_name := format('%I.%I', schema_name, uq_table_name);
+  END IF;
+  column_defs := __msar.process_col_def_jsonb(0, col_defs, false, true);
+  constraint_defs := __msar.process_con_def_jsonb(0, con_defs);
+  PERFORM __msar.add_table(fq_table_name, column_defs, constraint_defs);
+  created_table_id := fq_table_name::regclass::oid;
+  PERFORM msar.comment_on_table(created_table_id, comment_);
+  IF own_id IS NOT NULL THEN
+    PERFORM msar.transfer_table_ownership(created_table_id, own_id);
+  END IF;
+  RETURN jsonb_build_object(
+    'oid', created_table_id::bigint,
+    'name', relname
+  ) FROM pg_catalog.pg_class WHERE oid = created_table_id;
+END;
+$$ LANGUAGE plpgsql;
+
+
+CREATE OR REPLACE FUNCTION
+msar.prepare_table_for_import(
+  sch_id oid,
+  tab_name text,
+  col_defs jsonb,
+  header boolean,
+  delimiter text,
+  escapechar text,
+  quotechar text,
+  encoding_ text,
+  comment_ text
+) RETURNS jsonb AS $$/*
+Add a table, with a default id column, returning a JSON object containing a properly formatted SQL
+statement to carry out `COPY FROM`, as well as the table_oid and table_name of the created table.
+
+Each returned JSON object will have the form:
+  {
+    "copy_sql": <str>,
+    "table_oid": <int>,
+    "table_name": <str>
+  }
+
+Args:
+  sch_id: The OID of the schema where the table will be created.
+  tab_name (optional): The unquoted name for the new table.
+  col_defs: The columns for the new table, in order.
+  header: Whether or not the file contains a header line with the names of each column in the file.
+  delimiter: The character that separates columns within each row (line) of the file.
+  escapechar: The character that should appear before a data character that matches the `quotechar` value.
+  quotechar: The quoting character to be used when a data value is quoted.
+  encoding_: The encoding in which the file is encoded.
+  comment_ (optional): The comment for the new table.
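+
+Illustrative example (assuming a schema with OID 2200 and a standard CSV file with a header):
+  SELECT msar.prepare_table_for_import(
+    2200, 'Movies', '[{"name": "Title"}, {"name": "Year"}]',
+    true, ',', '"', '"', 'utf-8', null
+  );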
+*/
+DECLARE
+  sch_name text;
+  rel_name text;
+  rel_id oid;
+  col_names_sql text;
+  options_sql text;
+  copy_sql text;
+BEGIN
+  -- Create string table
+  rel_id := msar.add_mathesar_table(sch_id, tab_name, col_defs, NULL, NULL, comment_) ->> 'oid';
+  -- Get unquoted schema and table name for the created table
+  SELECT nspname, relname INTO sch_name, rel_name
+  FROM pg_catalog.pg_class AS pgc
+  LEFT JOIN pg_catalog.pg_namespace AS pgn
+    ON pgc.relnamespace = pgn.oid
+  WHERE pgc.oid = rel_id;
+  -- Aggregate TEXT type column names of the created table
+  SELECT string_agg(quote_ident(attname), ', ') INTO col_names_sql
+  FROM pg_catalog.pg_attribute
+  WHERE attrelid = rel_id AND atttypid = 'TEXT'::regtype::oid;
+  -- Form a substring for COPY related options
+  options_sql := concat_ws(
+    ' ',
+    CASE WHEN header THEN 'HEADER' END,
+    CASE WHEN NULLIF(delimiter, '') IS NOT NULL THEN 'DELIMITER ' || quote_literal(delimiter) END,
+    CASE WHEN NULLIF(escapechar, '') IS NOT NULL THEN 'ESCAPE ' || quote_literal(escapechar) END,
+    CASE WHEN NULLIF(quotechar, '') IS NOT NULL THEN 'QUOTE ' || quote_literal(quotechar) END,
+    CASE WHEN NULLIF(encoding_, '') IS NOT NULL THEN 'ENCODING '|| quote_literal(encoding_) END
+  );
+  -- Create a properly formatted COPY SQL string
+  copy_sql := format('COPY %I.%I (%s) FROM STDIN CSV %s', sch_name, rel_name, col_names_sql, options_sql);
+  RETURN jsonb_build_object(
+    'copy_sql', copy_sql,
+    'table_oid', rel_id::bigint,
+    'table_name', relname
+  ) FROM pg_catalog.pg_class WHERE oid = rel_id;
+END;
+$$ LANGUAGE plpgsql;
+
+
+CREATE OR REPLACE FUNCTION
+msar.get_preview(
+  tab_id oid,
+  col_cast_def jsonb,
+  rec_limit integer
+) RETURNS jsonb AS $$/*
+Preview a table, applying different type casts and options to the underlying columns before import,
+returning a JSON object describing the records of the table.
+
+Note that these casts are temporary and do not alter the data in the underlying table;
+if you wish to alter these settings permanently for the columns, see msar.alter_columns.
+
+Args:
+  tab_id: The OID of the table to preview.
+  col_cast_def: A JSON object describing the column settings to apply.
+  rec_limit (optional): The upper limit for the number of records to return.
+
+The col_cast_def JSONB should have the form:
+[
+  {
+    "attnum": <int>,
+    "type": {
+      "name": <str>,
+      "options": {
+        "length": <int>,
+        "precision": <int>,
+        "scale": <int>,
+        "fields": <str>,
+        "array": <bool>
+      }
+    }
+  },
+  {
+    ...
+  },
+  ...
+] +*/ +DECLARE + tab_name text; + sel_query text; + records jsonb; +BEGIN + tab_name := __msar.get_qualified_relation_name(tab_id); + sel_query := 'SELECT %s FROM %s LIMIT %L'; + WITH preview_cte AS ( + SELECT string_agg( + 'CAST(' || + __msar.build_cast_expr( + quote_ident(msar.get_column_name(tab_id, (col_cast ->> 'attnum')::integer)), col_cast -> 'type' ->> 'name' + ) || + ' AS ' || + msar.build_type_text(col_cast -> 'type') || + ')'|| ' AS ' || quote_ident(msar.get_column_name(tab_id, (col_cast ->> 'attnum')::integer)), + ', ' + ) AS cast_expr + FROM jsonb_array_elements(col_cast_def) AS col_cast + ) + SELECT + __msar.exec_dql(sel_query, cast_expr, tab_name, rec_limit::text) + INTO records FROM preview_cte; + RETURN records; +END; +$$ LANGUAGE plpgsql; + + +---------------------------------------------------------------------------------------------------- +---------------------------------------------------------------------------------------------------- +-- COLUMN ALTERATION FUNCTIONS +-- +-- Functions in this section should be related to altering columns' names, types, and constraints. +---------------------------------------------------------------------------------------------------- +---------------------------------------------------------------------------------------------------- + + +-- Rename columns ---------------------------------------------------------------------------------- + +CREATE OR REPLACE FUNCTION +__msar.rename_column(tab_name text, old_col_name text, new_col_name text) RETURNS text AS $$/* +Change a column name, returning the command executed + +Args: + tab_name: The qualified, quoted name of the table where we'll change a column name + old_col_name: The quoted name of the column to change. + new_col_name: The quoted new name for the column. +*/ +DECLARE + cmd_template text; +BEGIN + cmd_template := 'ALTER TABLE %s RENAME COLUMN %s TO %s'; + IF old_col_name <> new_col_name THEN + RETURN __msar.exec_ddl(cmd_template, tab_name, old_col_name, new_col_name); + ELSE + RETURN null; + END IF; +END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.rename_column(tab_id oid, col_id integer, new_col_name text) RETURNS smallint AS $$/* +Change a column name, returning the command executed + +Args: + tab_id: The OID of the table whose column we're renaming + col_id: The ID of the column to rename + new_col_name: The unquoted new name for the column. +*/ +BEGIN + PERFORM __msar.rename_column( + tab_name => __msar.get_qualified_relation_name(tab_id), + old_col_name => quote_ident(msar.get_column_name(tab_id, col_id)), + new_col_name => quote_ident(new_col_name) + ); + RETURN col_id; +END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION __msar.build_cast_expr(val text, type_ text) RETURNS text AS $$/* +Build an expression for casting a column in Mathesar, returning the text of that expression. + +We fall back silently to default casting behavior if the mathesar_types namespace is missing. +However, we do throw an error in cases where the schema exists, but the type casting function +doesn't. This is assumed to be an error the user should know about. + +Args: + val: This is quite general, and isn't sanitized in any way. It can be either a literal or a column + identifier, since we want to be able to produce a casting expression in either case. + type_: This type name string must cast properly to a regtype. 
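+
+For example (illustrative; the exact cast function name is determined by
+msar.get_cast_function_name), __msar.build_cast_expr('"total"', 'numeric') might produce
+  mathesar_types.cast_to_numeric("total")
+when the mathesar_types schema exists, and "total"::numeric otherwise.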
+*/
+SELECT CASE
+  WHEN msar.schema_exists('mathesar_types') THEN
+    msar.get_cast_function_name(type_::regtype) || '(' || val || ')'
+  ELSE
+    val || '::' || type_::regtype::text
+END;
+$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION
+msar.build_cast_expr(tab_id regclass, col_id smallint, typ_id regtype) RETURNS text AS $$/*
+Build an expression for casting a column in Mathesar, returning the text of that expression.
+
+We fall back silently to default casting behavior if the mathesar_types namespace is missing.
+However, we do throw an error in cases where the schema exists, but the type casting function
+doesn't. This is assumed to be an error the user should know about.
+
+Args:
+  tab_id: The OID of the table whose column we're casting.
+  col_id: The attnum of the column in the table.
+  typ_id: The OID of the type we will cast to.
+*/
+SELECT CASE
+  WHEN msar.schema_exists('mathesar_types') THEN
+    msar.get_cast_function_name(typ_id)
+    || '('
+    || format('%I', msar.get_column_name(tab_id, col_id))
+    || ')'
+  ELSE
+    format('%I', msar.get_column_name(tab_id, col_id)) || '::' || typ_id::text
+END;
+$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION
+msar.infer_column_data_type(tab_id regclass, col_id smallint) RETURNS regtype AS $$/*
+Infer the best type for a given column.
+
+Note that we currently only try for `text` columns, since we only do this at import. I.e.,
+if the column is some other type we just return that original type.
+
+Args:
+  tab_id: The OID of the table of the column whose type we're inferring.
+  col_id: The attnum of the column whose type we're inferring.
+*/
+DECLARE
+  inferred_type regtype;
+  infer_sequence_raw text[] := ARRAY[
+    'boolean',
+    'date',
+    'numeric',
+    'mathesar_types.mathesar_money',
+    'timestamp without time zone',
+    'timestamp with time zone',
+    'time without time zone',
+    'interval',
+    'mathesar_types.email',
+    'mathesar_types.mathesar_json_array',
+    'mathesar_types.mathesar_json_object',
+    'mathesar_types.uri'
+  ];
+  infer_sequence regtype[];
+  column_nonempty boolean;
+  test_type regtype;
+BEGIN
+  infer_sequence := array_agg(pg_catalog.to_regtype(t))
+    FILTER (WHERE pg_catalog.to_regtype(t) IS NOT NULL)
+    FROM unnest(infer_sequence_raw) AS x(t);
+  EXECUTE format(
+    'SELECT EXISTS (SELECT 1 FROM %1$I.%2$I WHERE %3$I IS NOT NULL)',
+    msar.get_relation_schema_name(tab_id),
+    msar.get_relation_name(tab_id),
+    msar.get_column_name(tab_id, col_id)
+  ) INTO column_nonempty;
+  inferred_type := atttypid FROM pg_catalog.pg_attribute WHERE attrelid=tab_id AND attnum=col_id;
+  IF inferred_type = 'text'::regtype AND column_nonempty THEN
+    FOREACH test_type IN ARRAY infer_sequence
+    LOOP
+      BEGIN
+        EXECUTE format(
+          'EXPLAIN ANALYZE SELECT %1$s FROM %2$I.%3$I',
+          msar.build_cast_expr(tab_id, col_id, test_type),
+          msar.get_relation_schema_name(tab_id),
+          msar.get_relation_name(tab_id)
+        );
+        inferred_type := test_type;
+        EXIT;
+      EXCEPTION WHEN OTHERS THEN
+        RAISE NOTICE 'Test failed: %', format(
+          'EXPLAIN ANALYZE SELECT %1$s FROM %2$I.%3$I',
+          msar.build_cast_expr(tab_id, col_id, test_type),
+          msar.get_relation_schema_name(tab_id),
+          msar.get_relation_name(tab_id)
+        );
+        -- do nothing, just try the next type.
+      END;
+    END LOOP;
+  END IF;
+  RETURN inferred_type;
+END;
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION
+msar.infer_table_column_data_types(tab_id regclass) RETURNS jsonb AS $$/*
+Infer the best type for each column in the table.
+
+Currently we only suggest different types for columns which originate as type `text`.
+
+Args:
+  tab_id: The OID of the table whose columns we're inferring types for.
+
+The response JSON will have attnum keys, and values will be the result of `format_type`
+for the inferred type of each column. Restricted to columns to which the user has access.
+*/
+SELECT jsonb_object_agg(
+  attnum, pg_catalog.format_type(msar.infer_column_data_type(attrelid, attnum), null)
+)
+FROM pg_catalog.pg_attribute
+WHERE
+  attrelid = tab_id
+  AND attnum > 0
+  AND NOT attisdropped
+  AND has_column_privilege(attrelid, attnum, 'SELECT');
+$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION
+__msar.build_col_drop_default_expr(tab_id oid, col_id integer, new_type text, new_default jsonb)
+  RETURNS TEXT AS $$/*
+Build an expression for dropping a column's default, returning the text of that expression.
+
+This function is private, and not general: It builds an expression in the context of the
+msar.process_col_alter_jsonb function and should not otherwise be called independently, since it has
+logic specific to that context. In that setting, we drop the default for the specified column if the
+caller specifies that we're setting a new_default of NULL, or if we're changing the type of the
+column.
+
+Args:
+  tab_id: The OID of the table where the column with the default to be dropped lives.
+  col_id: The attnum of the column with the undesired default.
+  new_type: This gives the function context letting it know whether to drop the default or not. If
+    we are setting a new type for the column, we will always drop the default first.
+  new_default: This also gives us context letting us know whether to drop the default. By setting
+    the 'new_default' to (jsonb) null, the caller specifies that we should drop the
+    column's default.
+*/
+SELECT CASE WHEN new_type IS NOT NULL OR jsonb_typeof(new_default)='null' THEN
+  'ALTER COLUMN ' || quote_ident(msar.get_column_name(tab_id, col_id)) || ' DROP DEFAULT'
+END;
+$$ LANGUAGE SQL;
+
+
+CREATE OR REPLACE FUNCTION
+__msar.build_col_retype_expr(tab_id oid, col_id integer, new_type text) RETURNS text AS $$/*
+Build an expression to change a column's type, returning the text of that expression.
+
+Note that this function wraps the type alteration in a cast expression. If we have the custom
+mathesar_types cast functions available, we prefer those to the default PostgreSQL casting behavior.
+
+Args:
+  tab_id: The OID of the table containing the column whose type we'll alter.
+  col_id: The attnum of the column whose type we'll alter.
+  new_type: The target type to which we'll alter the column.
+*/
+SELECT 'ALTER COLUMN '
+  || quote_ident(msar.get_column_name(tab_id, col_id))
+  || ' TYPE '
+  || new_type
+  || ' USING '
+  || __msar.build_cast_expr(quote_ident(msar.get_column_name(tab_id, col_id)), new_type);
+$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION __msar.build_col_default_expr(
+  tab_id oid,
+  col_id integer,
+  old_default text,
+  new_default jsonb,
+  new_type text
+) RETURNS text AS $$/*
+Build an expression to set a column's default value, returning the text of that expression.
+
+This function is private, and not general. The expression it builds is in the context of the calling
+msar.process_col_alter_jsonb function. In particular, this function can also reset the original
+default after a column type alteration, but cast to the new type of the column. We also avoid
+setting a new default in cases where the new default argument is (sql) NULL, or a JSONB null.
+
+Args:
+  tab_id: The OID of the table containing the column whose default we'll alter.
+  col_id: The attnum of the column whose default we'll alter.
+  old_default: The current default. In some cases in the context of the caller, we want to reset the
+    original default, but cast to a new type.
+  new_default: The new desired default. It's left as JSONB since we are using JSONB 'null' values to
+    represent 'drop the column default'.
+  new_type: The target type to which we'll cast the new default.
+*/
+DECLARE
+  default_expr text;
+  raw_default_expr text;
+BEGIN
+  -- In this case, we assume the intent is to clear out the original default.
+  IF jsonb_typeof(new_default)='null' THEN
+    default_expr := null;
+  -- We get the root JSONB value as text if it exists.
+  ELSEIF new_default #>> '{}' IS NOT NULL THEN
+    default_expr := format('%L', new_default #>> '{}');  -- sanitize since this could be user input.
+  -- At this point, we know we're not setting a new default, or dropping the old one.
+  -- So, we check whether the original default is potentially dynamic, and whether we need to cast
+  -- it to a new type.
+  ELSEIF msar.is_default_possibly_dynamic(tab_id, col_id) AND new_type IS NOT NULL THEN
+    -- We add casting the possibly dynamic expression to the new type as part of the default
+    -- expression in this case.
+    default_expr := __msar.build_cast_expr(old_default, new_type);
+  ELSEIF old_default IS NOT NULL AND new_type IS NOT NULL THEN
+    -- If we arrive here, then we know the old_default is a constant value, and we want to cast the
+    -- old default value to the new type *before* setting it as the new default. This avoids
+    -- building up nested cast functions in the default expression.
+    -- The first step is to execute the cast expression, putting the result into a new variable.
+    EXECUTE format('SELECT %s', __msar.build_cast_expr(old_default, new_type))
+      INTO raw_default_expr;
+    -- Then we format that new variable's value as a literal.
+    default_expr := format('%L', raw_default_expr);
+  END IF;
+  RETURN
+    format('ALTER COLUMN %I SET DEFAULT ', msar.get_column_name(tab_id, col_id)) || default_expr;
+END;
+$$ LANGUAGE plpgsql;
+
+
+CREATE OR REPLACE FUNCTION
+__msar.build_col_not_null_expr(tab_id oid, col_id integer, not_null boolean) RETURNS text AS $$/*
+Build an expression to alter a column's NOT NULL setting, returning the text of that expression.
+
+Args:
+  tab_id: The OID of the table containing the column whose nullability we'll alter.
+  col_id: The attnum of the column whose nullability we'll alter.
+  not_null: If true, we 'SET NOT NULL'. If false, we 'DROP NOT NULL'. If null, we do nothing.
+*/
+SELECT 'ALTER COLUMN '
+  || quote_ident(msar.get_column_name(tab_id, col_id))
+  || CASE WHEN not_null THEN ' SET ' ELSE ' DROP ' END
+  || 'NOT NULL';
+$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION
+__msar.build_col_drop_text(tab_id oid, col_id integer, col_delete boolean) RETURNS text AS $$/*
+Build an expression to drop a column from a table, returning the text of that expression.
+
+Args:
+  tab_id: The OID of the table containing the column we'll drop.
+  col_id: The attnum of the column we'll drop.
+  col_delete: If true, we drop the column. If false or null, we do nothing.
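+
+For example (illustrative), __msar.build_col_drop_text(tab_id, col_id, true) for a column named
+'year' would return: DROP COLUMN "year"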
+*/
+SELECT CASE WHEN col_delete THEN 'DROP COLUMN ' || quote_ident(msar.get_column_name(tab_id, col_id)) END;
+$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION
+msar.process_col_alter_jsonb(tab_id oid, col_alters jsonb) RETURNS text AS $$/*
+Turn a JSONB array representing a set of desired column alterations into a text expression.
+
+Args:
+  tab_id: The OID of the table whose columns we'll alter.
+  col_alters: A JSONB array defining the list of column alterations.
+
+The col_alters JSONB should have the form:
+[
+  {
+    "attnum": <attnum of the column to alter>,
+    "type": <a type object; see msar.build_type_text> (optional),
+    "default": <any value> (optional),
+    "not_null": <boolean> (optional),
+    "delete": <boolean> (optional),
+    "name": <new column name> (optional),
+  },
+  {
+    ...
+  },
+  ...
+]
+
+Notes on the col_alters JSONB
+- For more info about the type object, see the msar.build_type_text function.
+- The "name" key isn't used in this function; it's included here for completeness.
+- A possible 'gotcha' is the "default" key.
+  - If omitted, no change to the default for the given column will occur, other than to cast it to
+    the new type if a type change is specified.
+  - If, on the other hand, the "default" key is set to an explicit value of null, then we will
+    interpret that as a directive to set the column's default to NULL, i.e., we'll drop the current
+    default setting.
+- If the column is a default mathesar ID column, we will silently skip it so it won't be altered.
+*/
+WITH prepped_alters AS (
+  SELECT
+    tab_id,
+    (col_alter_obj ->> 'attnum')::integer AS col_id,
+    msar.build_type_text_complete(col_alter_obj -> 'type', format_type(atttypid, null)) AS new_type,
+    -- We get the old default expression from a catalog table before modifying anything, so we can
+    -- reset it properly if we alter the column type.
+    pg_get_expr(adbin, tab_id) AS old_default,
+    col_alter_obj -> 'default' AS new_default,
+    (col_alter_obj -> 'not_null')::boolean AS not_null,
+    (col_alter_obj -> 'delete')::boolean AS delete_
+  FROM
+    (SELECT tab_id) AS arg,
+    jsonb_array_elements(col_alters) AS t(col_alter_obj)
+    INNER JOIN pg_attribute ON (t.col_alter_obj ->> 'attnum')::smallint=attnum AND tab_id=attrelid
+    LEFT JOIN pg_attrdef ON (t.col_alter_obj ->> 'attnum')::smallint=adnum AND tab_id=adrelid
+  WHERE NOT msar.is_mathesar_id_column(tab_id, (t.col_alter_obj ->> 'attnum')::integer)
+)
+SELECT string_agg(
+  nullif(
+    concat_ws(
+      ', ',
+      __msar.build_col_drop_default_expr(tab_id, col_id, new_type, new_default),
+      __msar.build_col_retype_expr(tab_id, col_id, new_type),
+      __msar.build_col_default_expr(tab_id, col_id, old_default, new_default, new_type),
+      __msar.build_col_not_null_expr(tab_id, col_id, not_null),
+      __msar.build_col_drop_text(tab_id, col_id, delete_)
+    ),
+    ''
+  ),
+  ', '
+)
+FROM prepped_alters;
+$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION
+msar.alter_columns(tab_id oid, col_alters jsonb) RETURNS integer[] AS $$/*
+Alter columns of the given table in bulk, returning the IDs of the columns so altered.
+
+Args:
+  tab_id: The OID of the table whose columns we'll alter.
+  col_alters: A JSONB describing the alterations to make.
+
+For the specification of the col_alters JSONB, see the msar.process_col_alter_jsonb function.
+
+Note that all alterations except renaming are done in bulk, and then all name changes are done one
+at a time afterwards. This is because the SQL design specifies at most one name-changing clause per
+query.
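+
+Example (illustrative; the table OID and attnum are hypothetical):
+  SELECT msar.alter_columns(
+    12345,
+    '[{"attnum": 2, "type": {"name": "numeric", "options": {"precision": 10, "scale": 2}}, "not_null": true}]'
+  );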
+*/
+DECLARE
+  r RECORD;
+  col_alter_str TEXT;
+  description_alter RECORD;
+BEGIN
+  -- Get the string specifying all non-name-change alterations to perform.
+  col_alter_str := msar.process_col_alter_jsonb(tab_id, col_alters);
+
+  -- Perform the non-name-change alterations
+  IF col_alter_str IS NOT NULL THEN
+    PERFORM __msar.exec_ddl(
+      'ALTER TABLE %s %s',
+      __msar.get_qualified_relation_name(tab_id),
+      col_alter_str
+    );
+  END IF;
+
+  -- Here, we perform all description-changing alterations.
+  FOR description_alter IN
+    SELECT
+      (col_alter->>'attnum')::integer AS col_id,
+      col_alter->>'description' AS comment_
+    FROM jsonb_array_elements(col_alters) AS col_alter
+    WHERE __msar.jsonb_key_exists(col_alter, 'description')
+  LOOP
+    PERFORM msar.comment_on_column(
+      tab_id := tab_id,
+      col_id := description_alter.col_id,
+      comment_ := description_alter.comment_
+    );
+  END LOOP;
+
+  -- Here, we perform all name-changing alterations.
+  FOR r IN SELECT attnum, name FROM jsonb_to_recordset(col_alters) AS x(attnum integer, name text)
+  LOOP
+    PERFORM msar.rename_column(tab_id, r.attnum, r.name);
+  END LOOP;
+  RETURN array_agg(x.attnum) FROM jsonb_to_recordset(col_alters) AS x(attnum integer);
+END;
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+
+-- Comment on column -------------------------------------------------------------------------------
+
+
+CREATE OR REPLACE FUNCTION
+__msar.comment_on_column(
+  tab_name text,
+  col_name text,
+  comment_ text
+) RETURNS text AS $$/*
+Change the description of a column, returning the command executed. If comment_ is NULL, the
+column's comment is removed.
+
+Args:
+  tab_name: The name of the table containing the column whose comment we will change.
+  col_name: The name of the column whose comment we'll change.
+  comment_: The new comment. Any quotes or special characters must be escaped.
+*/
+DECLARE
+  comment_or_null text := COALESCE(comment_, 'NULL');
+BEGIN
+  RETURN __msar.exec_ddl(
+    'COMMENT ON COLUMN %s.%s IS %s',
+    tab_name,
+    col_name,
+    comment_or_null
+  );
+END;
+$$ LANGUAGE plpgsql;
+
+
+CREATE OR REPLACE FUNCTION
+msar.comment_on_column(
+  sch_name text,
+  tab_name text,
+  col_name text,
+  comment_ text
+) RETURNS text AS $$/*
+Change the description of a column, returning the command executed.
+
+Args:
+  sch_name: The schema of the table whose column's comment we will change.
+  tab_name: The name of the table whose column's comment we will change.
+  col_name: The name of the column whose comment we will change.
+  comment_: The new comment.
+*/
+SELECT __msar.comment_on_column(
+  __msar.build_qualified_name_sql(sch_name, tab_name),
+  quote_ident(col_name),
+  quote_literal(comment_)
+);
+$$ LANGUAGE SQL;
+
+
+CREATE OR REPLACE FUNCTION
+__msar.comment_on_column(
+  tab_id oid,
+  col_id integer,
+  comment_ text
+) RETURNS text AS $$/*
+Change the description of a column, returning the command executed.
+
+Args:
+  tab_id: The OID of the table containing the column whose comment we will change.
+  col_id: The ATTNUM of the column whose comment we will change.
+  comment_: The new comment.
+*/
+SELECT __msar.comment_on_column(
+  __msar.get_qualified_relation_name(tab_id),
+  quote_ident(msar.get_column_name(tab_id, col_id)),
+  comment_
+);
+$$ LANGUAGE SQL;
+
+
+CREATE OR REPLACE FUNCTION
+msar.comment_on_column(
+  tab_id oid,
+  col_id integer,
+  comment_ text
+) RETURNS text AS $$/*
+Change the description of a column, returning the command executed.
+
+Args:
+  tab_id: The OID of the table containing the column whose comment we will change.
+  col_id: The ATTNUM of the column whose comment we will change.
+  comment_: The new comment.
+*/
+SELECT __msar.comment_on_column(
+  tab_id,
+  col_id,
+  quote_literal(comment_)
+);
+$$ LANGUAGE SQL;
+
+
+----------------------------------------------------------------------------------------------------
+----------------------------------------------------------------------------------------------------
+-- MATHESAR LINK FUNCTIONS
+--
+-- Add a link to the table.
+----------------------------------------------------------------------------------------------------
+----------------------------------------------------------------------------------------------------
+
+-- Create a Many-to-One or a One-to-One link --------------------------------------------------------
+
+
+CREATE OR REPLACE FUNCTION
+msar.add_foreign_key_column(
+  col_name text,
+  rel_id oid,
+  frel_id oid,
+  unique_link boolean DEFAULT false
+) RETURNS smallint AS $$/*
+Create a many-to-one or a one-to-one link between tables, returning the attnum of the newly created
+foreign key column.
+
+Args:
+  col_name: Name of the new column to be created in the referrer table, unquoted.
+  rel_id: The OID of the referrer table, named for conrelid in the pg_constraint table.
+  frel_id: The OID of the referent table, named for confrelid in the pg_constraint table.
+  unique_link: Whether to make the link one-to-one instead of many-to-one.
+*/
+DECLARE
+  pk_col_id smallint;
+  col_defs jsonb;
+  added_col_ids smallint[];
+  con_defs jsonb;
+BEGIN
+  pk_col_id := msar.get_pk_column(frel_id);
+  col_defs := jsonb_build_array(
+    jsonb_build_object(
+      'name', col_name,
+      'type', jsonb_build_object('name', msar.get_column_type(frel_id, pk_col_id))
+    )
+  );
+  added_col_ids := msar.add_columns(rel_id, col_defs, false);
+  con_defs := jsonb_build_array(
+    jsonb_build_object(
+      'name', null,
+      'type', 'f',
+      'columns', added_col_ids,
+      'deferrable', false,
+      'fkey_relation_id', frel_id::integer,
+      'fkey_columns', jsonb_build_array(pk_col_id)
+    )
+  );
+  IF unique_link THEN
+    con_defs := jsonb_build_array(
+      jsonb_build_object(
+        'name', null,
+        'type', 'u',
+        'columns', added_col_ids)
+    ) || con_defs;
+  END IF;
+  PERFORM msar.add_constraints(rel_id, con_defs);
+  RETURN added_col_ids[1];
+END;
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+
+-- Create a Many-to-Many link ------------------------------------------------------------------------
+
+
+CREATE OR REPLACE FUNCTION
+msar.add_mapping_table(
+  sch_id oid,
+  tab_name text,
+  mapping_columns jsonb
+) RETURNS oid AS $$/*
+Create a many-to-many link between tables, returning the OID of the newly created mapping table.
+
+Args:
+  sch_id: The OID of the schema in which the new mapping table is to be created.
+  tab_name: Name of the mapping table to be created.
+  mapping_columns: An array of objects giving the foreign key columns for the new table.
+
+The elements of the mapping_columns array must have the form
+  {"column_name": <name of the fkey column>, "referent_table_oid": <OID of the table it references>}
+
+*/
+DECLARE
+  added_table_id oid;
+BEGIN
+  added_table_id := msar.add_mathesar_table(sch_id, tab_name, NULL, NULL, NULL, NULL) ->> 'oid';
+  PERFORM msar.add_foreign_key_column(column_name, added_table_id, referent_table_oid)
+  FROM jsonb_to_recordset(mapping_columns) AS x(column_name text, referent_table_oid oid);
+  RETURN added_table_id;
+END;
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+
+----------------------------------------------------------------------------------------------------
+----------------------------------------------------------------------------------------------------
+-- TABLE SPLITTING FUNCTIONS
+--
+-- Functions to extract columns from a table
+----------------------------------------------------------------------------------------------------
+----------------------------------------------------------------------------------------------------
+
+
+CREATE OR REPLACE FUNCTION
+msar.extract_columns_from_table(
+  tab_id oid, col_ids integer[], new_tab_name text, fk_col_name text
+) RETURNS jsonb AS $f$/*
+Extract columns from a table to create a new table, linked by a foreign key.
+
+Args:
+  tab_id: The OID of the table whose columns we'll extract.
+  col_ids: An array of the attnums of the columns to extract.
+  new_tab_name: The name of the new table to be made from the extracted columns, unquoted.
+  fk_col_name: The name to give the new foreign key column in the remainder table (optional).
+
+The extraction takes a set of columns from the table, and creates a new table from the set of
+*distinct* tuples those columns comprise. We also add a new foreign key column to the original
+(remainder) table that links it to the new extracted table so they can be easily rejoined. The
+extracted columns are removed from the remainder table.
+*/
+DECLARE
+  extracted_col_defs CONSTANT jsonb := msar.get_extracted_col_def_jsonb(tab_id, col_ids);
+  extracted_con_defs CONSTANT jsonb := msar.get_extracted_con_def_jsonb(tab_id, col_ids);
+  fkey_name CONSTANT text := msar.build_unique_fkey_column_name(tab_id, fk_col_name, new_tab_name);
+  extracted_table_id integer;
+  fkey_attnum integer;
+BEGIN
+  -- Begin by creating a new table with column definitions matching the extracted columns.
+  extracted_table_id := msar.add_mathesar_table(
+    msar.get_relation_namespace_oid(tab_id),
+    new_tab_name,
+    extracted_col_defs,
+    extracted_con_defs,
+    NULL,  -- own_id is set to NULL so the current role will be the owner of the extracted table.
+    format('Extracted from %s', __msar.get_qualified_relation_name(tab_id))
+  ) ->> 'oid';
+  -- Create a new fkey column and foreign key linking the original table to the extracted one.
+  fkey_attnum := msar.add_foreign_key_column(fkey_name, tab_id, extracted_table_id);
+  -- Insert the data from the original table's columns into the extracted columns, and add
+  -- appropriate fkey values to the new fkey column in the original table to give the proper
+  -- mapping.
+  PERFORM __msar.exec_ddl($t$
+    WITH fkey_cte AS (
+      SELECT id, %1$s, dense_rank() OVER (ORDER BY %1$s) AS __msar_tmp_id
+      FROM %2$s
+    ), ins_cte AS (
+      INSERT INTO %3$s (%1$s)
+      SELECT DISTINCT %1$s FROM fkey_cte ORDER BY %1$s
+    )
+    UPDATE %2$s SET %4$I=__msar_tmp_id FROM fkey_cte WHERE
+      %2$s.id=fkey_cte.id
+    $t$,
+    -- %1$s This is a comma separated string of the extracted column names
+    string_agg(quote_ident(col_def ->> 'name'), ', '),
+    -- %2$s This is the name of the original (remainder) table
+    __msar.get_qualified_relation_name(tab_id),
+    -- %3$s This is the new extracted table name
+    __msar.get_qualified_relation_name(extracted_table_id),
+    -- %4$I This is the name of the fkey column in the remainder table.
+    fkey_name
+  ) FROM jsonb_array_elements(extracted_col_defs) AS col_def;
+  -- Drop the original versions of the extracted columns from the original table.
+  PERFORM msar.drop_columns(tab_id, variadic col_ids);
+  -- In case the user wanted to give the fkey column a name matching one of the extracted columns,
+  -- perform that operation now (since the original will by now have been dropped from the original
+  -- table).
+  IF fk_col_name IS NOT NULL AND fk_col_name IN (
+    SELECT col_def ->> 'name'
+    FROM jsonb_array_elements(extracted_col_defs) AS col_def
+  ) THEN
+    PERFORM msar.rename_column(tab_id, fkey_attnum, fk_col_name);
+  END IF;
+  RETURN jsonb_build_array(extracted_table_id, fkey_attnum);
+END;
+$f$ LANGUAGE plpgsql;
+
+
+----------------------------------------------------------------------------------------------------
+----------------------------------------------------------------------------------------------------
+-- COLUMN MOVING FUNCTIONS
+--
+-- Functions to move columns between linked tables
+----------------------------------------------------------------------------------------------------
+----------------------------------------------------------------------------------------------------
+
+
+CREATE OR REPLACE FUNCTION
+msar.build_all_columns_expr(tab_id regclass) RETURNS text AS $$/*
+Build a comma-separated list of all non-dropped user columns of the given table, with each column
+qualified by schema and table name and aliased by its own name.
+*/
+SELECT string_agg(
+  format(
+    '%1$I.%2$I.%3$I AS %3$I',
+    msar.get_relation_schema_name(tab_id),
+    msar.get_relation_name(tab_id),
+    attname
+  ), ', '
+)
+FROM pg_catalog.pg_attribute
+WHERE
+  attrelid = tab_id
+  AND attnum > 0
+  AND NOT attisdropped;
+$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION
+msar.build_columns_expr(tab_id regclass, col_ids smallint[]) RETURNS text AS $$/*
+Build a comma-separated list of the given columns of the given table, with each column qualified
+by schema and table name and aliased by its own name.
+*/
+SELECT string_agg(
+  format(
+    '%1$I.%2$I.%3$I AS %3$I',
+    msar.get_relation_schema_name(tab_id),
+    msar.get_relation_name(tab_id),
+    attname
+  ), ', '
+)
+FROM pg_catalog.pg_attribute JOIN unnest(col_ids) x(a) ON attnum = x.a
+WHERE
+  attrelid = tab_id;
+$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION
+msar.build_unqualified_columns_expr(tab_id regclass, col_ids smallint[]) RETURNS text AS $$/*
+Build a comma-separated list of the quoted, unqualified names of the given columns.
+*/
+SELECT string_agg(format('%I', attname), ', ')
+FROM pg_catalog.pg_attribute JOIN unnest(col_ids) x(a) ON attnum = x.a
+WHERE
+  attrelid = tab_id;
+$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION
+msar.get_other_column_ids(tab_id regclass, col_ids smallint[]) RETURNS smallint[] AS $$/*
+Return the attnums of all non-dropped user columns of the given table except those given.
+*/
+SELECT array_agg(attnum)
+FROM pg_catalog.pg_attribute
+WHERE
+  attrelid = tab_id
+  AND attnum > 0
+  AND NOT attisdropped
+  AND attnum <> all(col_ids);
+$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION msar.build_source_update_move_cols_equal_expr(
+  source_tab_id regclass,
+  move_col_ids smallint[],
+  cte_name text
+) RETURNS text AS $$/*
+Build an equality expression between the moved columns of the source table and the corresponding
+columns of the named CTE, for use when updating the source table after moving columns.
+*/
+SELECT string_agg(
+  format(
+    -- TODO should be IS NOT DISTINCT FROM
+    '%1$I.%2$I.%3$I = %4$I.%3$I',
+    msar.get_relation_schema_name(source_tab_id),
+    msar.get_relation_name(source_tab_id),
+    attname,
+    cte_name
+  ), ' AND '
+)
+FROM pg_catalog.pg_attribute JOIN unnest(move_col_ids) x(a) ON attnum = x.a
+WHERE
+  attrelid = source_tab_id;
+$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION msar.build_source_update_cte_join_condition_expr(
+  target_tab_id regclass,
+  target_join_col_id smallint,
+  added_col_ids smallint[],
+  update_target_cte_name text,
+  insert_cte_name text
+) RETURNS text AS $$/*
+Build the join condition between the update and insert CTEs used when moving columns, matching on
+all target-table columns other than the added columns and the joining column.
+*/
+SELECT 'ON ' || string_agg(
+  format(
+    '%1$I.%3$I IS NOT DISTINCT FROM %2$I.%3$I',
+    update_target_cte_name,
+    insert_cte_name,
+    attname
+  ), ' AND '
+)
+FROM
+  pg_catalog.pg_attribute
+  JOIN unnest(msar.get_other_column_ids(target_tab_id, added_col_ids || target_join_col_id)) x(a)
+    ON attnum = x.a
+WHERE
+  attrelid = target_tab_id;
+$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION
+msar.move_columns_to_referenced_table(
+  source_tab_id regclass,
+  target_tab_id regclass,
+  move_col_ids smallint[]
+) RETURNS void AS $$/*
+Move the given columns from the source table to the referenced (target) table, merging duplicate
+rows in the target and repointing the source table's foreign key values as needed.
+
+Args:
+  source_tab_id: The OID of the table the columns are moved from.
+  target_tab_id: The OID of the table the columns are moved to.
+  move_col_ids: The attnums of the columns to move.
+*/
+DECLARE
+  source_join_col_id smallint;
+  target_join_col_id smallint;
+  preexisting_col_expr CONSTANT text := msar.build_all_columns_expr(target_tab_id);
+  move_col_expr CONSTANT text := msar.build_columns_expr(source_tab_id, move_col_ids);
+  move_col_defs CONSTANT jsonb := msar.get_extracted_col_def_jsonb(source_tab_id, move_col_ids);
+  move_con_defs CONSTANT jsonb := msar.get_extracted_con_def_jsonb(source_tab_id, move_col_ids);
+  added_col_ids smallint[];
+BEGIN
+  -- TODO Add a custom validator that throws pretty errors in these scenarios:
+  --   test to make sure no multi-col fkeys reference the moved columns
+  --   just throw error if _any_ multicol constraint references the moved columns.
+  --   check behavior if one of the moving columns is referenced by another table (should raise)
+  SELECT conkey, confkey INTO source_join_col_id, target_join_col_id
+  FROM msar.get_fkey_map_table(source_tab_id)
+  WHERE target_oid = target_tab_id;
+  IF move_col_ids @> ARRAY[source_join_col_id] THEN
+    RAISE EXCEPTION 'The joining column cannot be moved.';
+  END IF;
+  added_col_ids := msar.add_columns(target_tab_id, move_col_defs, true);
+  EXECUTE format(
+    $q$WITH merged_cte AS (
+      SELECT DISTINCT %1$s, %2$s
+      FROM %3$I.%4$I JOIN %6$I.%7$I ON %3$I.%4$I.%5$I = %6$I.%7$I.%8$I
+    ), row_numbered_cte AS (
+      SELECT *, row_number() OVER (PARTITION BY %8$I ORDER BY %9$s) AS __msar_row_number
+      FROM merged_cte
+    ), update_target_cte AS (
+      UPDATE %6$I.%7$I SET (%9$s) = (
+        SELECT %9$s
+        FROM row_numbered_cte
+        WHERE row_numbered_cte.%8$I=%6$I.%7$I.%8$I
+        AND __msar_row_number = 1
+      )
+      RETURNING *
+    ), insert_cte AS (
+      INSERT INTO %6$I.%7$I (%10$s)
+      SELECT %10$s FROM row_numbered_cte
+      WHERE __msar_row_number <> 1
+      RETURNING *
+    )
+    UPDATE %3$I.%4$I SET %5$I = insert_cte.%8$I
+    FROM update_target_cte JOIN insert_cte %11$s
+    WHERE %3$I.%4$I.%5$I = update_target_cte.%8$I AND %12$s
+    $q$,
+    preexisting_col_expr,
+    move_col_expr,
+    msar.get_relation_schema_name(source_tab_id),
+    msar.get_relation_name(source_tab_id),
+    msar.get_column_name(source_tab_id, source_join_col_id),
+    msar.get_relation_schema_name(target_tab_id),
+    msar.get_relation_name(target_tab_id),
+    msar.get_column_name(target_tab_id, target_join_col_id),
+    msar.build_unqualified_columns_expr(source_tab_id, move_col_ids),
+    msar.build_unqualified_columns_expr(
+      target_tab_id, msar.get_other_column_ids(target_tab_id, ARRAY[target_join_col_id])
+    ),
+    msar.build_source_update_cte_join_condition_expr(
+      target_tab_id, target_join_col_id, added_col_ids, 'update_target_cte', 'insert_cte'
+    ),
+    msar.build_source_update_move_cols_equal_expr(source_tab_id, move_col_ids, 'insert_cte')
+  );
+  PERFORM msar.add_constraints(target_tab_id, move_con_defs);
+  PERFORM msar.drop_columns(source_tab_id, variadic move_col_ids);
+END;
+$$ LANGUAGE plpgsql;
+
+
+----------------------------------------------------------------------------------------------------
+----------------------------------------------------------------------------------------------------
+-- DQL FUNCTIONS
+--
+-- This set of functions is for getting records from Python.
+----------------------------------------------------------------------------------------------------
+----------------------------------------------------------------------------------------------------
+
+-- Data type formatting functions
+
+
+CREATE OR REPLACE FUNCTION msar.format_data(val date) RETURNS text AS $$
+SELECT to_char(val, 'YYYY-MM-DD AD');
+$$ LANGUAGE SQL IMMUTABLE RETURNS NULL ON NULL INPUT PARALLEL SAFE;
+
+
+CREATE OR REPLACE FUNCTION msar.format_data(val time without time zone) RETURNS text AS $$
+SELECT concat(to_char(val, 'HH24:MI'), ':', to_char(date_part('seconds', val), 'FM00.0999999999'));
+$$ LANGUAGE SQL IMMUTABLE RETURNS NULL ON NULL INPUT PARALLEL SAFE;
+
+
+CREATE OR REPLACE FUNCTION msar.format_data(val time with time zone) RETURNS text AS $$
+SELECT CASE
+  WHEN date_part('timezone_hour', val) = 0 AND date_part('timezone_minute', val) = 0
+    THEN concat(
+      to_char(date_part('hour', val), 'FM00'), ':', to_char(date_part('minute', val), 'FM00'),
+      ':', to_char(date_part('seconds', val), 'FM00.0999999999'), 'Z'
+    )
+  ELSE
+    concat(
+      to_char(date_part('hour', val), 'FM00'), ':', to_char(date_part('minute', val), 'FM00'),
+      ':', to_char(date_part('seconds', val), 'FM00.0999999999'),
+      to_char(date_part('timezone_hour', val), 'S00'), ':',
+      ltrim(to_char(date_part('timezone_minute', val), '00'), '+- ')
+    )
+END;
+$$ LANGUAGE SQL IMMUTABLE RETURNS NULL ON NULL INPUT PARALLEL SAFE;
+
+
+CREATE OR REPLACE FUNCTION msar.format_data(val timestamp without time zone) RETURNS text AS $$
+SELECT
+  concat(
+    to_char(val, 'YYYY-MM-DD"T"HH24:MI'),
+    ':', to_char(date_part('seconds', val), 'FM00.0999999999'),
+    to_char(val, ' BC')
+  );
+$$ LANGUAGE SQL IMMUTABLE RETURNS NULL ON NULL INPUT PARALLEL SAFE;
+
+
+CREATE OR REPLACE FUNCTION msar.format_data(val timestamp with time zone) RETURNS text AS $$
+SELECT CASE
+  WHEN date_part('timezone_hour', val) = 0 AND date_part('timezone_minute', val) = 0
+    THEN concat(
+      to_char(val, 'YYYY-MM-DD"T"HH24:MI'),
+      ':', to_char(date_part('seconds', val), 'FM00.0999999999'), 'Z', to_char(val, ' BC')
+    )
+  ELSE
+    concat(
+      to_char(val, 'YYYY-MM-DD"T"HH24:MI'),
+      ':', to_char(date_part('seconds', val), 'FM00.0999999999'),
+      to_char(date_part('timezone_hour', val), 'S00'),
+      ':', ltrim(to_char(date_part('timezone_minute', val), '00'), '+- '), to_char(val, ' BC')
+    )
+END;
+$$ LANGUAGE SQL IMMUTABLE RETURNS NULL ON NULL INPUT PARALLEL SAFE;
+
+
+CREATE OR REPLACE FUNCTION msar.format_data(val interval) RETURNS text AS $$
+SELECT concat(
+  to_char(val, 'PFMYYYY"Y"FMMM"M"FMDD"D""T"FMHH24"H"FMMI"M"'), date_part('seconds', val), 'S'
+);
+$$ LANGUAGE SQL IMMUTABLE RETURNS NULL ON NULL INPUT PARALLEL SAFE;
+
+
+CREATE OR REPLACE FUNCTION msar.format_data(val anyelement) RETURNS anyelement AS $$
+SELECT val;
+$$ LANGUAGE SQL IMMUTABLE RETURNS NULL ON NULL INPUT PARALLEL SAFE;
+
+
+DROP TABLE IF EXISTS msar.expr_templates;
+CREATE TABLE msar.expr_templates (expr_key text PRIMARY KEY, expr_template text);
+INSERT INTO msar.expr_templates VALUES
+  -- basic composition operators
+  ('and', '(%s) AND (%s)'),
+  ('or', '(%s) OR (%s)'),
+  -- general comparison operators
+  ('equal', '(%s) = (%s)'),
+  ('lesser', '(%s) < (%s)'),
+  ('greater', '(%s) > (%s)'),
+  ('lesser_or_equal', '(%s) <= (%s)'),
+  ('greater_or_equal', '(%s) >= (%s)'),
+  ('null', '(%s) IS NULL'),
+  ('not_null', '(%s) IS NOT NULL'),
+  -- string specific filters
+  ('contains_case_insensitive', 'strpos(lower(%s), lower(%s))::boolean'),
+  ('starts_with_case_insensitive', 'starts_with(lower(%s), lower(%s))'),
+  ('contains', 'strpos((%s), (%s))::boolean'),
+  ('starts_with', 'starts_with((%s), (%s))'),
+  -- json(b) filters and expressions
+  ('json_array_length', 'jsonb_array_length((%s)::jsonb)'),
+  ('json_array_contains', '(%s)::jsonb @> (%s)::jsonb'),
+  ('element_in_json_array_untyped', '(%s)::text IN (SELECT jsonb_array_elements_text(%s))'),
+  ('convert_to_json', 'to_jsonb(%s)'),
+  -- date part extractors
+  ('truncate_to_year', 'to_char((%s)::date, ''YYYY AD'')'),
+  ('truncate_to_month', 'to_char((%s)::date, ''YYYY-MM AD'')'),
+  ('truncate_to_day', 'to_char((%s)::date, ''YYYY-MM-DD AD'')'),
+  -- URI part getters
+  ('uri_scheme', 'mathesar_types.uri_scheme(%s)'),
+  ('uri_authority', 'mathesar_types.uri_authority(%s)'),
+  -- Email part getters
+  ('email_domain', 'mathesar_types.email_domain_name(%s)'),
+  -- Data formatter which is sometimes useful in comparison
+  ('format_data', 'msar.format_data(%s)')
+;
+
+CREATE OR REPLACE FUNCTION msar.build_expr(rel_id oid, tree jsonb) RETURNS text AS $$
+SELECT CASE tree ->> 'type'
+  WHEN 'literal' THEN format('%L', tree ->> 'value')
+  WHEN 'attnum' THEN format('%I', msar.get_column_name(rel_id, (tree ->> 'value')::smallint))
+  ELSE
+    format(max(expr_template), VARIADIC array_agg(msar.build_expr(rel_id, inner_tree)))
+END
+FROM jsonb_array_elements(tree -> 'args') inner_tree, msar.expr_templates
+WHERE tree ->> 'type' = expr_key
+$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION msar.build_where_clause(rel_id oid, tree jsonb) RETURNS text AS $$
+SELECT 'WHERE ' || msar.build_expr(rel_id, tree);
+$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION
+msar.sanitize_direction(direction text) RETURNS text AS $$/*
+Map the given direction string to 'ASC' or 'DESC', returning NULL for any other input.
+*/
+SELECT CASE lower(direction)
+  WHEN 'asc' THEN 'ASC'
+  WHEN 'desc' THEN 'DESC'
+END;
+$$ LANGUAGE SQL IMMUTABLE RETURNS NULL ON NULL INPUT PARALLEL SAFE;
+
+
+CREATE OR REPLACE FUNCTION msar.get_pkey_order(tab_id oid) RETURNS jsonb AS $$
+SELECT jsonb_agg(jsonb_build_object('attnum', attnum, 'direction', 'asc'))
+FROM pg_constraint, LATERAL unnest(conkey) attnum
+WHERE contype='p' AND conrelid=tab_id AND has_column_privilege(tab_id, attnum, 'SELECT');
+$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION msar.get_total_order(tab_id oid) RETURNS jsonb AS $$
+WITH orderable_cte AS (
+  SELECT attnum
+  FROM pg_catalog.pg_attribute
+    INNER JOIN pg_catalog.pg_cast ON atttypid=castsource
+    INNER JOIN pg_catalog.pg_operator ON casttarget=oprleft
+  WHERE
+    attrelid = tab_id
+    AND attnum > 0
+    AND NOT attisdropped
+    AND castcontext = 'i'
+    AND oprname = '<'
+  UNION SELECT attnum
+  FROM pg_catalog.pg_attribute
+    INNER JOIN pg_catalog.pg_operator ON atttypid=oprleft
+  WHERE
+    attrelid = tab_id
+    AND attnum > 0
+    AND NOT attisdropped
+    AND oprname = '<'
+  ORDER BY attnum
+)
+SELECT COALESCE(jsonb_agg(jsonb_build_object('attnum', attnum, 'direction', 'asc')), '[]'::jsonb)
+FROM orderable_cte
+-- This privilege check is redundant in context, but may be useful for other callers.
+WHERE has_column_privilege(tab_id, attnum, 'SELECT');
+$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION
+msar.build_total_order_expr(tab_id oid, order_ jsonb) RETURNS text AS $$/*
+Build a deterministic order expression for the given table and order JSON.
+
+Args:
+  tab_id: The OID of the table whose columns we'll order by.
+  order_: A JSONB array defining any desired ordering of columns.
+*/
+SELECT string_agg(format('%I %s', attnum, msar.sanitize_direction(direction)), ', ')
+FROM jsonb_to_recordset(
+  COALESCE(
+    COALESCE(order_, '[]'::jsonb) || msar.get_pkey_order(tab_id),
+    COALESCE(order_, '[]'::jsonb) || msar.get_total_order(tab_id)
+  )
+) AS x(attnum smallint, direction text)
+WHERE has_column_privilege(tab_id, attnum, 'SELECT');
+$$ LANGUAGE SQL STABLE;
+
+
+CREATE OR REPLACE FUNCTION
+msar.build_order_by_expr(tab_id oid, order_ jsonb) RETURNS text AS $$/*
+Build an ORDER BY expression for the given table and order JSON.
+
+The ORDER BY expression will refer to columns by their attnum. This is designed to work together
+with `msar.build_selectable_column_expr`. It will only use the columns to which the user has access.
+Finally, this function will append either a primary key, or all columns to the produced ORDER BY so
+the resulting ordering is totally defined (i.e., deterministic).
+
+Args:
+  tab_id: The OID of the table whose columns we'll order by.
+  order_: A JSONB array defining any desired ordering of columns.
+*/
+SELECT 'ORDER BY ' || msar.build_total_order_expr(tab_id, order_)
+$$ LANGUAGE SQL STABLE;
+
+
+CREATE OR REPLACE FUNCTION
+msar.build_grouping_columns_expr(tab_id oid, group_ jsonb) RETURNS TEXT AS $$/*
+Build a column expression for use in grouping window functions.
+
+Args:
+  tab_id: The OID of the table whose records we're grouping.
+  group_: A grouping definition.
+
+The group_ object should have the form
+  {
+    "columns": [<attnum>, <attnum>, ...],
+    "preproc": [<expr_key>, <expr_key>, ...]
+  }
+
+The items in the preproc array should be keys appearing in the
+`expr_templates` table. The corresponding column will be wrapped
+in the preproc function before grouping.
+*/
+SELECT string_agg(
+  COALESCE(
+    format(expr_template, quote_ident(msar.get_column_name(tab_id, col_id::smallint))),
+    quote_ident(msar.get_column_name(tab_id, col_id::smallint))
+  ), ', ' ORDER BY ordinality
+)
+FROM msar.expr_templates RIGHT JOIN ROWS FROM(
+  jsonb_array_elements_text(group_ -> 'columns'),
+  jsonb_array_elements_text(group_ -> 'preproc')
+) WITH ORDINALITY AS x(col_id, preproc) ON expr_key = preproc
+WHERE has_column_privilege(tab_id, col_id::smallint, 'SELECT');
+$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION
+msar.build_group_id_expr(tab_id oid, group_ jsonb) RETURNS TEXT AS $$/*
+Build an expression to define an id value for each group.
+*/
+SELECT 'dense_rank() OVER (ORDER BY ' || msar.build_grouping_columns_expr(tab_id, group_) || ')';
+$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION
+msar.build_group_count_expr(tab_id oid, group_ jsonb) RETURNS TEXT AS $$/*
+Build an expression that adds a column with a count for each group.
+*/
+SELECT 'count(1) OVER (PARTITION BY ' || msar.build_grouping_columns_expr(tab_id, group_) || ')';
+$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION
+msar.build_grouping_expr(tab_id oid, group_ jsonb) RETURNS TEXT AS $$/*
+Build an expression composed of an id and count for each group.
+
+A group is defined by distinct combinations of the (potentially transformed by preproc functions)
+columns passed in `group_`.
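+
+Example group_ object (illustrative; the attnums are hypothetical, and the preproc keys must
+appear in msar.expr_templates):
+  {"columns": [2, 4], "preproc": ["truncate_to_month", "format_data"]}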
+*/
+SELECT concat(
+  COALESCE(msar.build_group_id_expr(tab_id, group_), 'NULL'), ' AS __mathesar_gid, ',
+  COALESCE(msar.build_group_count_expr(tab_id, group_), 'NULL'), ' AS __mathesar_gcount'
+);
+$$ LANGUAGE SQL STABLE;
+
+
+CREATE OR REPLACE FUNCTION
+msar.build_results_jsonb_expr(tab_id oid, cte_name text, order_ jsonb) RETURNS TEXT AS $$/*
+Build an SQL expression string that, when added to the record listing query, produces a JSON array
+with the records resulting from the request.
+*/
+SELECT format(
+  'coalesce(jsonb_agg(json_build_object('
+  || string_agg(format('%1$L, %2$I.%1$I', attnum, cte_name), ', ')
+  || ') %1$s), jsonb_build_array())',
+  msar.build_order_by_expr(tab_id, order_)
+)
+FROM pg_catalog.pg_attribute
+WHERE
+  attrelid = tab_id
+  AND attnum > 0
+  AND NOT attisdropped
+  AND has_column_privilege(attrelid, attnum, 'SELECT');
+$$ LANGUAGE SQL STABLE;
+
+
+CREATE OR REPLACE FUNCTION
+msar.build_groups_cte_expr(tab_id oid, cte_name text, group_ jsonb) RETURNS TEXT AS $$/*
+Build the body of a CTE that aggregates group ids, counts, and result indices for the given
+grouping definition.
+*/
+SELECT format(
+  $gj$
+    __mathesar_gid AS id,
+    __mathesar_gcount AS count,
+    jsonb_build_object(%1$s) AS results_eq,
+    jsonb_agg(__mathesar_result_idx) AS result_indices
+  FROM %2$I
+  GROUP BY id, count, results_eq
+  $gj$,
+  string_agg(
+    format(
+      '%1$L, %2$s',
+      col_id,
+      COALESCE(
+        format(expr_template, quote_ident(cte_name) || '.' || quote_ident(col_id)),
+        quote_ident(cte_name) || '.' || quote_ident(col_id)
+      )
+    ),
+    ', ' ORDER BY ordinality
+  ),
+  cte_name
+)
+FROM msar.expr_templates RIGHT JOIN ROWS FROM(
+  jsonb_array_elements_text(group_ -> 'columns'),
+  jsonb_array_elements_text(group_ -> 'preproc')
+) WITH ORDINALITY AS x(col_id, preproc) ON expr_key = preproc
+WHERE has_column_privilege(tab_id, col_id::smallint, 'SELECT');
+$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION
+msar.build_grouping_results_jsonb_expr(tab_id oid, cte_name text, group_ jsonb) RETURNS TEXT AS $$/*
+Build an SQL expression string that, when added to the record listing query, produces a JSON array
+with the groups resulting from the request.
+*/
+SELECT format(
+  $gj$
+    jsonb_build_object(
+      'columns', %2$L::jsonb,
+      'preproc', %3$L::jsonb,
+      'groups', jsonb_agg(
+        DISTINCT jsonb_build_object(
+          'id', %1$I.id,
+          'count', %1$I.count,
+          'results_eq', %1$I.results_eq,
+          'result_indices', %1$I.result_indices
+        )
+      )
+    )
+  $gj$,
+  cte_name,
+  group_ ->> 'columns',
+  group_ ->> 'preproc'
+)
+$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION
+msar.build_selectable_column_expr(tab_id oid) RETURNS text AS $$/*
+Build an SQL select-target expression of only columns to which the user has access.
+
+Given columns with attnums 2, 3, and 4, and assuming the user has access only to columns 2 and 4,
+this function will return an expression of the form:
+
+  column_name AS "2", another_column_name AS "4"
+
+Args:
+  tab_id: The OID of the table containing the columns to select.
+*/
+SELECT string_agg(format('msar.format_data(%I) AS %I', attname, attnum), ', ')
+FROM pg_catalog.pg_attribute
+WHERE
+  attrelid = tab_id
+  AND attnum > 0
+  AND NOT attisdropped
+  AND has_column_privilege(attrelid, attnum, 'SELECT');
+$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION msar.get_default_summary_column(tab_id oid) RETURNS smallint AS $$/*
+Choose a column to use for summarizing rows of a table.
+
+If a string type column exists, we choose the one with a minimal attnum. If no such column
+exists, we just return the column (of any type) with minimum attnum.
+
+Only columns to which the user has access are returned.
+
+Args:
+  tab_id: The OID of the table for which we're finding a good summary column.
+*/
+SELECT attnum
+FROM pg_catalog.pg_attribute pga JOIN pg_catalog.pg_type pgt ON pga.atttypid = pgt.oid
+WHERE pga.attrelid = tab_id
+  AND pga.attnum > 0
+  AND NOT pga.attisdropped
+  AND has_column_privilege(pga.attrelid, pga.attnum, 'SELECT')
+ORDER BY (CASE WHEN pgt.typcategory='S' THEN 0 ELSE 1 END), pga.attnum
+LIMIT 1;
+$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION msar.build_summary_expr(tab_id oid) RETURNS TEXT AS $$/*
+Given a table, return an SQL expression that will build a summary for each row of the table.
+
+Args:
+  tab_id: The OID of the table being summarized.
+*/
+SELECT format(
+  'msar.format_data(%I)::text',
+  msar.get_column_name(tab_id, msar.get_default_summary_column(tab_id))
+);
+$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION msar.build_summary_cte_expr_for_table(tab_id oid) RETURNS TEXT AS $$/*
+Build an SQL text expression defining a sequence of CTEs that give summaries for linked records.
+
+This summary amounts to just the first string-like column value for that linked record.
+
+Args:
+  tab_id: The table for whose fkey values' linked records we'll get summaries.
+*/
+WITH fkey_map_cte AS (SELECT * FROM msar.get_fkey_map_table(tab_id))
+SELECT ', '
+  || NULLIF(
+    concat_ws(', ',
+      'summary_cte_self AS (SELECT msar.format_data('
+      || quote_ident(msar.get_column_name(tab_id, msar.get_selectable_pkey_attnum(tab_id)))
+      || format(
+        ') AS key, %1$s AS summary FROM %2$I.%3$I)',
+        msar.build_summary_expr(tab_id),
+        msar.get_relation_schema_name(tab_id),
+        msar.get_relation_name(tab_id)
+      ),
+      string_agg(
+        format(
+          $c$summary_cte_%1$s AS (
+            SELECT
+              msar.format_data(%2$I) AS fkey,
+              %3$s AS summary
+            FROM %4$I.%5$I
+          )$c$,
+          conkey,
+          msar.get_column_name(target_oid, confkey),
+          msar.build_summary_expr(target_oid),
+          msar.get_relation_schema_name(target_oid),
+          msar.get_relation_name(target_oid)
+        ), ', '
+      )
+    ),
+    ''
+  )
+FROM fkey_map_cte;
+$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION
+msar.build_summary_join_expr_for_table(tab_id oid, cte_name text) RETURNS TEXT AS $$/*
+Build an SQL expression to join the summary CTEs to the main CTE along fkey values.
+
+Args:
+  tab_id: The table defining the columns of the main CTE.
+  cte_name: The name of the main CTE we'll join the summary CTEs to.
+*/
+WITH fkey_map_cte AS (SELECT * FROM msar.get_fkey_map_table(tab_id))
+SELECT concat(
+  format(E'\nLEFT JOIN summary_cte_self ON %1$I.', cte_name)
+  || quote_ident(msar.get_selectable_pkey_attnum(tab_id)::text)
+  || ' = summary_cte_self.key',
+  string_agg(
+    format(
+      $j$
+      LEFT JOIN summary_cte_%1$s ON %2$I.%1$I = summary_cte_%1$s.fkey$j$,
+      conkey,
+      cte_name
+    ), ' '
+  )
+)
+FROM fkey_map_cte;
+$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION
+msar.build_summary_json_expr_for_table(tab_id oid) RETURNS TEXT AS $$/*
+Build a JSON object with the results of summarizing linked records.
+
+Args:
+  tab_id: The OID of the table for which we're getting linked record summaries.
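+
+The resulting JSON (illustrative) has the form
+  {"3": {"15": "Alice", "16": "Bob"}, ...}
+where each key is the attnum of an fkey column in this table, and each value maps fkey values to
+summaries of the records they reference.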
+*/ +WITH fkey_map_cte AS (SELECT * FROM msar.get_fkey_map_table(tab_id)) +SELECT 'jsonb_build_object(' || string_agg( + format( + $j$ + %1$L, COALESCE( + jsonb_object_agg( + summary_cte_%1$s.fkey, summary_cte_%1$s.summary + ) FILTER (WHERE summary_cte_%1$s.fkey IS NOT NULL), '{}'::jsonb + ) + $j$, + conkey + ), ', ' +) || ')' +FROM fkey_map_cte; +$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.build_self_summary_json_expr(tab_id oid) RETURNS TEXT AS $$/* +*/ +SELECT CASE WHEN quote_ident(msar.get_selectable_pkey_attnum(tab_id)::text) IS NOT NULL THEN + $j$ + COALESCE( + jsonb_object_agg( + summary_cte_self.key, summary_cte_self.summary + ) FILTER (WHERE summary_cte_self.key IS NOT NULL), '{}'::jsonb + ) + $j$ +END; +$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.list_records_from_table( + tab_id oid, + limit_ integer, + offset_ integer, + order_ jsonb, + filter_ jsonb, + group_ jsonb, + return_record_summaries boolean DEFAULT false +) RETURNS jsonb AS $$/* +Get records from a table. Only columns to which the user has access are returned. + +Args: + tab_id: The OID of the table whose records we'll get + limit_: The maximum number of rows we'll return + offset_: The number of rows to skip before returning records from following rows. + order_: An array of ordering definition objects. + filter_: An array of filter definition objects. + group_: An array of group definition objects. + return_record_summaries : Whether to return a summary for each record listed. + +The order definition objects should have the form + {"attnum": , "direction": } +*/ +DECLARE + records jsonb; +BEGIN + EXECUTE format( + $q$ + WITH count_cte AS ( + SELECT count(1) AS count FROM %2$I.%3$I %7$s + ), enriched_results_cte AS ( + SELECT %1$s, %8$s FROM %2$I.%3$I %7$s %6$s LIMIT %4$L OFFSET %5$L + ), results_ranked_cte AS ( + SELECT *, row_number() OVER (%6$s) - 1 AS __mathesar_result_idx FROM enriched_results_cte + ), groups_cte AS ( + SELECT %11$s + )%12$s + SELECT jsonb_build_object( + 'results', %9$s, + 'count', coalesce(max(count_cte.count), 0), + 'grouping', %10$s, + 'linked_record_summaries', %14$s, + 'record_summaries', %15$s, + 'query', $iq$SELECT %1$s FROM %2$I.%3$I %7$s %6$s LIMIT %4$L OFFSET %5$L$iq$ + ) + FROM enriched_results_cte + LEFT JOIN groups_cte ON enriched_results_cte.__mathesar_gid = groups_cte.id %13$s + CROSS JOIN count_cte + $q$, + msar.build_selectable_column_expr(tab_id), + msar.get_relation_schema_name(tab_id), + msar.get_relation_name(tab_id), + limit_, + offset_, + msar.build_order_by_expr(tab_id, order_), + msar.build_where_clause(tab_id, filter_), + msar.build_grouping_expr(tab_id, group_), + msar.build_results_jsonb_expr(tab_id, 'enriched_results_cte', order_), + COALESCE(msar.build_grouping_results_jsonb_expr(tab_id, 'groups_cte', group_), 'NULL'), + COALESCE(msar.build_groups_cte_expr(tab_id, 'results_ranked_cte', group_), 'NULL AS id'), + msar.build_summary_cte_expr_for_table(tab_id), + msar.build_summary_join_expr_for_table(tab_id, 'enriched_results_cte'), + COALESCE(msar.build_summary_json_expr_for_table(tab_id), 'NULL'), + COALESCE( + CASE WHEN return_record_summaries THEN msar.build_self_summary_json_expr(tab_id) END, + 'NULL' + ) + ) INTO records; + RETURN records; +END; +$$ LANGUAGE plpgsql STABLE; + + +CREATE OR REPLACE FUNCTION +msar.get_score_expr(tab_id oid, parameters_ jsonb) RETURNS text AS $$ +SELECT string_agg( + CASE WHEN pgt.typcategory = 'S' THEN + format( + $s$(CASE + WHEN %1$I ILIKE 
%2$L THEN 4 + WHEN %1$I ILIKE %2$L || '%%' THEN 3 + WHEN %1$I ILIKE '%%' || %2$L || '%%' THEN 2 + ELSE 0 + END)$s$, + pga.attname, + x.literal + ) + ELSE + format('(CASE WHEN %1$I = %2$L THEN 4 ELSE 0 END)', pga.attname, x.literal) + END, + ' + ' +) +FROM jsonb_to_recordset(parameters_) AS x(attnum smallint, literal text) + INNER JOIN pg_catalog.pg_attribute AS pga ON x.attnum = pga.attnum + INNER JOIN pg_catalog.pg_type AS pgt ON pga.atttypid = pgt.oid +WHERE + pga.attrelid = tab_id + AND NOT pga.attisdropped + AND has_column_privilege(tab_id, x.attnum, 'SELECT') +$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.search_records_from_table( + tab_id oid, + search_ jsonb, + limit_ integer, + return_record_summaries boolean DEFAULT false +) RETURNS jsonb AS $$/* +Get records from a table, filtering and sorting according to a search specification. + +Only columns to which the user has access are returned. + +Args: + tab_id: The OID of the table whose records we'll get + search_: An array of search definition objects. + limit_: The maximum number of rows we'll return. + +The search definition objects should have the form + {"attnum": , "literal": } +*/ +DECLARE + records jsonb; +BEGIN + EXECUTE format( + $q$ + WITH count_cte AS ( + SELECT count(1) AS count FROM %2$I.%3$I %4$s + ), results_cte AS ( + SELECT %1$s FROM %2$I.%3$I %4$s ORDER BY %6$s LIMIT %5$L + )%7$s + SELECT jsonb_build_object( + 'results', coalesce(jsonb_agg(row_to_json(results_cte.*)), jsonb_build_array()), + 'count', coalesce(max(count_cte.count), 0), + 'linked_record_summaries', %9$s, + 'record_summaries', %10$s, + 'query', $iq$SELECT %1$s FROM %2$I.%3$I %4$s ORDER BY %6$s LIMIT %5$L$iq$ + ) + FROM results_cte %8$s + CROSS JOIN count_cte + $q$, + msar.build_selectable_column_expr(tab_id), + msar.get_relation_schema_name(tab_id), + msar.get_relation_name(tab_id), + 'WHERE ' || msar.get_score_expr(tab_id, search_) || ' > 0', + limit_, + concat( + msar.get_score_expr(tab_id, search_) || ' DESC, ', + msar.build_total_order_expr(tab_id, null) + ), + msar.build_summary_cte_expr_for_table(tab_id), + msar.build_summary_join_expr_for_table(tab_id, 'results_cte'), + COALESCE(msar.build_summary_json_expr_for_table(tab_id), 'NULL'), + COALESCE( + CASE WHEN return_record_summaries THEN msar.build_self_summary_json_expr(tab_id) END, + 'NULL' + ) + ) INTO records; + RETURN records; +END; +$$ LANGUAGE plpgsql; + + +DROP FUNCTION IF EXISTS msar.get_record_from_table(oid, anyelement); +DROP FUNCTION IF EXISTS msar.get_record_from_table(oid, anyelement, boolean); +CREATE OR REPLACE FUNCTION +msar.get_record_from_table( + tab_id oid, + rec_id anycompatible, + return_record_summaries boolean DEFAULT false +) RETURNS jsonb AS $$/* +Get single record from a table. Only columns to which the user has access are returned. + +Args: + tab_id: The OID of the table whose record we'll get. + rec_id: The id value of the record. + +The table must have a single primary key column. +*/ +SELECT msar.list_records_from_table( + tab_id, null, null, null, + jsonb_build_object( + 'type', 'equal', 'args', jsonb_build_array( + jsonb_build_object('type', 'attnum', 'value', msar.get_pk_column(tab_id)), + jsonb_build_object('type', 'literal', 'value', rec_id) + ) + ), + null, + return_record_summaries +) +$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION +msar.delete_records_from_table(tab_id oid, rec_ids jsonb) RETURNS integer AS $$/* +Delete records from table by id. 
+
+
+CREATE OR REPLACE FUNCTION
+msar.delete_records_from_table(tab_id oid, rec_ids jsonb) RETURNS integer AS $$/*
+Delete records from a table by id.
+
+Args:
+  tab_id: The OID of the table whose records we'll delete.
+  rec_ids: A JSON array of primary key values identifying the records to delete.
+
+The table must have a single primary key column.
+*/
+DECLARE
+  num_deleted integer;
+BEGIN
+  EXECUTE format(
+    $d$
+    WITH delete_cte AS (DELETE FROM %1$I.%2$I %3$s RETURNING *)
+    SELECT count(1) FROM delete_cte
+    $d$,
+    msar.get_relation_schema_name(tab_id),
+    msar.get_relation_name(tab_id),
+    msar.build_where_clause(
+      tab_id, jsonb_build_object(
+        'type', 'element_in_json_array_untyped', 'args', jsonb_build_array(
+          jsonb_build_object(
+            'type', 'format_data', 'args', jsonb_build_array(
+              jsonb_build_object('type', 'attnum', 'value', msar.get_pk_column(tab_id))
+            )
+          ),
+          jsonb_build_object('type', 'literal', 'value', rec_ids)
+        )
+      )
+    )
+  ) INTO num_deleted;
+  RETURN num_deleted;
+END;
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION
+msar.build_single_insert_expr(tab_id oid, rec_def jsonb) RETURNS TEXT AS $$/*
+Build an INSERT statement for a single record from a JSON definition mapping attnums to values.
+
+When the definition is empty ({}), the statement inserts a row of default values.
+*/
+SELECT CASE WHEN NULLIF(rec_def, '{}'::jsonb) IS NOT NULL THEN
+  (
+    SELECT
+      format(
+        'INSERT INTO %I.%I (%s) VALUES (%s)',
+        msar.get_relation_schema_name(tab_id),
+        msar.get_relation_name(tab_id),
+        string_agg(format('%I', msar.get_column_name(tab_id, key::smallint)), ', '),
+        string_agg(format('%L', value), ', ')
+      )
+    FROM jsonb_each_text(rec_def)
+  )
+ELSE
+  format(
+    'INSERT INTO %I.%I DEFAULT VALUES',
+    msar.get_relation_schema_name(tab_id),
+    msar.get_relation_name(tab_id)
+  )
+END;
+$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT;
+
+
+CREATE OR REPLACE FUNCTION
+msar.add_record_to_table(
+  tab_id oid,
+  rec_def jsonb,
+  return_record_summaries boolean DEFAULT false
+) RETURNS jsonb AS $$/*
+Add a record to a table.
+
+Args:
+  tab_id: The OID of the table to which we'll add the record.
+  rec_def: A JSON object defining the record.
+
+The `rec_def` object's form is defined by the record being inserted. It should have keys
+corresponding to the attnums of desired columns and values corresponding to values we should
+insert.
+*/
+DECLARE
+  rec_created_id text;
+  rec_created jsonb;
+BEGIN
+  EXECUTE format(
+    'WITH insert_cte AS (%1$s RETURNING %2$s) SELECT msar.format_data(%3$I)::text FROM insert_cte',
+    msar.build_single_insert_expr(tab_id, rec_def),
+    msar.build_selectable_column_expr(tab_id),
+    msar.get_pk_column(tab_id)
+  ) INTO rec_created_id;
+  rec_created := msar.get_record_from_table(tab_id, rec_created_id, return_record_summaries);
+  RETURN jsonb_build_object(
+    'results', rec_created -> 'results',
+    'record_summaries', rec_created -> 'record_summaries',
+    'linked_record_summaries', rec_created -> 'linked_record_summaries'
+  );
+END;
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
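+
+
+-- A usage sketch, not part of the API: the OID (12345), attnums (2 and 3), and values below are
+-- hypothetical, for illustration only. Inserting a record, with rec_def keys being the attnums
+-- of the target columns, might look like:
+--
+--   SELECT msar.add_record_to_table(12345, '{"2": "Example name", "3": 42}');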
+
+
+CREATE OR REPLACE FUNCTION
+msar.build_update_expr(tab_id oid, rec_def jsonb) RETURNS TEXT AS $$/*
+Build an UPDATE statement setting the given columns from a JSON definition mapping attnums to
+values. The caller is responsible for appending a WHERE clause.
+*/
+SELECT
+  format(
+    'UPDATE %I.%I SET (%s) = ROW(%s)',
+    msar.get_relation_schema_name(tab_id),
+    msar.get_relation_name(tab_id),
+    string_agg(format('%I', msar.get_column_name(tab_id, key::smallint)), ', '),
+    string_agg(format('%L', value), ', ')
+  )
+FROM jsonb_each_text(rec_def);
+$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT;
+
+
+DROP FUNCTION IF EXISTS msar.patch_record_in_table(oid, anyelement, jsonb, boolean);
+CREATE OR REPLACE FUNCTION
+msar.patch_record_in_table(
+  tab_id oid,
+  rec_id anycompatible,
+  rec_def jsonb,
+  return_record_summaries boolean DEFAULT false
+) RETURNS jsonb AS $$/*
+Modify (update/patch) a record in a table.
+
+Args:
+  tab_id: The OID of the table whose record we'll modify.
+  rec_id: The primary key value of the record we'll modify.
+  rec_def: A JSON object defining the parts of the record to patch.
+
+Only tables with a single primary key column are supported.
+
+The `rec_def` object's form is defined by the record being updated. It should have keys
+corresponding to the attnums of desired columns and values corresponding to values we should set.
+*/
+DECLARE
+  rec_modified_id integer;
+  rec_modified jsonb;
+BEGIN
+  EXECUTE format(
+    $p$
+    WITH update_cte AS (%1$s %2$s RETURNING %3$s)
+    SELECT msar.format_data(%4$I)::text FROM update_cte
+    $p$,
+    msar.build_update_expr(tab_id, rec_def),
+    msar.build_where_clause(
+      tab_id, jsonb_build_object(
+        'type', 'equal', 'args', jsonb_build_array(
+          jsonb_build_object('type', 'literal', 'value', rec_id),
+          jsonb_build_object('type', 'attnum', 'value', msar.get_pk_column(tab_id))
+        )
+      )
+    ),
+    msar.build_selectable_column_expr(tab_id),
+    msar.get_pk_column(tab_id)
+  ) INTO rec_modified_id;
+  rec_modified := msar.get_record_from_table(tab_id, rec_modified_id, return_record_summaries);
+  RETURN jsonb_build_object(
+    'results', rec_modified -> 'results',
+    'record_summaries', rec_modified -> 'record_summaries',
+    'linked_record_summaries', rec_modified -> 'linked_record_summaries'
+  );
+END;
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
diff --git a/db/sql/0_msar.sql b/db/sql/0_msar.sql
deleted file mode 100644
index e16bf53246..0000000000
--- a/db/sql/0_msar.sql
+++ /dev/null
@@ -1,2615 +0,0 @@
-/*
-This script defines a number of functions to be used for manipulating database objects (tables,
-columns, schemas) using Data Definition Language style queries.
-
-These are the schemas where the new functions will generally live:
-
-  __msar: These functions aren't designed to be used except by other Mathesar functions.
-          Generally need preformatted strings as input, won't do quoting, etc.
-  msar:   These functions are designed to be used more easily. They'll format strings, quote
-          identifiers, and so on.
-
-The reason they're so abbreviated is to avoid namespace clashes, and also because making them longer
-would make using them quite tedious, since they're everywhere.
-
-The functions should each be overloaded to accept at a minimum the 'fixed' ID of a given object, as
-well as its name identifier(s).
-
-- Schemas should be identified by one of the following:
-  - OID, or
-  - Name
-- Tables should be identified by one of the following:
-  - OID, or
-  - Schema, Name pair (unquoted)
-- Columns should be identified by one of the following:
-  - OID, ATTNUM pair, or
-  - Schema, Table Name, Column Name triple (unquoted), or
-  - Table OID, Column Name pair (optional).
-
-Note that these identification schemes apply to the public-facing functions in the `msar` namespace,
-not necessarily the internal `__msar` functions.
-
-NAMING CONVENTIONS
-
-Because function signatures are used informationally in command-generated tables, horizontal space
-needs to be conserved. As a compromise between readability and terseness, we use the following
-conventions in variable naming:
-
-attribute  -> att
-schema     -> sch
-table      -> tab
-column     -> col
-constraint -> con
-object     -> obj
-relation   -> rel
-
-Textual names will have the suffix _name, and numeric identifiers will have the suffix _id.
-
-So, the OID of a table will be tab_id and the name of a column will be col_name. The attnum of a
-column will be col_id.
-
-Generally, we'll use snake_case for legibility and to avoid collisions with internal PostgreSQL
-naming conventions.
- -*/ - -CREATE SCHEMA IF NOT EXISTS __msar; -CREATE SCHEMA IF NOT EXISTS msar; - ----------------------------------------------------------------------------------------------------- ----------------------------------------------------------------------------------------------------- --- GENERAL DDL FUNCTIONS --- --- Functions in this section are quite general, and are the basis of the others. ----------------------------------------------------------------------------------------------------- ----------------------------------------------------------------------------------------------------- - - -CREATE OR REPLACE FUNCTION -__msar.exec_ddl(command text) RETURNS text AS $$/* -Execute the given command, returning the command executed. - -Not useful for SELECTing from tables. Most useful when you're performing DDL. - -Args: - command: Raw string that will be executed as a command. -*/ -BEGIN - EXECUTE command; - RETURN command; -END; -$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION -__msar.exec_ddl(command_template text, arguments variadic anyarray) RETURNS text AS $$/* -Execute a templated command, returning the command executed. - -The template is given in the first argument, and all further arguments are used to fill in the -template. Not useful for SELECTing from tables. Most useful when you're performing DDL. - -Args: - command_template: Raw string that will be executed as a command. - arguments: arguments that will be used to fill in the template. -*/ -DECLARE formatted_command TEXT; -BEGIN - formatted_command := format(command_template, VARIADIC arguments); - RETURN __msar.exec_ddl(formatted_command); -END; -$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION -__msar.build_text_tuple(text[]) RETURNS text AS $$ -SELECT '(' || string_agg(col, ', ') || ')' FROM unnest($1) x(col); -$$ LANGUAGE sql RETURNS NULL ON NULL INPUT; - ----------------------------------------------------------------------------------------------------- ----------------------------------------------------------------------------------------------------- --- INFO FUNCTIONS --- --- Functions in this section get information about a given schema, table or column. ----------------------------------------------------------------------------------------------------- ----------------------------------------------------------------------------------------------------- - -CREATE OR REPLACE FUNCTION msar.col_description(tab_id oid, col_id integer) RETURNS text AS $$/* -Transparent wrapper for col_description. Putting it in the `msar` namespace helps route all DB calls -from Python through a single Python module. -*/ - BEGIN - RETURN col_description(tab_id, col_id); - END -$$ LANGUAGE plpgsql; - -CREATE OR REPLACE FUNCTION __msar.jsonb_key_exists(data jsonb, key text) RETURNS boolean AS $$/* -Wraps the `?` jsonb operator for improved readability. -*/ - BEGIN - RETURN data ? key; - END; -$$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION __msar.get_schema_name(sch_id oid) RETURNS TEXT AS $$/* -Return the name for a given schema, quoted as appropriate. - -The schema *must* be in the pg_namespace table to use this function. - -Args: - sch_id: The OID of the schema. -*/ -BEGIN - RETURN sch_id::regnamespace::text; -END; -$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION -__msar.get_fully_qualified_object_name(sch_name text, obj_name text) RETURNS text AS $$/* -Return the fully-qualified name for a given database object (e.g., table). 
- -Args: - sch_name: The schema of the object, quoted. - obj_name: The name of the object, unqualified and quoted. -*/ -BEGIN - RETURN format('%s.%s', sch_name, obj_name); -END; -$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION -msar.get_fully_qualified_object_name(sch_name text, obj_name text) RETURNS text AS $$/* -Return the fully-qualified, properly quoted, name for a given database object (e.g., table). - -Args: - sch_name: The schema of the object, unquoted. - obj_name: The name of the object, unqualified and unquoted. -*/ -BEGIN - RETURN __msar.get_fully_qualified_object_name(quote_ident(sch_name), quote_ident(obj_name)); -END; -$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION -__msar.get_relation_name(rel_id oid) RETURNS text AS $$/* -Return the name for a given relation (e.g., table), qualified or quoted as appropriate. - -In cases where the relation is already included in the search path, the returned name will not be -fully-qualified. - -The relation *must* be in the pg_class table to use this function. - -Args: - rel_id: The OID of the relation. -*/ -BEGIN - RETURN rel_id::regclass::text; -END; -$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; - - -DROP FUNCTION IF EXISTS msar.get_relation_oid(text, text) CASCADE; -CREATE OR REPLACE FUNCTION -msar.get_relation_oid(sch_name text, rel_name text) RETURNS oid AS $$/* -Return the OID for a given relation (e.g., table). - -The relation *must* be in the pg_class table to use this function. - -Args: - sch_name: The schema of the relation, unquoted. - rel_name: The name of the relation, unqualified and unquoted. -*/ -BEGIN - RETURN msar.get_fully_qualified_object_name(sch_name, rel_name)::regclass::oid; -END; -$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION msar.get_relation_namespace_oid(rel_id oid) RETURNS oid AS $$/* -Get the OID of the namespace containing the given relation. - -Most useful for getting the OID of the schema of a given table. - -Args: - rel_id: The OID of the relation whose namespace we want to find. -*/ -SELECT relnamespace FROM pg_class WHERE oid=rel_id; -$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; - - - -CREATE OR REPLACE FUNCTION -msar.get_column_name(rel_id oid, col_id integer) RETURNS text AS $$/* -Return the name for a given column in a given relation (e.g., table). - -More precisely, this function returns the name of attributes of any relation appearing in the -pg_class catalog table (so you could find attributes of indices with this function). - -Args: - rel_id: The OID of the relation. - col_id: The attnum of the column in the relation. -*/ -SELECT quote_ident(attname::text) FROM pg_attribute WHERE attrelid=rel_id AND attnum=col_id; -$$ LANGUAGE sql RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION -msar.get_column_name(rel_id oid, col_name text) RETURNS text AS $$/* -Return the name for a given column in a given relation (e.g., table). - -More precisely, this function returns the quoted name of attributes of any relation appearing in the -pg_class catalog table (so you could find attributes of indices with this function). If the given -col_name is not in the relation, we return null. - -This has the effect of both quoting and preparing the given col_name, and also validating that it -exists. - -Args: - rel_id: The OID of the relation. - col_name: The unquoted name of the column in the relation. 
-*/ -SELECT quote_ident(attname::text) FROM pg_attribute WHERE attrelid=rel_id AND attname=col_name; -$$ LANGUAGE sql RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION -msar.get_column_names(rel_id oid, columns jsonb) RETURNS text[] AS $$/* -Return the names for given columns in a given relation (e.g., table). - -- If the rel_id is given as 0, the assumption is that this is a new table, so we just apply normal -quoting rules to a column without validating anything further. -- If the rel_id is given as nonzero, and a column is given as text, then we validate that - the column name exists in the table, and use that. -- If the rel_id is given as nonzero, and the column is given as a number, then we look the column up - by attnum and use that name. - -The columns jsonb can have a mix of numerical IDs and column names. The reason for this is that we -may be adding a column algorithmically, and this saves having to modify the column adding logic -based on the IDs passed by the user for given columns. - -Args: - rel_id: The OID of the relation. - columns: A JSONB array of the unquoted names or IDs (can be mixed) of the columns. -*/ -SELECT array_agg( - CASE - WHEN rel_id=0 THEN quote_ident(col #>> '{}') - WHEN jsonb_typeof(col)='number' THEN msar.get_column_name(rel_id, col::integer) - WHEN jsonb_typeof(col)='string' THEN msar.get_column_name(rel_id, col #>> '{}') - END -) -FROM jsonb_array_elements(columns) AS x(col); -$$ LANGUAGE sql RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION msar.get_attnum(rel_id oid, att_name text) RETURNS smallint AS $$/* -Get the attnum for a given attribute in the relation. Returns null if no such attribute exists. - -Usually, this will be used to get the attnum for a column of a table. - -Args: - rel_id: The relation where we'll look for the attribute. - att_name: The name of the attribute, unquoted. -*/ -SELECT attnum FROM pg_attribute WHERE attrelid=rel_id AND attname=att_name; -$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION -msar.is_pkey_col(rel_id oid, col_id integer) RETURNS boolean AS $$/* -Return whether the given column is in the primary key of the given relation (e.g., table). - -Args: - rel_id: The OID of the relation. - col_id: The attnum of the column in the relation. -*/ -BEGIN - RETURN ARRAY[col_id::smallint] <@ conkey FROM pg_constraint WHERE conrelid=rel_id and contype='p'; -END; -$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION -msar.is_default_possibly_dynamic(tab_id oid, col_id integer) RETURNS boolean AS $$/* -Determine whether the default value for the given column is an expression or constant. - -If the column default is an expression, then we return 'True', since that could be dynamic. If the -column default is a simple constant, we return 'False'. The check is not very sophisticated, and -errs on the side of returning 'True'. We simply pull apart the pg_node_tree representation of the -expression, and check whether the root node is a known function call type. Note that we do *not* -search any deeper in the tree than the root node. This means we won't notice that some expressions -are actually constant (or at least static), if they have a function call or operator as their root -node. - -For example, the following would return 'True', even though they're not dynamic: - 3 + 5 - mathesar_types.cast_to_integer('8') - -Args: - tab_id: The OID of the table with the column. - col_id: The attnum of the column in the table. 
-*/ -SELECT - -- This is a typical dynamic default like NOW() or CURRENT_DATE - (split_part(substring(adbin, 2), ' ', 1) IN (('SQLVALUEFUNCTION'), ('FUNCEXPR'))) - OR - -- This is an identity column `GENERATED {ALWAYS | DEFAULT} AS IDENTITY` - (attidentity <> '') - OR - -- Other generated columns show up here. - (attgenerated <> '') -FROM pg_attribute LEFT JOIN pg_attrdef ON attrelid=adrelid AND attnum=adnum -WHERE attrelid=tab_id AND attnum=col_id; -$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION -msar.is_mathesar_id_column(tab_id oid, col_id integer) RETURNS boolean AS $$/* -Determine whether the given column is our default Mathesar ID column. - -The column in question is always attnum 1, and is created with the string - - id integer PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY - -Args: - tab_id: The OID of the table whose column we'll check - col_id: The attnum of the column in question -*/ -SELECT col_id=1 AND attname='id' AND atttypid='integer'::regtype::oid AND attidentity <> '' -FROM pg_attribute WHERE attrelid=tab_id AND attnum=col_id; -$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION -msar.get_cast_function_name(target_type regtype) RETURNS text AS $$/* -Return a string giving the appropriate name of the casting function for the target_type. - -Currently set up to duplicate the logic in our python casting function builder. This will be -changed. Given a qualified, potentially capitalized type name, we -- Remove the namespace (schema), -- Replace any white space in the type name with underscores, -- Replace double quotes in the type name (e.g., the "char" type) with '_double_quote_' -- Use the prepped type name in the name `mathesar_types.cast_to_%s`. - -Args: - target_type: This should be a type that exists. -*/ -DECLARE target_type_prepped text; -BEGIN - -- TODO: Come up with a way to build these names that is more robust against collisions. - WITH unqualifier AS ( - SELECT x[array_upper(x, 1)] unqualified_type - FROM regexp_split_to_array(target_type::text, '\.') x - ), unspacer AS( - SELECT replace(unqualified_type, ' ', '_') unspaced_type - FROM unqualifier - ) - SELECT replace(unspaced_type, '"', '_double_quote_') - FROM unspacer - INTO target_type_prepped; - RETURN format('mathesar_types.cast_to_%s', target_type_prepped); -END; -$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION -msar.get_constraint_name(con_id oid) RETURNS text AS $$/* -Return the quoted constraint name of the correponding constraint oid. - -Args: - con_id: The OID of the constraint. -*/ -BEGIN - RETURN quote_ident(conname::text) FROM pg_constraint WHERE pg_constraint.oid = con_id; -END; -$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION -msar.get_pk_column(rel_id oid) RETURNS smallint AS $$/* -Return the first column attnum in the primary key of a given relation (e.g., table). - -Args: - rel_id: The OID of the relation. -*/ -SELECT conkey[1] -FROM pg_constraint -WHERE contype='p' -AND conrelid=rel_id; -$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION -msar.get_pk_column(sch_name text, rel_name text) RETURNS smallint AS $$/* -Return the first column attnum in the primary key of a given relation (e.g., table). - -Args: - sch_name: The schema of the relation, unquoted. - rel_name: The name of the relation, unqualified and unquoted. 
-*/ -SELECT conkey[1] -FROM pg_constraint -WHERE contype='p' -AND conrelid=msar.get_relation_oid(sch_name, rel_name); -$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION -msar.get_column_type(rel_id oid, col_id smallint) RETURNS text AS $$/* -Return the type of a given column in a relation. - -Args: - rel_id: The OID of the relation. - col_id: The attnum of the column in the relation. -*/ -SELECT atttypid::regtype -FROM pg_attribute -WHERE attnum = col_id -AND attrelid = rel_id; -$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION -msar.get_column_type(sch_name text, rel_name text, col_name text) RETURNS text AS $$/* -Return the type of a given column in a relation. - -Args: - sch_name: The schema of the relation, unquoted. - rel_name: The name of the relation, unqualified and unquoted. - col_name: The name of the column in the relation, unquoted. -*/ -SELECT atttypid::regtype -FROM pg_attribute -WHERE attname = quote_ident(col_name) -AND attrelid = msar.get_relation_oid(sch_name, rel_name); -$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION msar.column_exists(tab_id oid, col_name text) RETURNS boolean AS $$/* -Return true if the given column exists in the table, false otherwise. -*/ -SELECT EXISTS (SELECT 1 FROM pg_attribute WHERE attrelid=tab_id AND attname=col_name); -$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION msar.schema_exists(schema_name text) RETURNS boolean AS $$/* -Return true if the given schema exists in the current database, false otherwise. -*/ -SELECT EXISTS (SELECT 1 FROM pg_namespace WHERE nspname=schema_name); -$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; - - ----------------------------------------------------------------------------------------------------- ----------------------------------------------------------------------------------------------------- --- ROLE MANIPULATION FUNCTIONS --- --- Functions in this section should always involve creating, granting, or revoking privileges or --- roles ----------------------------------------------------------------------------------------------------- ----------------------------------------------------------------------------------------------------- - - --- Create mathesar user ---------------------------------------------------------------------------- - - -CREATE OR REPLACE FUNCTION -msar.create_basic_mathesar_user(username text, password_ text) RETURNS TEXT AS $$/* -*/ -DECLARE - sch_name text; - mathesar_schemas text[] := ARRAY['mathesar_types', '__msar', 'msar']; -BEGIN - PERFORM __msar.exec_ddl('CREATE USER %I WITH PASSWORD %L', username, password_); - PERFORM __msar.exec_ddl( - 'GRANT CREATE, CONNECT, TEMP ON DATABASE %I TO %I', - current_database()::text, - username - ); - FOREACH sch_name IN ARRAY mathesar_schemas LOOP - BEGIN - PERFORM __msar.exec_ddl('GRANT USAGE ON SCHEMA %I TO %I', sch_name, username); - EXCEPTION - WHEN invalid_schema_name THEN - RAISE NOTICE 'Schema % does not exist', sch_name; - END; - END LOOP; - RETURN username; -END; -$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; - - ----------------------------------------------------------------------------------------------------- ----------------------------------------------------------------------------------------------------- --- ALTER SCHEMA FUNCTIONS --- --- Functions in this section should always involve 'ALTER SCHEMA'. 
----------------------------------------------------------------------------------------------------- ----------------------------------------------------------------------------------------------------- - - --- Rename schema ----------------------------------------------------------------------------------- - -CREATE OR REPLACE FUNCTION -__msar.rename_schema(old_sch_name text, new_sch_name text) RETURNS TEXT AS $$/* -Change a schema's name, returning the command executed. - -Args: - old_sch_name: A properly quoted original schema name - new_sch_name: A properly quoted new schema name -*/ -DECLARE - cmd_template text; -BEGIN - cmd_template := 'ALTER SCHEMA %s RENAME TO %s'; - RETURN __msar.exec_ddl(cmd_template, old_sch_name, new_sch_name); -END; -$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION -msar.rename_schema(old_sch_name text, new_sch_name text) RETURNS TEXT AS $$/* -Change a schema's name, returning the command executed. - -Args: - old_sch_name: An unquoted original schema name - new_sch_name: An unquoted new schema name -*/ -BEGIN - RETURN __msar.rename_schema(quote_ident(old_sch_name), quote_ident(new_sch_name)); -END; -$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION msar.rename_schema(sch_id oid, new_sch_name text) RETURNS TEXT AS $$/* -Change a schema's name, returning the command executed. - -Args: - sch_id: The OID of the original schema - new_sch_name: An unquoted new schema name -*/ -BEGIN - RETURN __msar.rename_schema(__msar.get_schema_name(sch_id), quote_ident(new_sch_name)); -END; -$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; - - --- Comment on schema ------------------------------------------------------------------------------- - -CREATE OR REPLACE FUNCTION -__msar.comment_on_schema(sch_name text, comment_ text) RETURNS TEXT AS $$/* -Change the description of a schema, returning command executed. - -Args: - sch_name: The quoted name of the schema whose comment we will change. - comment_: The new comment. Any quotes or special characters must be escaped. -*/ -DECLARE - cmd_template text; -BEGIN - cmd_template := 'COMMENT ON SCHEMA %s IS %s'; - RETURN __msar.exec_ddl(cmd_template, sch_name, comment_); -END; -$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION -msar.comment_on_schema(sch_name text, comment_ text) RETURNS TEXT AS $$/* -Change the description of a schema, returning command executed. - -Args: - sch_name: The quoted name of the schema whose comment we will change. - comment_: The new comment. -*/ -BEGIN - RETURN __msar.comment_on_schema(quote_ident(sch_name), quote_literal(comment_)); -END; -$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION msar.comment_on_schema(sch_id oid, comment_ text) RETURNS TEXT AS $$/* -Change the description of a schema, returning command executed. - -Args: - sch_id: The OID of the schema. - comment_: The new comment. -*/ -BEGIN - RETURN __msar.comment_on_schema(__msar.get_schema_name(sch_id), quote_literal(comment_)); -END; -$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; - - ----------------------------------------------------------------------------------------------------- ----------------------------------------------------------------------------------------------------- --- CREATE SCHEMA FUNCTIONS --- --- Create a schema. 
----------------------------------------------------------------------------------------------------- ----------------------------------------------------------------------------------------------------- - - --- Create schema ----------------------------------------------------------------------------------- - -CREATE OR REPLACE FUNCTION -__msar.create_schema(sch_name text, if_not_exists boolean) RETURNS TEXT AS $$/* -Create a schema, returning the command executed. - -Args: - sch_name: A properly quoted name of the schema to be created - if_not_exists: Whether to ignore an error if the schema does exist -*/ -DECLARE - cmd_template text; -BEGIN - IF if_not_exists - THEN - cmd_template := 'CREATE SCHEMA IF NOT EXISTS %s'; - ELSE - cmd_template := 'CREATE SCHEMA %s'; - END IF; - RETURN __msar.exec_ddl(cmd_template, sch_name); -END; -$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION -msar.create_schema(sch_name text, if_not_exists boolean) RETURNS TEXT AS $$/* -Create a schema, returning the command executed. - -Args: - sch_name: An unquoted name of the schema to be created - if_not_exists: Whether to ignore an error if the schema does exist -*/ -BEGIN - RETURN __msar.create_schema(quote_ident(sch_name), if_not_exists); -END; -$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; - - ----------------------------------------------------------------------------------------------------- ----------------------------------------------------------------------------------------------------- --- DROP SCHEMA FUNCTIONS --- --- Drop a schema. ----------------------------------------------------------------------------------------------------- ----------------------------------------------------------------------------------------------------- - - --- Drop schema ------------------------------------------------------------------------------------- - -CREATE OR REPLACE FUNCTION -__msar.drop_schema(sch_name text, cascade_ boolean, if_exists boolean) RETURNS TEXT AS $$/* -Drop a schema, returning the command executed. - -Args: - sch_name: A properly quoted name of the schema to be dropped - cascade_: Whether to drop dependent objects. - if_exists: Whether to ignore an error if the schema doesn't exist -*/ -DECLARE - cmd_template text; -BEGIN - IF if_exists - THEN - cmd_template := 'DROP SCHEMA IF EXISTS %s'; - ELSE - cmd_template := 'DROP SCHEMA %s'; - END IF; - IF cascade_ - THEN - cmd_template = cmd_template || ' CASCADE'; - END IF; - RETURN __msar.exec_ddl(cmd_template, sch_name); -END; -$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION -msar.drop_schema(sch_id oid, cascade_ boolean, if_exists boolean) RETURNS TEXT AS $$/* -Drop a schema, returning the command executed. - -Args: - sch_id: The OID of the schema to drop - cascade_: Whether to drop dependent objects. - if_exists: Whether to ignore an error if the schema doesn't exist -*/ -BEGIN - RETURN __msar.drop_schema(__msar.get_schema_name(sch_id), cascade_, if_exists); -END; -$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION -msar.drop_schema(sch_name text, cascade_ boolean, if_exists boolean) RETURNS TEXT AS $$/* -Drop a schema, returning the command executed. - -Args: - sch_name: An unqoted name of the schema to be dropped - cascade_: Whether to drop dependent objects. 
- if_exists: Whether to ignore an error if the schema doesn't exist -*/ -BEGIN - RETURN __msar.drop_schema(quote_ident(sch_name), cascade_, if_exists); -END; -$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; - - ----------------------------------------------------------------------------------------------------- ----------------------------------------------------------------------------------------------------- --- ALTER TABLE FUNCTIONS --- --- Functions in this section should always involve 'ALTER TABLE'. ----------------------------------------------------------------------------------------------------- ----------------------------------------------------------------------------------------------------- - - --- Rename table ------------------------------------------------------------------------------------ - -CREATE OR REPLACE FUNCTION -__msar.rename_table(old_tab_name text, new_tab_name text) RETURNS text AS $$/* -Change a table's name, returning the command executed. - -Args: - old_tab_name: properly quoted, qualified table name - new_tab_name: properly quoted, unqualified table name -*/ -BEGIN - RETURN __msar.exec_ddl( - 'ALTER TABLE %s RENAME TO %s', old_tab_name, new_tab_name - ); -END; -$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION -msar.rename_table(tab_id oid, new_tab_name text) RETURNS text AS $$/* -Change a table's name, returning the command executed. - -Args: - tab_id: the OID of the table whose name we want to change - new_tab_name: unquoted, unqualified table name -*/ -BEGIN - RETURN __msar.rename_table(__msar.get_relation_name(tab_id), quote_ident(new_tab_name)); -END; -$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION -msar.rename_table(sch_name text, old_tab_name text, new_tab_name text) RETURNS text AS $$/* -Change a table's name, returning the command executed. - -Args: - sch_name: unquoted schema name where the table lives - old_tab_name: unquoted, unqualified original table name - new_tab_name: unquoted, unqualified new table name -*/ -DECLARE fullname text; -BEGIN - fullname := msar.get_fully_qualified_object_name(sch_name, old_tab_name); - RETURN __msar.rename_table(fullname, quote_ident(new_tab_name)); -END; -$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; - - --- Comment on table -------------------------------------------------------------------------------- - -CREATE OR REPLACE FUNCTION -__msar.comment_on_table(tab_name text, comment_ text) RETURNS text AS $$/* -Change the description of a table, returning command executed. - -Args: - tab_name: The qualified, quoted name of the table whose comment we will change. - comment_: The new comment. Any quotes or special characters must be escaped. -*/ -SELECT __msar.exec_ddl('COMMENT ON TABLE %s IS %s', tab_name, comment_); -$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION -msar.comment_on_table(tab_id oid, comment_ text) RETURNS text AS $$/* -Change the description of a table, returning command executed. - -Args: - tab_id: The OID of the table whose comment we will change. - comment_: The new comment. -*/ -SELECT __msar.comment_on_table(__msar.get_relation_name(tab_id), quote_literal(comment_)); -$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION -msar.comment_on_table(sch_name text, tab_name text, comment_ text) RETURNS text AS $$/* -Change the description of a table, returning command executed. - -Args: - sch_name: The schema of the table whose comment we will change. 
- tab_name: The name of the table whose comment we will change. - comment_: The new comment. -*/ -SELECT __msar.comment_on_table( - msar.get_fully_qualified_object_name(sch_name, tab_name), - quote_literal(comment_) -); -$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; - - --- Alter Table: LEFT IN PYTHON (for now) ----------------------------------------------------------- - ----------------------------------------------------------------------------------------------------- ----------------------------------------------------------------------------------------------------- --- ALTER TABLE FUNCTIONS: Column operations --- --- Functions in this section should always involve 'ALTER TABLE', and one or more columns ----------------------------------------------------------------------------------------------------- ----------------------------------------------------------------------------------------------------- - --- Update table primary key sequence to latest ----------------------------------------------------- - -CREATE OR REPLACE FUNCTION -__msar.update_pk_sequence_to_latest(tab_name text, col_name text) RETURNS text AS $$/* -Update the primary key sequence to the maximum of the primary key column, plus one. - -Args: - tab_name: Fully-qualified, quoted table name - col_name: The column name of the primary key. -*/ -BEGIN - RETURN __msar.exec_ddl( - 'SELECT ' - || 'setval(' - || 'pg_get_serial_sequence(''%1$s'', ''%2$s''), coalesce(max(%2$s) + 1, 1), false' - || ') ' - || 'FROM %1$s', - tab_name, col_name - ); -END; -$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION -msar.update_pk_sequence_to_latest(tab_id oid, col_id integer) RETURNS text AS $$/* -Update the primary key sequence to the maximum of the primary key column, plus one. - -Args: - tab_id: The OID of the table whose primary key sequence we'll update. - col_id: The attnum of the primary key column. -*/ -DECLARE tab_name text; -DECLARE col_name text; -BEGIN - tab_name := __msar.get_relation_name(tab_id); - col_name := msar.get_column_name(tab_id, col_id); - RETURN __msar.update_pk_sequence_to_latest(tab_name, col_name); -END; -$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION -msar.update_pk_sequence_to_latest(sch_name text, tab_name text, col_name text) RETURNS text AS $$/* -Update the primary key sequence to the maximum of the primary key column, plus one. - -Args: - sch_name: The schema where the table whose primary key sequence we'll update lives. - tab_name: The table whose primary key sequence we'll update. - col_name: The name of the primary key column. -*/ -DECLARE qualified_tab_name text; -BEGIN - qualified_tab_name := msar.get_fully_qualified_object_name(sch_name, tab_name); - RETURN __msar.update_pk_sequence_to_latest(qualified_tab_name, quote_ident(col_name)); -END; -$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; - - --- Drop columns from table ------------------------------------------------------------------------- - -CREATE OR REPLACE FUNCTION -__msar.drop_columns(tab_name text, col_names variadic text[]) RETURNS text AS $$/* -Drop the given columns from the given table. - -Args: - tab_name: Fully-qualified, quoted table name. - col_names: The column names to be dropped, quoted. 
-*/
-DECLARE column_drops text;
-BEGIN
-  SELECT string_agg(format('DROP COLUMN %s', col), ', ')
-  FROM unnest(col_names) AS col
-  INTO column_drops;
-  RETURN __msar.exec_ddl('ALTER TABLE %s %s', tab_name, column_drops);
-END;
-$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
-
-
-CREATE OR REPLACE FUNCTION
-msar.drop_columns(tab_id oid, col_ids variadic integer[]) RETURNS text AS $$/*
-Drop the given columns from the given table.
-
-Args:
-  tab_id: OID of the table whose columns we'll drop.
-  col_ids: The attnums of the columns to drop.
-*/
-DECLARE col_names text[];
-BEGIN
-  SELECT array_agg(quote_ident(attname))
-  FROM pg_attribute
-  WHERE attrelid=tab_id AND ARRAY[attnum::integer] <@ col_ids
-  INTO col_names;
-  RETURN __msar.drop_columns(__msar.get_relation_name(tab_id), variadic col_names);
-END;
-$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
-
-
-CREATE OR REPLACE FUNCTION
-msar.drop_columns(sch_name text, tab_name text, col_names variadic text[]) RETURNS text AS $$/*
-Drop the given columns from the given table.
-
-Args:
-  sch_name: The schema where the table whose columns we'll drop lives, unquoted.
-  tab_name: The table whose columns we'll drop, unquoted and unqualified.
-  col_names: The columns to drop, unquoted.
-*/
-DECLARE prepared_col_names text[];
-DECLARE fully_qualified_tab_name text;
-BEGIN
-  SELECT array_agg(quote_ident(col)) FROM unnest(col_names) AS col INTO prepared_col_names;
-  fully_qualified_tab_name := msar.get_fully_qualified_object_name(sch_name, tab_name);
-  RETURN __msar.drop_columns(fully_qualified_tab_name, variadic prepared_col_names);
-END;
-$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
-
-
--- Column creation definition type -----------------------------------------------------------------
-
-DROP TYPE IF EXISTS __msar.col_def CASCADE;
-CREATE TYPE __msar.col_def AS (
-  name_ text, -- The name of the column to create, quoted.
-  type_ text, -- The type of the column to create, fully specced with arguments.
-  not_null boolean, -- A boolean to describe whether the column is nullable or not.
-  default_ text, -- Text SQL giving the default value for the column.
-  identity_ boolean, -- A boolean giving whether the column is an identity pkey column.
-  description text -- A text that will become a comment for the column
-);
-
-
-CREATE OR REPLACE FUNCTION
-msar.get_fresh_copy_name(tab_id oid, col_id smallint) RETURNS text AS $$/*
-This function generates a name to be used for a duplicated column.
-
-Given an original column name 'abc', the resulting copies will be named 'abc <n>', where <n> is
-minimal (at least 1) subject to the restriction that 'abc <n>' is not already a column of the table
-given.
-
-Args:
-  tab_id: the table for which we'll generate a column name.
-  col_id: the original column whose name we'll use as the prefix in our copied column name.
-*/
-DECLARE
-  original_col_name text;
-  idx integer := 1;
-BEGIN
-  original_col_name := attname FROM pg_attribute WHERE attrelid=tab_id AND attnum=col_id;
-  WHILE format('%s %s', original_col_name, idx) IN (
-    SELECT attname FROM pg_attribute WHERE attrelid=tab_id
-  ) LOOP
-    idx = idx + 1;
-  END LOOP;
-  RETURN format('%s %s', original_col_name, idx);
-END;
-$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
-
-
-CREATE OR REPLACE FUNCTION msar.get_duplicate_col_defs(
-  tab_id oid,
-  col_ids smallint[],
-  new_names text[],
-  copy_defaults boolean
-) RETURNS __msar.col_def[] AS $$/*
-Get an array of __msar.col_def from given columns in a table.
- -Args: - tab_id: The OID of the table containing the column whose definition we want. - col_ids: The attnums of the columns whose definitions we want. - new_names: The desired names of the column defs. Must be in same order as col_ids, and same - length. - copy_defaults: Whether or not we should copy the defaults -*/ -SELECT array_agg( - ( - -- build a name for the duplicate column - quote_ident(COALESCE(new_name, msar.get_fresh_copy_name(tab_id, pg_columns.attnum))), - -- build text specifying the type of the duplicate column - format_type(atttypid, atttypmod), - -- set the duplicate column to be nullable, since it will initially be empty - false, - -- set the default value for the duplicate column if specified - CASE WHEN copy_defaults THEN pg_get_expr(adbin, tab_id) END, - -- We don't set a duplicate column as a primary key, since that would cause an error. - false, - msar.col_description(tab_id, pg_columns.attnum) - )::__msar.col_def -) -FROM pg_attribute AS pg_columns - JOIN unnest(col_ids, new_names) AS columns_to_copy(col_id, new_name) - ON pg_columns.attnum=columns_to_copy.col_id - LEFT JOIN pg_attrdef AS pg_column_defaults - ON pg_column_defaults.adnum=pg_columns.attnum AND pg_columns.attrelid=pg_column_defaults.adrelid -WHERE pg_columns.attrelid=tab_id; -$$ LANGUAGE sql RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION -msar.build_unique_column_name_unquoted(tab_id oid, col_name text) RETURNS text AS $$/* -Get a unique column name based on the given name. - -Args: - tab_id: The OID of the table where the column name should be unique. - col_name: The resulting column name will be equal to or at least based on this. - -See the msar.get_fresh_copy_name function for how unique column names are generated. -*/ -DECLARE - col_attnum smallint; -BEGIN - col_attnum := msar.get_attnum(tab_id, col_name); - RETURN CASE - WHEN col_attnum IS NOT NULL THEN msar.get_fresh_copy_name(tab_id, col_attnum) ELSE col_name - END; -END; -$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION -msar.build_unique_fkey_column_name(tab_id oid, fk_col_name text, frel_name text) - RETURNS text AS $$/* -Create a unique name for a foreign key column. - -Args: - tab_id: The OID of the table where the column name should be unique. - fk_col_name: The base name for the foreign key column. - frel_name: The name of the referent table. Used for creating fk_col_name if not given. - -Note that frel_name will be used to build the foreign key column name if it's not given. The result -will be of the form: _id. Then, we apply some logic to ensure the result is unique. -*/ -BEGIN - fk_col_name := COALESCE(fk_col_name, format('%s_id', frel_name)); - RETURN msar.build_unique_column_name_unquoted(tab_id, fk_col_name); -END; -$$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION -msar.get_extracted_col_def_jsonb(tab_id oid, col_ids integer[]) RETURNS jsonb AS $$/* -Get a JSON array of column definitions from given columns for creation of an extracted table. - -See the msar.process_col_def_jsonb for a description of the JSON. - -Args: - tab_id: The OID of the table containing the columns whose definitions we want. - col_ids: The attnum of the columns whose definitions we want. -*/ - -SELECT jsonb_agg( - jsonb_build_object( - 'name', attname, - 'type', jsonb_build_object('id', atttypid, 'modifier', atttypmod), - 'not_null', attnotnull, - 'default', - -- We only copy non-dynamic default expressions to new table to avoid double-use of sequences. 
- -- Sequences are owned by a specific column, and can't be reused without error. - CASE WHEN NOT msar.is_default_possibly_dynamic(tab_id, col_id) THEN - pg_get_expr(adbin, tab_id) - END - ) -) -FROM pg_attribute AS pg_columns - JOIN unnest(col_ids) AS columns_to_copy(col_id) - ON pg_columns.attnum=columns_to_copy.col_id - LEFT JOIN pg_attrdef AS pg_column_defaults - ON pg_column_defaults.adnum=pg_columns.attnum AND pg_columns.attrelid=pg_column_defaults.adrelid -WHERE pg_columns.attrelid=tab_id AND NOT msar.is_pkey_col(tab_id, col_id); -$$ LANGUAGE sql RETURNS NULL ON NULL INPUT; - - --- Add columns to table ---------------------------------------------------------------------------- - -CREATE OR REPLACE FUNCTION -__msar.prepare_fields_arg(fields text) RETURNS text AS $$/* -Convert the `fields` argument into an integer for use with the integertypmodin system function. - -Args: - fields: A string corresponding to the documented options from the doumentation at - https://www.postgresql.org/docs/13/datatype-datetime.html - -In order to construct the argument for intervaltypmodin, needed for constructing the typmod value -for INTERVAL types with arguments, we need to apply a transformation to the correct integer. This -transformation is quite arcane, and is lifted straight from the PostgreSQL C code. Given a non-null -fields argument, the steps are: -- Assign each substring of valid `fields` arguments the correct integer (from the Postgres src). -- Apply a bitshift mapping each integer to the according power of 2. -- Sum the results to get an integer signifying the fields argument. -*/ -SELECT COALESCE( - sum(1<> 'length', - CASE WHEN timespan_flag THEN __msar.prepare_fields_arg(typ_options ->> 'fields') END, - typ_options ->> 'precision', - typ_options ->> 'scale' - ], - null -)::cstring[] -$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION -__msar.get_formatted_base_type(typ_name text, typ_options jsonb) RETURNS text AS $$ /* -Build the appropriate type definition string, without Array brackets. - -This function uses some PostgreSQL internal functions to do its work. In particular, for any type -that takes options, This function uses the typmodin (read "type modification input") system -functions to convert the given options into a typmod integer. The typ_name given is converted into -the OID of the named type. These two pieces let us call `format_type` to get a canonical string -representation of the definition of the type, with its options. - -Args: - typ_name: This should be qualified and quoted as needed. - typ_options: These should be in the form described in msar.build_type_text. -*/ -DECLARE - typ_id oid; - timespan_flag boolean; - typmodin_func text; - typmod integer; -BEGIN - -- Here we just get the OID of the type. - typ_id := typ_name::regtype::oid; - -- This is a lookup of the function name for the typmodin function associated with the type, if - -- one exists. - typmodin_func := typmodin::text FROM pg_type WHERE oid=typ_id AND typmodin<>0; - -- This flag is needed since timespan types need special handling when converting the options into - -- the form needed to call the typmodin function. 
-  timespan_flag := typcategory='T' FROM pg_type WHERE oid=typ_id;
-  IF (
-    jsonb_typeof(typ_options) = 'null'  -- The caller passed no type options
-    OR typ_options IS NULL  -- The caller didn't even pass the type options key
-    OR typ_options='{}'::jsonb  -- The caller passed an empty type options object
-    OR typmodin_func IS NULL  -- The type doesn't actually accept type options
-  ) THEN
-    typmod := NULL;
-  ELSE
-    -- Here, we actually run the typmod function to get the output for use in the format_type call.
-    EXECUTE format(
-      'SELECT %I(%L)',
-      typmodin_func,
-      __msar.build_typmodin_arg(typ_options, timespan_flag)
-    ) INTO typmod;
-  END IF;
-  RETURN format_type(typ_id::integer, typmod::integer);
-END;
-$$ LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION
-msar.build_type_text(typ_jsonb jsonb) RETURNS text AS $$/*
-Turns the given type-describing JSON into a proper string defining a type with arguments
-
-The input JSON should be of the form
-  {
-    "id": <type_oid>,
-    "schema": <schema_name>,
-    "name": <type_name>,
-    "modifier": <type_modifier>,
-    "options": {
-      "length": <length>,
-      "precision": <precision>,
-      "scale": <scale>,
-      "fields": <fields>,
-      "array": <boolean>
-    }
-  }
-
-All fields are optional, and a null value as input returns 'text'
-*/
-SELECT COALESCE(
-  -- First choice is the type specified by numeric IDs, since they're most reliable.
-  format_type(
-    (typ_jsonb ->> 'id')::integer,
-    (typ_jsonb ->> 'modifier')::integer
-  ),
-  -- Second choice is the type specified by string IDs.
-  __msar.get_formatted_base_type(
-    COALESCE(
-      msar.get_fully_qualified_object_name(typ_jsonb ->> 'schema', typ_jsonb ->> 'name'),
-      typ_jsonb ->> 'name',
-      'text'  -- We fall back to 'text' when input is null or empty.
-    ),
-    typ_jsonb -> 'options'
-  ) || CASE
-    WHEN (typ_jsonb -> 'options' ->> 'array')::boolean THEN
-      '[]'
-    ELSE ''
-  END
-)
-$$ LANGUAGE SQL;
-
-
-CREATE OR REPLACE FUNCTION
-msar.build_type_text_complete(typ_jsonb jsonb, old_type text) RETURNS text AS $$/*
-Build the text name of a type, using the old type as a base if only options are given.
-
-The main use for this is to allow for altering only the options of the type of a column.
-
-Args:
-  typ_jsonb: This is a jsonb denoting the new type.
-  old_type: This is the old type name, with no options.
-
-The typ_jsonb should be in the form:
-{
-  "name": <type_name> (optional),
-  "options": <type_options> (optional)
-}
-
-*/
-SELECT msar.build_type_text(
-  jsonb_strip_nulls(
-    jsonb_build_object(
-      'name', COALESCE(typ_jsonb ->> 'name', old_type),
-      'options', typ_jsonb -> 'options'
-    )
-  )
-);
-$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT;
-
-
-CREATE OR REPLACE FUNCTION __msar.build_col_def_text(col __msar.col_def) RETURNS text AS $$/*
-Build appropriate text defining the given column for table creation or alteration.
-*/
-SELECT format(
-  '%s %s %s %s %s',
-  col.name_,
-  col.type_,
-  CASE WHEN col.not_null THEN 'NOT NULL' END,
-  'DEFAULT ' || col.default_,
-  -- This can be used to define our default Mathesar primary key column.
-  -- TODO: We should really consider doing GENERATED *ALWAYS* (rather than BY DEFAULT), but this
-  -- breaks some other assumptions.
-  CASE WHEN col.identity_ THEN 'GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY' END
-);
-$$ LANGUAGE SQL;
-
-
-CREATE OR REPLACE FUNCTION
-msar.process_col_def_jsonb(
-  tab_id oid,
-  col_defs jsonb,
-  raw_default boolean,
-  create_id boolean DEFAULT false
-) RETURNS __msar.col_def[] AS $$/*
-Create an array of __msar.col_def from a JSON array of column creation defining JSON blobs.
-
-Args:
-  tab_id: The OID of the table where we'll create the columns
-  col_defs: A jsonb array defining a column creation (must have "type" key; "name",
-            "not_null", and "default" keys optional).
-  raw_default: This boolean tells us whether we should reproduce the default with or without
-               quoting and escaping. True means we don't quote or escape, but just use the raw
-               value.
-  create_id: This boolean defines whether or not we should automatically add a default Mathesar
-             'id' column to the input.
-
-The col_defs should have the form:
-[
-  {
-    "name": <column_name> (optional),
-    "type": {
-      "name": <type_name> (optional),
-      "options": <type_options> (optional),
-    },
-    "not_null": <boolean> (optional; default false),
-    "default": <default_value> (optional),
-    "description": <description_text> (optional)
-  },
-  {
-    ...
-  }
-]
-
-For more info on the type.options object, see the msar.build_type_text function. All pieces are
-optional. If an empty object {} is given, the resulting column will have a default name like
-'Column <n>' and type TEXT. It will allow nulls and have a null default value.
-*/
-WITH attnum_cte AS (
-  SELECT MAX(attnum) AS m_attnum FROM pg_attribute WHERE attrelid=tab_id
-), col_create_cte AS (
-  SELECT (
-    -- build a name for the column
-    COALESCE(
-      quote_ident(col_def_obj ->> 'name'),
-      quote_ident('Column ' || (attnum_cte.m_attnum + ROW_NUMBER() OVER ())),
-      quote_ident('Column ' || (ROW_NUMBER() OVER ()))
-    ),
-    -- build the column type
-    msar.build_type_text(col_def_obj -> 'type'),
-    -- set the not_null value for the column
-    col_def_obj ->> 'not_null',
-    -- set the default value for the column
-    CASE
-      WHEN col_def_obj ->> 'default' IS NULL THEN
-        NULL
-      WHEN raw_default THEN
-        col_def_obj ->> 'default'
-      ELSE
-        format('%L', col_def_obj ->> 'default')
-    END,
-    -- We don't allow setting the primary key column manually
-    false,
-    -- Set the description for the column
-    quote_literal(col_def_obj ->> 'description')
-  )::__msar.col_def AS col_defs
-  FROM attnum_cte, jsonb_array_elements(col_defs) AS col_def_obj
-  WHERE col_def_obj ->> 'name' IS NULL OR col_def_obj ->> 'name' <> 'id'
-)
-SELECT array_cat(
-  CASE
-    WHEN create_id THEN
-      -- The below tuple defines a default 'id' column for Mathesar. It has name id, type integer,
-      -- it's not null, it uses the 'identity' functionality to generate default values, has
-      -- a default comment.
-      ARRAY[('id', 'integer', true, null, true, 'Mathesar default ID column')]::__msar.col_def[]
-  END,
-  array_agg(col_defs)
-)
-FROM col_create_cte;
-$$ LANGUAGE SQL;
-
-
-CREATE OR REPLACE FUNCTION
-__msar.add_columns(tab_name text, col_defs variadic __msar.col_def[]) RETURNS text AS $$/*
-Add the given columns to the given table.
-
-Args:
-  tab_name: Fully-qualified, quoted table name.
-  col_defs: The columns to be added.
-*/
-WITH ca_cte AS (
-  SELECT string_agg(
-    'ADD COLUMN ' || __msar.build_col_def_text(col),
-    ', '
-  ) AS col_additions
-  FROM unnest(col_defs) AS col
-)
-SELECT __msar.exec_ddl('ALTER TABLE %s %s', tab_name, col_additions) FROM ca_cte;
-$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT;
-
-
-CREATE OR REPLACE FUNCTION
-msar.add_columns(tab_id oid, col_defs jsonb, raw_default boolean DEFAULT false)
-  RETURNS smallint[] AS $$/*
-Add columns to a table.
-
-Args:
-  tab_id: The OID of the table to which we'll add columns.
-  col_defs: a JSONB array defining columns to add. See msar.process_col_def_jsonb for details.
-  raw_default: Whether to treat defaults as raw SQL. DANGER!
-*/ -DECLARE - col_create_defs __msar.col_def[]; - fq_table_name text := __msar.get_relation_name(tab_id); -BEGIN - col_create_defs := msar.process_col_def_jsonb(tab_id, col_defs, raw_default); - PERFORM __msar.add_columns(fq_table_name, variadic col_create_defs); - - PERFORM - __msar.comment_on_column( - fq_table_name, - col_create_def.name_, - col_create_def.description - ) - FROM unnest(col_create_defs) AS col_create_def - WHERE col_create_def.description IS NOT NULL; - - RETURN array_agg(attnum) - FROM (SELECT * FROM pg_attribute WHERE attrelid=tab_id) L - INNER JOIN unnest(col_create_defs) R - ON quote_ident(L.attname) = R.name_; -END; -$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION -msar.add_columns(sch_name text, tab_name text, col_defs jsonb, raw_default boolean) - RETURNS smallint[] AS $$/* -Add columns to a table. - -Args: - sch_name: unquoted schema name of the table to which we'll add columns. - tab_name: unquoted, unqualified name of the table to which we'll add columns. - col_defs: a JSONB array defining columns to add. See msar.process_col_def_jsonb for details. - raw_default: Whether to treat defaults as raw SQL. DANGER! -*/ -SELECT msar.add_columns(msar.get_relation_oid(sch_name, tab_name), col_defs, raw_default); -$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; - - ----------------------------------------------------------------------------------------------------- ----------------------------------------------------------------------------------------------------- --- MATHESAR ADD CONSTRAINTS FUNCTIONS --- --- Add constraints to tables and (for NOT NULL) columns. ----------------------------------------------------------------------------------------------------- ----------------------------------------------------------------------------------------------------- - - --- Constraint creation definition type ------------------------------------------------------------- - -DROP TYPE IF EXISTS __msar.con_def CASCADE; -CREATE TYPE __msar.con_def AS ( -/* -This should be used in the context of a single ALTER TABLE command. So, no need to reference the -constrained table's OID. -*/ - name_ text, -- The name of the constraint to create, qualified and quoted. - type_ "char", -- The type of constraint to create, as a "char". See pg_constraint.contype - col_names text[], -- The columns for the constraint, quoted. - deferrable_ boolean, -- Whether or not the constraint is deferrable. - fk_rel_name text, -- The foreign table for an fkey, qualified and quoted. - fk_col_names text[], -- The foreign table's columns for an fkey, quoted. - fk_upd_action "char", -- Action taken when fk referent is updated. See pg_constraint.confupdtype. - fk_del_action "char", -- Action taken when fk referent is deleted. See pg_constraint.confdeltype. - fk_match_type "char", -- The match type of the fk constraint. See pg_constraint.confmatchtype. - expression text -- Text SQL giving the expression for the constraint (if applicable). -); - - -CREATE OR REPLACE FUNCTION msar.get_fkey_action_from_char("char") RETURNS text AS $$/* -Map the "char" from pg_constraint to the update or delete action string. 
-*/
-SELECT CASE
-  WHEN $1 = 'a' THEN 'NO ACTION'
-  WHEN $1 = 'r' THEN 'RESTRICT'
-  WHEN $1 = 'c' THEN 'CASCADE'
-  WHEN $1 = 'n' THEN 'SET NULL'
-  WHEN $1 = 'd' THEN 'SET DEFAULT'
-END;
-$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT;
-
-
-CREATE OR REPLACE FUNCTION msar.get_fkey_match_type_from_char("char") RETURNS text AS $$/*
-Convert a char to its proper string describing the match type.
-
-NOTE: Since 'PARTIAL' is not implemented (and throws an error), we don't use it here.
-*/
-SELECT CASE
-  WHEN $1 = 'f' THEN 'FULL'
-  WHEN $1 = 's' THEN 'SIMPLE'
-END;
-$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT;
-
-
-CREATE OR REPLACE FUNCTION __msar.build_con_def_text(con __msar.con_def) RETURNS text AS $$/*
-Build appropriate text defining the given constraint for table creation or alteration.
-
-If the given con.name_ is null, the syntax changes slightly (we don't add 'CONSTRAINT'). The
-FOREIGN KEY constraint has a number of extra strings that may or may not be appended. The best
-documentation for this is the FOREIGN KEY section of the CREATE TABLE docs:
-https://www.postgresql.org/docs/current/sql-createtable.html
-
-One helpful note is that this function makes heavy use of the || operator. This operator returns
-null if either side is null, and thus
-
-    'CONSTRAINT ' || con.name_ || ' '
-
-is 'CONSTRAINT <name> ' when con.name_ is not null, and simply null if con.name_ is null.
-*/
-SELECT CASE
-    WHEN con.type_ = 'u' THEN -- It's a UNIQUE constraint
-      format(
-        '%sUNIQUE %s',
-        'CONSTRAINT ' || con.name_ || ' ',
-        __msar.build_text_tuple(con.col_names)
-      )
-    WHEN con.type_ = 'p' THEN -- It's a PRIMARY KEY constraint
-      format(
-        '%sPRIMARY KEY %s',
-        'CONSTRAINT ' || con.name_ || ' ',
-        __msar.build_text_tuple(con.col_names)
-      )
-    WHEN con.type_ = 'f' THEN -- It's a FOREIGN KEY constraint
-      format(
-        '%sFOREIGN KEY %s REFERENCES %s%s%s%s%s',
-        'CONSTRAINT ' || con.name_ || ' ',
-        __msar.build_text_tuple(con.col_names),
-        con.fk_rel_name,
-        __msar.build_text_tuple(con.fk_col_names),
-        ' MATCH ' || msar.get_fkey_match_type_from_char(con.fk_match_type),
-        ' ON DELETE ' || msar.get_fkey_action_from_char(con.fk_del_action),
-        ' ON UPDATE ' || msar.get_fkey_action_from_char(con.fk_upd_action)
-      )
-    ELSE
-      NULL
-  END
-  || CASE WHEN con.deferrable_ THEN 'DEFERRABLE' ELSE '' END;
-$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT;
-
-
-CREATE OR REPLACE FUNCTION
-msar.process_con_def_jsonb(tab_id oid, con_create_arr jsonb)
-  RETURNS __msar.con_def[] AS $$/*
-Create an array of __msar.con_def from a JSON array of constraint creation defining JSON.
-
-Args:
-  tab_id: The OID of the table where we'll create the constraints.
-  con_create_arr: A jsonb array defining constraint creation (each element must have a "type"
-                  key; see the form below for the optional keys).
-
-The con_create_arr should have the form:
-[
-  {
-    "name": <str> (optional),
-    "type": <str>,
-    "columns": [<int>, <int>, ...],
-    "deferrable": <bool> (optional),
-    "fkey_relation_id": <int> (optional),
-    "fkey_relation_schema": <str> (optional),
-    "fkey_relation_name": <str> (optional),
-    "fkey_columns": [<int>, <int>, ...] (optional),
-    "fkey_update_action": <str> (optional),
-    "fkey_delete_action": <str> (optional),
-    "fkey_match_type": <str> (optional),
-  },
-  {
-    ...
-  }
-]
-If the constraint type is "f", then we require
-- fkey_relation_id or (fkey_relation_schema and fkey_relation_name).
-
-Numeric IDs are preferred over textual ones where both are accepted.
-*/
-SELECT array_agg(
-  (
-    -- build the name for the constraint, properly quoted.
-    quote_ident(con_create_obj ->> 'name'),
-    -- set the constraint type as a single char. See __msar.build_con_def_text for details.
-    con_create_obj ->> 'type',
-    -- Set the column names associated with the constraint.
-    msar.get_column_names(tab_id, con_create_obj -> 'columns'),
-    -- Set whether the constraint is deferrable or not (boolean).
-    con_create_obj ->> 'deferrable',
-    -- Build the relation name where the constraint will be applied. Prefer numeric ID.
-    COALESCE(
-      __msar.get_relation_name((con_create_obj -> 'fkey_relation_id')::integer::oid),
-      msar.get_fully_qualified_object_name(
-        con_create_obj ->> 'fkey_relation_schema', con_create_obj ->> 'fkey_relation_name'
-      )
-    ),
-    -- Build the array of foreign columns for an fkey constraint.
-    msar.get_column_names(
-      COALESCE(
-        -- We validate that the given OID (if any) is correct.
-        (con_create_obj -> 'fkey_relation_id')::integer::oid,
-        -- If given a schema, name pair, we get the OID from that (and validate it).
-        msar.get_relation_oid(
-          con_create_obj ->> 'fkey_relation_schema', con_create_obj ->> 'fkey_relation_name'
-        )
-      ),
-      con_create_obj -> 'fkey_columns'
-    ),
-    -- The below are passed directly. They define some parameters for FOREIGN KEY constraints.
-    con_create_obj ->> 'fkey_update_action',
-    con_create_obj ->> 'fkey_delete_action',
-    con_create_obj ->> 'fkey_match_type',
-    null -- not yet implemented
-  )::__msar.con_def
-) FROM jsonb_array_elements(con_create_arr) AS x(con_create_obj);
-$$ LANGUAGE SQL;
-
-
-CREATE OR REPLACE FUNCTION
-__msar.add_constraints(tab_name text, con_defs variadic __msar.con_def[])
-  RETURNS TEXT AS $$/*
-Add the given constraints to the given table.
-
-Args:
-  tab_name: Fully-qualified, quoted table name.
-  con_defs: The constraints to be added.
-*/
-WITH con_cte AS (
-  SELECT string_agg('ADD ' || __msar.build_con_def_text(con), ', ') as con_additions
-  FROM unnest(con_defs) as con
-)
-SELECT __msar.exec_ddl('ALTER TABLE %s %s', tab_name, con_additions) FROM con_cte;
-$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT;
-
-
-CREATE OR REPLACE FUNCTION
-msar.add_constraints(tab_id oid, con_defs jsonb) RETURNS oid[] AS $$/*
-Add constraints to a table.
-
-Args:
-  tab_id: The OID of the table to which we'll add constraints.
-  con_defs: a JSONB array defining constraints to add. See msar.process_con_def_jsonb for details.
-*/
-DECLARE
-  con_create_defs __msar.con_def[];
-BEGIN
-  con_create_defs := msar.process_con_def_jsonb(tab_id, con_defs);
-  PERFORM __msar.add_constraints(__msar.get_relation_name(tab_id), variadic con_create_defs);
-  RETURN array_agg(oid) FROM pg_constraint WHERE conrelid=tab_id;
-END;
-$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
-
-
-CREATE OR REPLACE FUNCTION
-msar.add_constraints(sch_name text, tab_name text, con_defs jsonb)
-  RETURNS oid[] AS $$/*
-Add constraints to a table.
-
-Args:
-  sch_name: unquoted schema name of the table to which we'll add constraints.
-  tab_name: unquoted, unqualified name of the table to which we'll add constraints.
-  con_defs: a JSONB array defining constraints to add. See msar.process_con_def_jsonb for details.
-*/
-SELECT msar.add_constraints(msar.get_relation_oid(sch_name, tab_name), con_defs);
-$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT;
-
-
-DROP TYPE IF EXISTS __msar.not_null_def CASCADE;
-CREATE TYPE __msar.not_null_def AS (
-  col_name text, -- The column to be modified, quoted.
-  not_null boolean -- The value to set for null or not null.
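-  -- For example (a sketch), ROW('"email"', true)::__msar.not_null_def requests SET NOT NULL on
-  -- column "email"; a false value requests DROP NOT NULL instead (see __msar.set_not_nulls).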
-); - - -CREATE OR REPLACE FUNCTION -__msar.set_not_nulls(tab_name text, not_null_defs __msar.not_null_def[]) RETURNS TEXT AS $$/* -Set or drop not null constraints on columns -*/ -WITH not_null_cte AS ( - SELECT string_agg( - CASE - WHEN not_null_def.not_null=true THEN format('ALTER %s SET NOT NULL', not_null_def.col_name) - WHEN not_null_def.not_null=false THEN format ('ALTER %s DROP NOT NULL', not_null_def.col_name) - END, - ', ' - ) AS not_nulls - FROM unnest(not_null_defs) as not_null_def -) -SELECT __msar.exec_ddl('ALTER TABLE %s %s', tab_name, not_nulls) FROM not_null_cte; -$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION -msar.copy_constraint(con_id oid, from_col_id smallint, to_col_id smallint) - RETURNS oid[] AS $$/* -Copy a single constraint associated with a column. - -Given a column with attnum 3 involved in the original constraint, and a column with attnum 4 to be -involved in the constraint copy, and other columns 1 and 2 involved in the constraint, suppose the -original constraint had conkey [1, 2, 3]. The copy constraint should then have conkey [1, 2, 4]. - -For now, this is only implemented for unique constraints. - -Args: - con_id: The oid of the constraint we'll copy. - from_col_id: The column ID to be removed from the original's conkey in the copy. - to_col_id: The column ID to be added to the original's conkey in the copy. -*/ -WITH - con_cte AS (SELECT * FROM pg_constraint WHERE oid=con_id AND contype='u'), - con_def_cte AS ( - SELECT jsonb_agg( - jsonb_build_object( - 'name', null, - 'type', con_cte.contype, - 'columns', array_replace(con_cte.conkey, from_col_id, to_col_id) - ) - ) AS con_def FROM con_cte - ) -SELECT msar.add_constraints(con_cte.conrelid, con_def_cte.con_def) FROM con_cte, con_def_cte; -$$ LANGUAGE sql RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION -msar.copy_column( - tab_id oid, col_id smallint, copy_name text, copy_data boolean, copy_constraints boolean -) RETURNS smallint AS $$/* -Copy a column of a table -*/ -DECLARE - col_defs __msar.col_def[]; - tab_name text; - col_name text; - created_col_id smallint; -BEGIN - col_defs := msar.get_duplicate_col_defs( - tab_id, ARRAY[col_id], ARRAY[copy_name], copy_data - ); - tab_name := __msar.get_relation_name(tab_id); - col_name := msar.get_column_name(tab_id, col_id); - PERFORM __msar.add_columns(tab_name, VARIADIC col_defs); - created_col_id := attnum - FROM pg_attribute - WHERE attrelid=tab_id AND quote_ident(attname)=col_defs[1].name_; - IF copy_data THEN - PERFORM __msar.exec_ddl( - 'UPDATE %s SET %s=%s', - tab_name, col_defs[1].name_, msar.get_column_name(tab_id, col_id) - ); - END IF; - IF copy_constraints THEN - PERFORM msar.copy_constraint(oid, col_id, created_col_id) - FROM pg_constraint - WHERE conrelid=tab_id AND ARRAY[col_id] <@ conkey; - PERFORM __msar.set_not_nulls( - tab_name, ARRAY[(col_defs[1].name_, attnotnull)::__msar.not_null_def] - ) - FROM pg_attribute WHERE attrelid=tab_id AND attnum=col_id; - END IF; - RETURN created_col_id; -END; -$$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION -msar.get_extracted_con_def_jsonb(tab_id oid, col_ids integer[]) RETURNS jsonb AS $$/* -Get a JSON array of constraint definitions from given columns for creation of an extracted table. - -See the msar.process_con_def_jsonb for a description of the JSON. - -Args: - tab_id: The OID of the table containing the constraints whose definitions we want. - col_ids: The attnum of columns with the constraints whose definitions we want. 
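-
-A usage sketch (the table name and attnums here are hypothetical):
-
-  SELECT msar.get_extracted_con_def_jsonb('my_table'::regclass::oid, ARRAY[2, 3]);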
-*/
-
-SELECT jsonb_agg(
-  jsonb_build_object(
-    'type', contype,
-    'columns', ARRAY[attname],
-    'deferrable', condeferrable,
-    'fkey_relation_id', confrelid::integer,
-    'fkey_columns', confkey,
-    'fkey_update_action', confupdtype,
-    'fkey_delete_action', confdeltype,
-    'fkey_match_type', confmatchtype
-  )
-)
-FROM pg_constraint
-  JOIN unnest(col_ids) AS columns_to_copy(col_id)
-    ON pg_constraint.conkey[1]=columns_to_copy.col_id
-  JOIN pg_attribute
-    ON pg_attribute.attnum=columns_to_copy.col_id AND pg_attribute.attrelid=pg_constraint.conrelid
-WHERE pg_constraint.conrelid=tab_id AND (pg_constraint.contype='f' OR pg_constraint.contype='u');
-$$ LANGUAGE sql RETURNS NULL ON NULL INPUT;
-
-
----------------------------------------------------------------------------------------------------
----------------------------------------------------------------------------------------------------
--- MATHESAR DROP TABLE FUNCTIONS
---
--- Drop a table.
----------------------------------------------------------------------------------------------------
----------------------------------------------------------------------------------------------------
-
--- Drop table --------------------------------------------------------------------------------------
-
-CREATE OR REPLACE FUNCTION
-__msar.drop_table(tab_name text, cascade_ boolean, if_exists boolean) RETURNS text AS $$/*
-Drop a table, returning the command executed.
-
-Args:
-  tab_name: The qualified, quoted name of the table we will drop.
-  cascade_: Whether to add CASCADE.
-  if_exists: Whether to ignore an error if the table doesn't exist.
-*/
-DECLARE
-  cmd_template TEXT;
-BEGIN
-  IF if_exists
-  THEN
-    cmd_template := 'DROP TABLE IF EXISTS %s';
-  ELSE
-    cmd_template := 'DROP TABLE %s';
-  END IF;
-  IF cascade_
-  THEN
-    cmd_template = cmd_template || ' CASCADE';
-  END IF;
-  RETURN __msar.exec_ddl(cmd_template, tab_name);
-END;
-$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
-
-
-CREATE OR REPLACE FUNCTION
-msar.drop_table(tab_id oid, cascade_ boolean, if_exists boolean) RETURNS text AS $$/*
-Drop a table, returning the command executed.
-
-Args:
-  tab_id: The OID of the table to drop.
-  cascade_: Whether to drop dependent objects.
-  if_exists: Whether to ignore an error if the table doesn't exist.
-*/
-BEGIN
-  RETURN __msar.drop_table(__msar.get_relation_name(tab_id), cascade_, if_exists);
-END;
-$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
-
-
-CREATE OR REPLACE FUNCTION
-msar.drop_table(sch_name text, tab_name text, cascade_ boolean, if_exists boolean)
-  RETURNS text AS $$/*
-Drop a table, returning the command executed.
-
-Args:
-  sch_name: The schema of the table to drop.
-  tab_name: The name of the table to drop.
-  cascade_: Whether to drop dependent objects.
-  if_exists: Whether to ignore an error if the table doesn't exist.
-*/
-DECLARE qualified_tab_name text;
-BEGIN
-  qualified_tab_name := msar.get_fully_qualified_object_name(sch_name, tab_name);
-  RETURN __msar.drop_table(qualified_tab_name, cascade_, if_exists);
-END;
-$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
-
-
----------------------------------------------------------------------------------------------------
----------------------------------------------------------------------------------------------------
--- MATHESAR DROP CONSTRAINT FUNCTIONS
---
--- Drop a constraint.
----------------------------------------------------------------------------------------------------- ----------------------------------------------------------------------------------------------------- - --- Drop constraint --------------------------------------------------------------------------------- - - -CREATE OR REPLACE FUNCTION -__msar.drop_constraint(tab_name text, con_name text) RETURNS text AS $$/* -Drop a constraint, returning the command executed. - -Args: - tab_name: A qualified & quoted name of the table that has the constraint to be dropped. - con_name: Name of the constraint to drop, properly quoted. -*/ -BEGIN - RETURN __msar.exec_ddl( - 'ALTER TABLE %s DROP CONSTRAINT %s', tab_name, con_name - ); -END; -$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION -msar.drop_constraint(sch_name text, tab_name text, con_name text) RETURNS text AS $$/* -Drop a constraint, returning the command executed. - -Args: - sch_name: The name of the schema where the table with constraint to be dropped resides, unquoted. - tab_name: The name of the table that has the constraint to be dropped, unquoted. - con_name: Name of the constraint to drop, unquoted. -*/ -BEGIN - RETURN __msar.drop_constraint( - msar.get_fully_qualified_object_name(sch_name, tab_name), quote_ident(con_name) - ); -END; -$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION -msar.drop_constraint(tab_id oid, con_id oid) RETURNS TEXT AS $$/* -Drop a constraint, returning the command executed. - -Args: - tab_id: OID of the table that has the constraint to be dropped. - con_id: OID of the constraint to be dropped. -*/ -BEGIN - RETURN __msar.drop_constraint( - __msar.get_relation_name(tab_id), msar.get_constraint_name(con_id) - ); -END; -$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; - - --- Create Mathesar table function - -CREATE OR REPLACE FUNCTION -__msar.add_table(tab_name text, col_defs __msar.col_def[], con_defs __msar.con_def[]) - RETURNS text AS $$/* -Add a table, returning the command executed. - -Args: - tab_name: A qualified & quoted name for the table to be added. - col_defs: An array of __msar.col_def defining the column set of the new table. - con_defs (optional): An array of __msar.con_def defining the constraints for the new table. - -Note: Even if con_defs is null, there can be some column-level constraints set in col_defs. -*/ -WITH col_cte AS ( - SELECT string_agg(__msar.build_col_def_text(col), ', ') AS table_columns - FROM unnest(col_defs) AS col -), con_cte AS ( - SELECT string_agg(__msar.build_con_def_text(con), ', ') AS table_constraints - FROM unnest(con_defs) as con -) -SELECT __msar.exec_ddl( - 'CREATE TABLE %s (%s)', - tab_name, - concat_ws(', ', table_columns, table_constraints) -) -FROM col_cte, con_cte; -$$ LANGUAGE SQL; - - -CREATE OR REPLACE FUNCTION -msar.add_mathesar_table(sch_oid oid, tab_name text, col_defs jsonb, con_defs jsonb, comment_ text) - RETURNS oid AS $$/* -Add a table, with a default id column, returning the OID of the created table. - -Args: - sch_oid: The OID of the schema where the table will be created. - tab_name: The unquoted name for the new table. - col_defs (optional): The columns for the new table, in order. - con_defs (optional): The constraints for the new table. - comment_ (optional): The comment for the new table. - -Note that even if col_defs is null, we will still create a table with a default 'id' column. Also, -if an 'id' column is given in the input, it will be replaced with our default 'id' column. 
This is -the behavior of the current python functions, so we're keeping it for now. In any case, the created -table will always have our default 'id' column as its first column. -*/ -DECLARE - fq_table_name text; - created_table_id oid; - column_defs __msar.col_def[]; - constraint_defs __msar.con_def[]; -BEGIN - fq_table_name := format('%s.%s', __msar.get_schema_name(sch_oid), quote_ident(tab_name)); - column_defs := msar.process_col_def_jsonb(0, col_defs, false, true); - constraint_defs := msar.process_con_def_jsonb(0, con_defs); - PERFORM __msar.add_table(fq_table_name, column_defs, constraint_defs); - created_table_id := fq_table_name::regclass::oid; - PERFORM msar.comment_on_table(created_table_id, comment_); - RETURN created_table_id; -END; -$$ LANGUAGE plpgsql; - - ----------------------------------------------------------------------------------------------------- ----------------------------------------------------------------------------------------------------- --- COLUMN ALTERATION FUNCTIONS --- --- Functions in this section should be related to altering columns' names, types, and constraints. ----------------------------------------------------------------------------------------------------- ----------------------------------------------------------------------------------------------------- - - --- Rename columns ---------------------------------------------------------------------------------- - -CREATE OR REPLACE FUNCTION -__msar.rename_column(tab_name text, old_col_name text, new_col_name text) RETURNS text AS $$/* -Change a column name, returning the command executed - -Args: - tab_name: The qualified, quoted name of the table where we'll change a column name - old_col_name: The quoted name of the column to change. - new_col_name: The quoted new name for the column. -*/ -DECLARE - cmd_template text; -BEGIN - cmd_template := 'ALTER TABLE %s RENAME COLUMN %s TO %s'; - IF old_col_name <> new_col_name THEN - RETURN __msar.exec_ddl(cmd_template, tab_name, old_col_name, new_col_name); - ELSE - RETURN null; - END IF; -END; -$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION -msar.rename_column(tab_id oid, col_id integer, new_col_name text) RETURNS smallint AS $$/* -Change a column name, returning the command executed - -Args: - tab_id: The OID of the table whose column we're renaming - col_id: The ID of the column to rename - new_col_name: The unquoted new name for the column. -*/ -BEGIN - PERFORM __msar.rename_column( - tab_name => __msar.get_relation_name(tab_id), - old_col_name => msar.get_column_name(tab_id, col_id), - new_col_name => quote_ident(new_col_name) - ); - RETURN col_id; -END; -$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION __msar.build_cast_expr(val text, type_ text) RETURNS text AS $$/* -Build an expression for casting a column in Mathesar, returning the text of that expression. - -We fall back silently to default casting behavior if the mathesar_types namespace is missing. -However, we do throw an error in cases where the schema exists, but the type casting function -doesn't. This is assumed to be an error the user should know about. - -Args: - val: This is quite general, and isn't sanitized in any way. It can be either a literal or a column - identifier, since we want to be able to produce a casting expression in either case. - type_: This type name string must cast properly to a regtype. 
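-
-For instance (a sketch), casting a column "age" to numeric yields something like
-mathesar_types.cast_to_numeric("age") when the mathesar_types schema exists, and falls back
-to "age"::numeric otherwise.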
-*/ -SELECT CASE - WHEN msar.schema_exists('mathesar_types') THEN - msar.get_cast_function_name(type_::regtype) || '(' || val || ')' - ELSE - val || '::' || type_::regtype::text -END; -$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION -__msar.build_col_drop_default_expr(tab_id oid, col_id integer, new_type text, new_default jsonb) - RETURNS TEXT AS $$/* -Build an expression for dropping a column's default, returning the text of that expression. - -This function is private, and not general: It builds an expression in the context of the -msar.process_col_alter_jsonb function and should not otherwise be called independently, since it has -logic specific to that context. In that setting, we drop the default for the specified column if the -caller specifies that we're setting a new_default of NULL, or if we're changing the type of the -column. - -Args: - tab_id: The OID of the table where the column with the default to be dropped lives. - col_id: The attnum of the column with the undesired default. - new_type: This gives the function context letting it know whether to drop the default or not. If - we are setting a new type for the column, we will always drop the default first. - new_default: This also gives us context letting us know whether to drop the default. By setting - the 'new_default' to (jsonb) null, the caller specifies that we should drop the - column's default. -*/ -SELECT CASE WHEN new_type IS NOT NULL OR jsonb_typeof(new_default)='null' THEN - 'ALTER COLUMN ' || msar.get_column_name(tab_id, col_id) || ' DROP DEFAULT' - END; -$$ LANGUAGE SQL; - -CREATE OR REPLACE FUNCTION -__msar.build_col_retype_expr(tab_id oid, col_id integer, new_type text) RETURNS text AS $$/* -Build an expression to change a column's type, returning the text of that expression. - -Note that this function wraps the type alteration in a cast expression. If we have the custom -mathesar_types cast functions available, we prefer those to the default PostgreSQL casting behavior. - -Args: - tab_id: The OID of the table containing the column whose type we'll alter. - col_id: The attnum of the column whose type we'll alter. - new_type: The target type to which we'll alter the column. -*/ -SELECT 'ALTER COLUMN ' - || msar.get_column_name(tab_id, col_id) - || ' TYPE ' - || new_type - || ' USING ' - || __msar.build_cast_expr(msar.get_column_name(tab_id, col_id), new_type); -$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT; - - -CREATE OR REPLACE FUNCTION __msar.build_col_default_expr( - tab_id oid, - col_id integer, - old_default text, - new_default jsonb, - new_type text -) RETURNS text AS $$/* -Build an expression to set a column's default value, returning the text of that expression. - -This function is private, and not general. The expression it builds is in the context of the calling -msar.process_col_alter_jsonb function. In particular, this function can also reset the original -default after a column type alteration, but cast to the new type of the column. We also avoid -setting a new default in cases where the new default argument is (sql) NULL, or a JSONB null. - -Args: - tab_id: The OID of the table containing the column whose default we'll alter. - col_id: The attnum of the column whose default we'll alter. - old_default: The current default. In some cases in the context of the caller, we want to reset the - original default, but cast to a new type. - new_default: The new desired default. It's left as JSONB since we are using JSONB 'null' values to - represent 'drop the column default'. 
-  new_type: The target type to which we'll cast the new default.
-*/
-DECLARE
-  default_expr text;
-  raw_default_expr text;
-BEGIN
-  -- In this case, we assume the intent is to clear out the original default.
-  IF jsonb_typeof(new_default)='null' THEN
-    default_expr := null;
-  -- We get the root JSONB value as text if it exists.
-  ELSEIF new_default #>> '{}' IS NOT NULL THEN
-    default_expr := format('%L', new_default #>> '{}'); -- sanitize since this could be user input.
-  -- At this point, we know we're not setting a new default, or dropping the old one.
-  -- So, we check whether the original default is potentially dynamic, and whether we need to cast
-  -- it to a new type.
-  ELSEIF msar.is_default_possibly_dynamic(tab_id, col_id) AND new_type IS NOT NULL THEN
-    -- We add casting the possibly dynamic expression to the new type as part of the default
-    -- expression in this case.
-    default_expr := __msar.build_cast_expr(old_default, new_type);
-  ELSEIF old_default IS NOT NULL AND new_type IS NOT NULL THEN
-    -- If we arrive here, then we know the old_default is a constant value, and we want to cast the
-    -- old default value to the new type *before* setting it as the new default. This avoids
-    -- building up nested cast functions in the default expression.
-    -- The first step is to execute the cast expression, putting the result into a new variable.
-    EXECUTE format('SELECT %s', __msar.build_cast_expr(old_default, new_type))
-      INTO raw_default_expr;
-    -- Then we format that new variable's value as a literal.
-    default_expr := format('%L', raw_default_expr);
-  END IF;
-  RETURN
-    format('ALTER COLUMN %s SET DEFAULT ', msar.get_column_name(tab_id, col_id)) || default_expr;
-END;
-$$ LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION
-__msar.build_col_not_null_expr(tab_id oid, col_id integer, not_null boolean) RETURNS text AS $$/*
-Build an expression to alter a column's NOT NULL setting, returning the text of that expression.
-
-Args:
-  tab_id: The OID of the table containing the column whose nullability we'll alter.
-  col_id: The attnum of the column whose nullability we'll alter.
-  not_null: If true, we 'SET NOT NULL'. If false, we 'DROP NOT NULL'. If null, we do nothing.
-*/
-SELECT 'ALTER COLUMN '
-  || msar.get_column_name(tab_id, col_id)
-  || CASE WHEN not_null THEN ' SET ' ELSE ' DROP ' END
-  || 'NOT NULL';
-$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT;
-
-
-CREATE OR REPLACE FUNCTION
-__msar.build_col_drop_text(tab_id oid, col_id integer, col_delete boolean) RETURNS text AS $$/*
-Build an expression to drop a column from a table, returning the text of that expression.
-
-Args:
-  tab_id: The OID of the table containing the column we'll drop.
-  col_id: The attnum of the column we'll drop.
-  col_delete: If true, we drop the column. If false or null, we do nothing.
-*/
-SELECT CASE WHEN col_delete THEN 'DROP COLUMN ' || msar.get_column_name(tab_id, col_id) END;
-$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT;
-
-
-CREATE OR REPLACE FUNCTION
-msar.process_col_alter_jsonb(tab_id oid, col_alters jsonb) RETURNS text AS $$/*
-Turn a JSONB array representing a set of desired column alterations into a text expression.
-
-Args:
-  tab_id: The OID of the table whose columns we'll alter.
-  col_alters: a JSONB array defining the list of column alterations.
-
-The col_alters JSONB should have the form:
-[
-  {
-    "attnum": <int>,
-    "type": <obj> (optional),
-    "default": <any> (optional),
-    "not_null": <bool> (optional),
-    "delete": <bool> (optional),
-    "name": <str> (optional),
-  },
-  {
-    ...
-  },
-  ...
-]
-
-Notes on the col_alters JSONB
-- For more info about the type object, see the msar.build_type_text function.
-- The "name" key isn't used in this function; it's included here for completeness.
-- A possible 'gotcha' is the "default" key.
-  - If omitted, no change to the default for the given column will occur, other than to cast it to
-    the new type if a type change is specified.
-  - If, on the other hand, the "default" key is set to an explicit value of null, then we will
-    interpret that as a directive to set the column's default to NULL, i.e., we'll drop the
-    current default setting.
-- If the column is a default mathesar ID column, we will silently skip it so it won't be altered.
-*/
-WITH prepped_alters AS (
-  SELECT
-    tab_id,
-    (col_alter_obj ->> 'attnum')::integer AS col_id,
-    msar.build_type_text_complete(col_alter_obj -> 'type', format_type(atttypid, null)) AS new_type,
-    -- We get the old default expression from a catalog table before modifying anything, so we can
-    -- reset it properly if we alter the column type.
-    pg_get_expr(adbin, tab_id) old_default,
-    col_alter_obj -> 'default' AS new_default,
-    (col_alter_obj -> 'not_null')::boolean AS not_null,
-    (col_alter_obj -> 'delete')::boolean AS delete_
-  FROM
-    (SELECT tab_id) as arg,
-    jsonb_array_elements(col_alters) as t(col_alter_obj)
-    INNER JOIN pg_attribute ON (t.col_alter_obj ->> 'attnum')::smallint=attnum AND tab_id=attrelid
-    LEFT JOIN pg_attrdef ON (t.col_alter_obj ->> 'attnum')::smallint=adnum AND tab_id=adrelid
-  WHERE NOT msar.is_mathesar_id_column(tab_id, (t.col_alter_obj ->> 'attnum')::integer)
-)
-SELECT string_agg(
-  nullif(
-    concat_ws(
-      ', ',
-      __msar.build_col_drop_default_expr(tab_id, col_id, new_type, new_default),
-      __msar.build_col_retype_expr(tab_id, col_id, new_type),
-      __msar.build_col_default_expr(tab_id, col_id, old_default, new_default, new_type),
-      __msar.build_col_not_null_expr(tab_id, col_id, not_null),
-      __msar.build_col_drop_text(tab_id, col_id, delete_)
-    ),
-    ''
-  ),
-  ', '
-)
-FROM prepped_alters;
-$$ LANGUAGE SQL RETURNS NULL ON NULL INPUT;
-
-
-CREATE OR REPLACE FUNCTION
-msar.alter_columns(tab_id oid, col_alters jsonb) RETURNS integer[] AS $$/*
-Alter columns of the given table in bulk, returning the IDs of the columns so altered.
-
-Args:
-  tab_id: The OID of the table whose columns we'll alter.
-  col_alters: a JSONB describing the alterations to make.
-
-For the specification of the col_alters JSONB, see the msar.process_col_alter_jsonb function.
-
-Note that all alterations except renaming are done in bulk, and then all name changes are done one
-at a time afterwards. This is because the SQL design specifies at most one name-changing clause
-per query.
-*/
-DECLARE
-  r RECORD;
-  col_alter_str TEXT;
-  description_alter RECORD;
-BEGIN
-  -- Get the string specifying all non-name-change alterations to perform.
-  col_alter_str := msar.process_col_alter_jsonb(tab_id, col_alters);
-
-  -- Perform the non-name-change alterations, reusing the string computed above.
-  IF col_alter_str IS NOT NULL THEN
-    PERFORM __msar.exec_ddl(
-      'ALTER TABLE %s %s',
-      __msar.get_relation_name(tab_id),
-      col_alter_str
-    );
-  END IF;
-
-  -- Here, we perform all description-changing alterations.
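-  -- For example (a sketch), a col_alters element like {"attnum": 2, "description": "Personal"}
-  -- leads to a msar.comment_on_column call for attnum 2; a JSON null description clears the
-  -- existing comment.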
-  FOR description_alter IN
-    SELECT
-      (col_alter->>'attnum')::integer AS col_id,
-      col_alter->>'description' AS comment_
-    FROM jsonb_array_elements(col_alters) AS col_alter
-    WHERE __msar.jsonb_key_exists(col_alter, 'description')
-  LOOP
-    PERFORM msar.comment_on_column(
-      tab_id := tab_id,
-      col_id := description_alter.col_id,
-      comment_ := description_alter.comment_
-    );
-  END LOOP;
-
-  -- Here, we perform all name-changing alterations.
-  FOR r in SELECT attnum, name FROM jsonb_to_recordset(col_alters) AS x(attnum integer, name text)
-  LOOP
-    PERFORM msar.rename_column(tab_id, r.attnum, r.name);
-  END LOOP;
-  RETURN array_agg(x.attnum) FROM jsonb_to_recordset(col_alters) AS x(attnum integer);
-END;
-$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
-
-
--- Comment on column -------------------------------------------------------------------------------
-
-
-CREATE OR REPLACE FUNCTION
-__msar.comment_on_column(
-  tab_name text,
-  col_name text,
-  comment_ text
-) RETURNS text AS $$/*
-Change the description of a column, returning command executed. If comment_ is NULL, column's
-comment is removed.
-
-Args:
-  tab_name: The name of the table containing the column whose comment we will change.
-  col_name: The name of the column whose comment we'll change.
-  comment_: The new comment. Any quotes or special characters must be escaped.
-*/
-DECLARE
-  comment_or_null text := COALESCE(comment_, 'NULL');
-BEGIN
-RETURN __msar.exec_ddl(
-  'COMMENT ON COLUMN %s.%s IS %s',
-  tab_name,
-  col_name,
-  comment_or_null
-);
-END;
-$$ LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION
-msar.comment_on_column(
-  sch_name text,
-  tab_name text,
-  col_name text,
-  comment_ text
-) RETURNS text AS $$/*
-Change the description of a column, returning command executed.
-
-Args:
-  sch_name: The schema of the table whose column's comment we will change.
-  tab_name: The name of the table whose column's comment we will change.
-  col_name: The name of the column whose comment we will change.
-  comment_: The new comment.
-*/
-SELECT __msar.comment_on_column(
-  msar.get_fully_qualified_object_name(sch_name, tab_name),
-  quote_ident(col_name),
-  quote_literal(comment_)
-);
-$$ LANGUAGE SQL;
-
-
-CREATE OR REPLACE FUNCTION
-__msar.comment_on_column(
-  tab_id oid,
-  col_id integer,
-  comment_ text
-) RETURNS text AS $$/*
-Change the description of a column, returning command executed.
-
-Args:
-  tab_id: The OID of the table containing the column whose comment we will change.
-  col_id: The ATTNUM of the column whose comment we will change.
-  comment_: The new comment.
-*/
-SELECT __msar.comment_on_column(
-  __msar.get_relation_name(tab_id),
-  msar.get_column_name(tab_id, col_id),
-  comment_
-);
-$$ LANGUAGE SQL;
-
-
-CREATE OR REPLACE FUNCTION
-msar.comment_on_column(
-  tab_id oid,
-  col_id integer,
-  comment_ text
-) RETURNS text AS $$/*
-Change the description of a column, returning command executed.
-
-Args:
-  tab_id: The OID of the table containing the column whose comment we will change.
-  col_id: The ATTNUM of the column whose comment we will change.
-  comment_: The new comment.
-*/
-SELECT __msar.comment_on_column(
-  tab_id,
-  col_id,
-  quote_literal(comment_)
-);
-$$ LANGUAGE SQL;
-
-
----------------------------------------------------------------------------------------------------
----------------------------------------------------------------------------------------------------
--- MATHESAR LINK FUNCTIONS
---
--- Add a link to the table.
---------------------------------------------------------------------------------------------------
---------------------------------------------------------------------------------------------------
-
--- Create a Many-to-One or a One-to-One link -------------------------------------------------------
-
-
-CREATE OR REPLACE FUNCTION
-msar.create_many_to_one_link(
-  frel_id oid,
-  rel_id oid,
-  col_name text,
-  unique_link boolean DEFAULT false
-) RETURNS smallint AS $$/*
-Create a many-to-one or a one-to-one link between tables, returning the attnum of the newly
-created column in the referrer table.
-
-Args:
-  frel_id: The OID of the referent table, named for confrelid in the pg_constraint table.
-  rel_id: The OID of the referrer table, named for conrelid in the pg_constraint table.
-  col_name: Name of the new column to be created in the referrer table, unquoted.
-  unique_link: Whether to make the link one-to-one instead of many-to-one.
-*/
-DECLARE
-  pk_col_id smallint;
-  col_defs jsonb;
-  added_col_ids smallint[];
-  con_defs jsonb;
-BEGIN
-  pk_col_id := msar.get_pk_column(frel_id);
-  col_defs := jsonb_build_array(
-    jsonb_build_object(
-      'name', col_name,
-      'type', jsonb_build_object('name', msar.get_column_type(frel_id, pk_col_id))
-    )
-  );
-  added_col_ids := msar.add_columns(rel_id, col_defs, false);
-  con_defs := jsonb_build_array(
-    jsonb_build_object(
-      'name', null,
-      'type', 'f',
-      'columns', added_col_ids,
-      'deferrable', false,
-      'fkey_relation_id', frel_id::integer,
-      'fkey_columns', jsonb_build_array(pk_col_id)
-    )
-  );
-  IF unique_link THEN
-    con_defs := jsonb_build_array(
-      jsonb_build_object(
-        'name', null,
-        'type', 'u',
-        'columns', added_col_ids)
-    ) || con_defs;
-  END IF;
-  PERFORM msar.add_constraints(rel_id, con_defs);
-  RETURN added_col_ids[1];
-END;
-$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
-
--- Create a Many-to-Many link ----------------------------------------------------------------------
-
-
-CREATE OR REPLACE FUNCTION
-msar.create_many_to_many_link(
-  sch_id oid,
-  tab_name text,
-  from_rel_ids oid[],
-  col_names text[]
-) RETURNS oid AS $$/*
-Create a many-to-many link between tables, returning the oid of the newly created table.
-
-Args:
-  sch_id: The OID of the schema in which the new referrer table is to be created.
-  tab_name: Name of the referrer table to be created.
-  from_rel_ids: The OIDs of the referent tables.
-  col_names: Names of the new columns to be created in the referrer table, unquoted.
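-
-A usage sketch (the schema, table, and column names here are hypothetical):
-
-  SELECT msar.create_many_to_many_link(
-    'public'::regnamespace::oid,
-    'movies_actors',
-    ARRAY['movies'::regclass::oid, 'actors'::regclass::oid],
-    ARRAY['movie_id', 'actor_id']
-  );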
-*/ -DECLARE - added_table_id oid; -BEGIN - added_table_id := msar.add_mathesar_table(sch_id, tab_name , NULL, NULL, NULL); - PERFORM msar.create_many_to_one_link(a.rel_id, added_table_id, b.col_name) - FROM unnest(from_rel_ids) WITH ORDINALITY AS a(rel_id, idx) - JOIN unnest(col_names) WITH ORDINALITY AS b(col_name, idx) USING (idx); - RETURN added_table_id; -END; -$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; - - ----------------------------------------------------------------------------------------------------- ----------------------------------------------------------------------------------------------------- --- TABLE SPLITTING FUNCTIONS --- --- Functions to extract columns from a table ----------------------------------------------------------------------------------------------------- ----------------------------------------------------------------------------------------------------- - - -CREATE OR REPLACE FUNCTION -msar.extract_columns_from_table( - tab_id oid, col_ids integer[], new_tab_name text, fk_col_name text -) RETURNS jsonb AS $f$/* -Extract columns from a table to create a new table, linked by a foreign key. - -Args: - tab_id: The OID of the table whose columns we'll extract - col_ids: An array of the attnums of the columns to extract - new_tab_name: The name of the new table to be made from the extracted columns, unquoted - fk_col_name: The name to give the new foreign key column in the remainder table (optional) - -The extraction takes a set of columns from the table, and creates a new table from the set of -*distinct* tuples those columns comprise. We also add a new foreign key column to the original - (remainder) table that links it to the new extracted table so they can be easily rejoined. The - extracted columns are removed from the remainder table. -*/ -DECLARE - extracted_col_defs CONSTANT jsonb := msar.get_extracted_col_def_jsonb(tab_id, col_ids); - extracted_con_defs CONSTANT jsonb := msar.get_extracted_con_def_jsonb(tab_id, col_ids); - fkey_name CONSTANT text := msar.build_unique_fkey_column_name(tab_id, fk_col_name, new_tab_name); - extracted_table_id integer; - fkey_attnum integer; -BEGIN - -- Begin by creating a new table with column definitions matching the extracted columns. - extracted_table_id := msar.add_mathesar_table( - msar.get_relation_namespace_oid(tab_id), - new_tab_name, - extracted_col_defs, - extracted_con_defs, - format('Extracted from %s', __msar.get_relation_name(tab_id)) - ); - -- Create a new fkey column and foreign key linking the original table to the extracted one. - fkey_attnum := msar.create_many_to_one_link(extracted_table_id, tab_id, fkey_name); - -- Insert the data from the original table's columns into the extracted columns, and add - -- appropriate fkey values to the new fkey column in the original table to give the proper - -- mapping. 
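-  -- (A sketch of the generated DML below: with extracted columns "a", "b", it is roughly an
-  --  INSERT INTO the extracted table of SELECT DISTINCT a, b from the original, followed by an
-  --  UPDATE setting the new fkey column via dense_rank() over the extracted columns.)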
- PERFORM __msar.exec_ddl($t$ - WITH fkey_cte AS ( - SELECT id, %1$s, dense_rank() OVER (ORDER BY %1$s) AS __msar_tmp_id - FROM %2$s - ), ins_cte AS ( - INSERT INTO %3$s (%1$s) - SELECT DISTINCT %1$s FROM fkey_cte ORDER BY %1$s - ) - UPDATE %2$s SET %4$I=__msar_tmp_id FROM fkey_cte WHERE - %2$s.id=fkey_cte.id - $t$, - -- %1$s This is a comma separated string of the extracted column names - string_agg(quote_ident(col_def ->> 'name'), ', '), - -- %2$s This is the name of the original (remainder) table - __msar.get_relation_name(tab_id), - -- %3$s This is the new extracted table name - __msar.get_relation_name(extracted_table_id), - -- %4$I This is the name of the fkey column in the remainder table. - fkey_name - ) FROM jsonb_array_elements(extracted_col_defs) AS col_def; - -- Drop the original versions of the extracted columns from the original table. - PERFORM msar.drop_columns(tab_id, variadic col_ids); - -- In case the user wanted to give a name to the fkey column matching one of the extracted - -- columns, perform that operation now (since the original will now be dropped from the original - -- table) - IF fk_col_name IS NOT NULL AND fk_col_name IN ( - SELECT col_def ->> 'name' - FROM jsonb_array_elements(extracted_col_defs) AS col_def - ) THEN - PERFORM msar.rename_column(tab_id, fkey_attnum, fk_col_name); - END IF; - RETURN jsonb_build_array(extracted_table_id, fkey_attnum); -END; -$f$ LANGUAGE plpgsql; diff --git a/db/sql/1_msar_joinable_tables.sql b/db/sql/10_msar_joinable_tables.sql similarity index 64% rename from db/sql/1_msar_joinable_tables.sql rename to db/sql/10_msar_joinable_tables.sql index 96e255a4fd..0b49dd99f1 100644 --- a/db/sql/1_msar_joinable_tables.sql +++ b/db/sql/10_msar_joinable_tables.sql @@ -29,9 +29,10 @@ whether to travel from referrer to referant (when False) or from referant to ref */ -CREATE TYPE mathesar_types.joinable_tables AS ( - base integer, -- The OID of the table from which the paths start - target integer, -- The OID of the table where the paths end +DROP TYPE IF EXISTS msar.joinable_tables CASCADE; +CREATE TYPE msar.joinable_tables AS ( + base bigint, -- The OID of the table from which the paths start + target bigint, -- The OID of the table where the paths end join_path jsonb, -- A JSONB array of arrays of arrays fkey_path jsonb, depth integer, @@ -39,9 +40,10 @@ CREATE TYPE mathesar_types.joinable_tables AS ( ); +DROP FUNCTION IF EXISTS msar.get_joinable_tables(integer); CREATE OR REPLACE FUNCTION -msar.get_joinable_tables(max_depth integer) RETURNS SETOF mathesar_types.joinable_tables AS $$/* -This function returns a table of mathesar_types.joinable_tables objects, giving paths to various +msar.get_joinable_tables(max_depth integer) RETURNS SETOF msar.joinable_tables AS $$/* +This function returns a table of msar.joinable_tables objects, giving paths to various joinable tables. Args: @@ -53,9 +55,9 @@ restrictions in either way. 
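 
 For example (a sketch), SELECT * FROM msar.get_joinable_tables(3); returns join paths of up to
 three foreign-key hops starting from each base table.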
*/ WITH RECURSIVE symmetric_fkeys AS ( SELECT - c.oid fkey_oid, - c.conrelid::INTEGER left_rel, - c.confrelid::INTEGER right_rel, + c.oid::BIGINT fkey_oid, + c.conrelid::BIGINT left_rel, + c.confrelid::BIGINT right_rel, c.conkey[1]::INTEGER left_col, c.confkey[1]::INTEGER right_col, false multiple_results, @@ -64,9 +66,9 @@ WITH RECURSIVE symmetric_fkeys AS ( WHERE c.contype='f' and array_length(c.conkey, 1)=1 UNION ALL SELECT - c.oid fkey_oid, - c.confrelid::INTEGER left_rel, - c.conrelid::INTEGER right_rel, + c.oid::BIGINT fkey_oid, + c.confrelid::BIGINT left_rel, + c.conrelid::BIGINT right_rel, c.confkey[1]::INTEGER left_col, c.conkey[1]::INTEGER right_col, true multiple_results, @@ -127,11 +129,36 @@ UNION ALL FROM search_fkey_graph ) SELECT * FROM output_cte; -$$ LANGUAGE sql; +$$ LANGUAGE SQL STABLE; +DROP FUNCTION IF EXISTS msar.get_joinable_tables(integer, oid); CREATE OR REPLACE FUNCTION msar.get_joinable_tables(max_depth integer, table_id oid) RETURNS - SETOF mathesar_types.joinable_tables AS $$ - SELECT * FROM msar.get_joinable_tables(max_depth) WHERE base=table_id -$$ LANGUAGE sql; +jsonb AS $$ + WITH jt_cte AS ( + SELECT * FROM msar.get_joinable_tables(max_depth) WHERE base=table_id + ), target_cte AS ( + SELECT pga.attrelid AS tt_oid, + jsonb_build_object( + 'name', msar.get_relation_name(pga.attrelid), + 'columns', jsonb_object_agg( + pga.attnum, jsonb_build_object( + 'name', pga.attname, + 'type', CASE WHEN attndims>0 THEN '_array' ELSE atttypid::regtype::text END + ) + ) + ) AS tt_info + FROM pg_catalog.pg_attribute AS pga, jt_cte + WHERE pga.attrelid=jt_cte.target AND pga.attnum > 0 and NOT pga.attisdropped + GROUP BY pga.attrelid + ), joinable_tables AS ( + SELECT jsonb_agg(to_jsonb(jt_cte.*)) AS jt FROM jt_cte + ), target_table_info AS ( + SELECT jsonb_object_agg(tt_oid, tt_info) AS tt FROM target_cte + ) + SELECT jsonb_build_object( + 'joinable_tables', COALESCE(joinable_tables.jt, '[]'::jsonb), + 'target_table_info', COALESCE(target_table_info.tt, '{}'::jsonb) + ) FROM joinable_tables, target_table_info; +$$ LANGUAGE SQL STABLE RETURNS NULL ON NULL INPUT; diff --git a/db/sql/2_msar_views.sql b/db/sql/20_msar_views.sql similarity index 95% rename from db/sql/2_msar_views.sql rename to db/sql/20_msar_views.sql index 1b9e608880..6e4e4fb2cd 100644 --- a/db/sql/2_msar_views.sql +++ b/db/sql/20_msar_views.sql @@ -18,7 +18,7 @@ -- -- This file creates a schema `msar_views` where internal mathesar views will be stored. -- --- For naming conventions, see 0_msar.sql +-- For naming conventions, see 00_msar.sql ---------------------------------------------------------------------------------------------------- ---------------------------------------------------------------------------------------------------- CREATE SCHEMA IF NOT EXISTS msar_views; @@ -32,7 +32,7 @@ Args: tab_id: The OID of the table whose associated view we want to name. 
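 
 For example (a sketch), a table with OID 12345 yields the qualified view name msar_views.mv12345.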
*/ BEGIN - RETURN msar.get_fully_qualified_object_name('msar_views', format('mv%s', tab_id)); + RETURN __msar.build_qualified_name_sql('msar_views', format('mv%s', tab_id)); END; $$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; @@ -56,7 +56,7 @@ BEGIN INTO view_cols; RETURN __msar.exec_ddl( 'CREATE OR REPLACE VIEW %s AS SELECT %s FROM %s', - view_name, view_cols, __msar.get_relation_name(tab_id) + view_name, view_cols, __msar.get_qualified_relation_name(tab_id) ); END; $$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; diff --git a/db/sql/3_msar_custom_aggregates.sql b/db/sql/30_msar_custom_aggregates.sql similarity index 100% rename from db/sql/3_msar_custom_aggregates.sql rename to db/sql/30_msar_custom_aggregates.sql diff --git a/db/sql/40_msar_types.sql b/db/sql/40_msar_types.sql new file mode 100644 index 0000000000..e032ec42cc --- /dev/null +++ b/db/sql/40_msar_types.sql @@ -0,0 +1,4554 @@ +CREATE SCHEMA IF NOT EXISTS mathesar_types; + +-- mathesar_types.email +DO $$ +BEGIN + CREATE DOMAIN mathesar_types.email AS text CHECK (value ~ '^[a-zA-Z0-9.!#$%&''*+/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$'); +EXCEPTION + WHEN duplicate_object THEN null; +END $$; + +CREATE OR REPLACE FUNCTION mathesar_types.email_domain_name(mathesar_types.email) +RETURNS text AS $$ + SELECT split_part($1, '@', 2); +$$ +LANGUAGE SQL IMMUTABLE RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.email_local_part(mathesar_types.email) +RETURNS text AS $$ + SELECT split_part($1, '@', 1); +$$ +LANGUAGE SQL IMMUTABLE RETURNS NULL ON NULL INPUT; + + +-- mathesar_types.money +DO $$ +BEGIN + CREATE DOMAIN mathesar_types.mathesar_money AS NUMERIC; +EXCEPTION + WHEN duplicate_object THEN null; +END $$; + +-- mathesar_types.multicurrency +DO $$ +BEGIN + CREATE TYPE mathesar_types.multicurrency_money AS (value NUMERIC, currency CHAR(3)); +EXCEPTION + WHEN duplicate_object THEN null; +END $$; + +-- mathesar_types.uri +CREATE OR REPLACE FUNCTION mathesar_types.uri_parts(text) +RETURNS text[] AS $$ + SELECT regexp_match($1, '^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?'); +$$ +LANGUAGE SQL IMMUTABLE RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.uri_scheme(text) +RETURNS text AS $$ + SELECT (mathesar_types.uri_parts($1))[2]; +$$ +LANGUAGE SQL IMMUTABLE RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.uri_authority(text) +RETURNS text AS $$ + SELECT (mathesar_types.uri_parts($1))[4]; +$$ +LANGUAGE SQL IMMUTABLE RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.uri_path(text) +RETURNS text AS $$ + SELECT (mathesar_types.uri_parts($1))[5]; +$$ +LANGUAGE SQL IMMUTABLE RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.uri_query(text) +RETURNS text AS $$ + SELECT (mathesar_types.uri_parts($1))[7]; +$$ +LANGUAGE SQL IMMUTABLE RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.uri_fragment(text) +RETURNS text AS $$ + SELECT (mathesar_types.uri_parts($1))[9]; +$$ +LANGUAGE SQL IMMUTABLE RETURNS NULL ON NULL INPUT; + +DO $$ +BEGIN + CREATE DOMAIN mathesar_types.uri AS text CHECK ( + (value IS NULL) OR (mathesar_types.uri_scheme(value) IS NOT NULL + AND mathesar_types.uri_path(value) IS NOT NULL) + ); +EXCEPTION + WHEN duplicate_object THEN null; +END $$; + +-- mathesar_types.json_array +DO $$ +BEGIN + CREATE DOMAIN mathesar_types.mathesar_json_array AS JSONB CHECK (jsonb_typeof(VALUE) = 'array'); +EXCEPTION + WHEN 
duplicate_object THEN null; +END $$; + +-- mathesar_types.json_object +DO $$ +BEGIN + CREATE DOMAIN mathesar_types.mathesar_json_object AS JSONB CHECK (jsonb_typeof(VALUE) = 'object'); +EXCEPTION + WHEN duplicate_object THEN null; +END $$; + +-- mathesar_types.cast_to_boolean +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_boolean(boolean) +RETURNS boolean +AS $$ + + BEGIN + RETURN $1::boolean; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_boolean(smallint) +RETURNS boolean +AS $$ + BEGIN + IF $1<>0 AND $1<>1 THEN + RAISE EXCEPTION '% is not a boolean', $1; END IF; + RETURN $1<>0; + END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_boolean(real) +RETURNS boolean +AS $$ + BEGIN + IF $1<>0 AND $1<>1 THEN + RAISE EXCEPTION '% is not a boolean', $1; END IF; + RETURN $1<>0; + END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_boolean(bigint) +RETURNS boolean +AS $$ + BEGIN + IF $1<>0 AND $1<>1 THEN + RAISE EXCEPTION '% is not a boolean', $1; END IF; + RETURN $1<>0; + END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_boolean(double precision) +RETURNS boolean +AS $$ + + BEGIN + IF $1<>0 AND $1<>1 THEN + RAISE EXCEPTION '% is not a boolean', $1; END IF; + RETURN $1<>0; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_boolean(numeric) +RETURNS boolean +AS $$ + BEGIN + IF $1<>0 AND $1<>1 THEN + RAISE EXCEPTION '% is not a boolean', $1; END IF; + RETURN $1<>0; + END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_boolean(integer) +RETURNS boolean +AS $$ + BEGIN + IF $1<>0 AND $1<>1 THEN + RAISE EXCEPTION '% is not a boolean', $1; END IF; + RETURN $1<>0; + END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_boolean(character varying) +RETURNS boolean +AS $$ + DECLARE + istrue boolean; + BEGIN + SELECT + $1='1' OR lower($1) = 'on' + OR lower($1)='t' OR lower($1)='true' + OR lower($1)='y' OR lower($1)='yes' + INTO istrue; + IF istrue + OR $1='0' OR lower($1) = 'off' + OR lower($1)='f' OR lower($1)='false' + OR lower($1)='n' OR lower($1)='no' + THEN + RETURN istrue; + END IF; + RAISE EXCEPTION '% is not a boolean', $1; + END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_boolean(text) +RETURNS boolean +AS $$ + DECLARE + istrue boolean; + BEGIN + SELECT + $1='1' OR lower($1) = 'on' + OR lower($1)='t' OR lower($1)='true' + OR lower($1)='y' OR lower($1)='yes' + INTO istrue; + IF istrue + OR $1='0' OR lower($1) = 'off' + OR lower($1)='f' OR lower($1)='false' + OR lower($1)='n' OR lower($1)='no' + THEN + RETURN istrue; + END IF; + RAISE EXCEPTION '% is not a boolean', $1; + END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_boolean(character) +RETURNS boolean +AS $$ + DECLARE + istrue boolean; + BEGIN + SELECT + $1='1' OR lower($1) = 'on' + OR lower($1)='t' OR lower($1)='true' + OR lower($1)='y' OR lower($1)='yes' + INTO istrue; + IF istrue + OR $1='0' OR lower($1) = 'off' + OR lower($1)='f' OR lower($1)='false' + OR lower($1)='n' OR lower($1)='no' + THEN + RETURN istrue; + END IF; + RAISE EXCEPTION '% is not a boolean', $1; + END; +$$ LANGUAGE plpgsql RETURNS NULL ON NULL 
INPUT; + + +-- mathesar_types.cast_to_real + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_real(smallint) +RETURNS real +AS $$ + + BEGIN + RETURN $1::real; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_real(bigint) +RETURNS real +AS $$ + + BEGIN + RETURN $1::real; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_real(double precision) +RETURNS real +AS $$ + + BEGIN + RETURN $1::real; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_real(character) +RETURNS real +AS $$ + + BEGIN + RETURN $1::real; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_real(integer) +RETURNS real +AS $$ + + BEGIN + RETURN $1::real; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_real(real) +RETURNS real +AS $$ + + BEGIN + RETURN $1::real; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_real(character varying) +RETURNS real +AS $$ + + BEGIN + RETURN $1::real; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_real(mathesar_types.mathesar_money) +RETURNS real +AS $$ + + BEGIN + RETURN $1::real; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_real(numeric) +RETURNS real +AS $$ + + BEGIN + RETURN $1::real; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_real(text) +RETURNS real +AS $$ + + BEGIN + RETURN $1::real; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_real(money) +RETURNS real +AS $$ + + BEGIN + RETURN $1::real; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_real(boolean) +RETURNS real +AS $$ + +BEGIN + IF $1 THEN + RETURN 1::real; + END IF; + RETURN 0::real; +END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +-- mathesar_types.cast_to_double_precision + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_double_precision(smallint) +RETURNS double precision +AS $$ + + BEGIN + RETURN $1::double precision; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_double_precision(bigint) +RETURNS double precision +AS $$ + + BEGIN + RETURN $1::double precision; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_double_precision(double precision) +RETURNS double precision +AS $$ + + BEGIN + RETURN $1::double precision; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_double_precision(character) +RETURNS double precision +AS $$ + + BEGIN + RETURN $1::double precision; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_double_precision(integer) +RETURNS double precision +AS $$ + + BEGIN + RETURN $1::double precision; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_double_precision(real) +RETURNS double precision +AS $$ + + BEGIN + RETURN $1::double precision; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR 
REPLACE FUNCTION mathesar_types.cast_to_double_precision(character varying) +RETURNS double precision +AS $$ + + BEGIN + RETURN $1::double precision; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_double_precision(mathesar_types.mathesar_money) +RETURNS double precision +AS $$ + + BEGIN + RETURN $1::double precision; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_double_precision(numeric) +RETURNS double precision +AS $$ + + BEGIN + RETURN $1::double precision; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_double_precision(text) +RETURNS double precision +AS $$ + + BEGIN + RETURN $1::double precision; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_double_precision(money) +RETURNS double precision +AS $$ + + BEGIN + RETURN $1::double precision; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_double_precision(boolean) +RETURNS double precision +AS $$ + +BEGIN + IF $1 THEN + RETURN 1::double precision; + END IF; + RETURN 0::double precision; +END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +-- mathesar_types.cast_to_email + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_email(mathesar_types.email) +RETURNS mathesar_types.email +AS $$ + + BEGIN + RETURN $1::mathesar_types.email; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_email(character varying) +RETURNS mathesar_types.email +AS $$ + + BEGIN + RETURN $1::mathesar_types.email; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_email(text) +RETURNS mathesar_types.email +AS $$ + + BEGIN + RETURN $1::mathesar_types.email; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_email(character) +RETURNS mathesar_types.email +AS $$ + + BEGIN + RETURN $1::mathesar_types.email; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +-- mathesar_types.cast_to_smallint + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_smallint(smallint) +RETURNS smallint +AS $$ + + BEGIN + RETURN $1::smallint; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_smallint(character varying) +RETURNS smallint +AS $$ + + BEGIN + RETURN $1::smallint; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_smallint(bigint) +RETURNS smallint +AS $$ + + BEGIN + RETURN $1::smallint; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_smallint(character) +RETURNS smallint +AS $$ + + BEGIN + RETURN $1::smallint; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_smallint(text) +RETURNS smallint +AS $$ + + BEGIN + RETURN $1::smallint; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_smallint(integer) +RETURNS smallint +AS $$ + + BEGIN + RETURN $1::smallint; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_smallint(real) +RETURNS smallint +AS $$ + + DECLARE integer_res smallint; + BEGIN + SELECT $1::smallint INTO 
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_smallint(mathesar_types.mathesar_money)
+RETURNS smallint
+AS $$
+
+  DECLARE integer_res smallint;
+  BEGIN
+    SELECT $1::smallint INTO integer_res;
+    IF integer_res = $1 THEN
+      RETURN integer_res;
+    END IF;
+    RAISE EXCEPTION '% cannot be cast to smallint without loss', $1;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_smallint(double precision)
+RETURNS smallint
+AS $$
+
+  DECLARE integer_res smallint;
+  BEGIN
+    SELECT $1::smallint INTO integer_res;
+    IF integer_res = $1 THEN
+      RETURN integer_res;
+    END IF;
+    RAISE EXCEPTION '% cannot be cast to smallint without loss', $1;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_smallint(numeric)
+RETURNS smallint
+AS $$
+
+  DECLARE integer_res smallint;
+  BEGIN
+    SELECT $1::smallint INTO integer_res;
+    IF integer_res = $1 THEN
+      RETURN integer_res;
+    END IF;
+    RAISE EXCEPTION '% cannot be cast to smallint without loss', $1;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_smallint(money)
+RETURNS smallint
+AS $$
+
+  DECLARE integer_res smallint;
+  BEGIN
+    SELECT $1::smallint INTO integer_res;
+    IF integer_res = $1 THEN
+      RETURN integer_res;
+    END IF;
+    RAISE EXCEPTION '% cannot be cast to smallint without loss', $1;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_smallint(boolean)
+RETURNS smallint
+AS $$
+
+BEGIN
+  IF $1 THEN
+    RETURN 1::smallint;
+  END IF;
+  RETURN 0::smallint;
+END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_bigint(smallint)
+RETURNS bigint
+AS $$
+
+  BEGIN
+    RETURN $1::bigint;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+
+-- mathesar_types.cast_to_bigint
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_bigint(character varying)
+RETURNS bigint
+AS $$
+
+  BEGIN
+    RETURN $1::bigint;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_bigint(bigint)
+RETURNS bigint
+AS $$
+
+  BEGIN
+    RETURN $1::bigint;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_bigint(character)
+RETURNS bigint
+AS $$
+
+  BEGIN
+    RETURN $1::bigint;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_bigint(text)
+RETURNS bigint
+AS $$
+
+  BEGIN
+    RETURN $1::bigint;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_bigint(integer)
+RETURNS bigint
+AS $$
+
+  BEGIN
+    RETURN $1::bigint;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_bigint(real)
+RETURNS bigint
+AS $$
+
+  DECLARE integer_res bigint;
+  BEGIN
+    SELECT $1::bigint INTO integer_res;
+    IF integer_res = $1 THEN
+      RETURN integer_res;
+    END IF;
+    RAISE EXCEPTION '% cannot be cast to bigint without loss', $1;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_bigint(mathesar_types.mathesar_money)
+RETURNS bigint
+AS $$
+
+  DECLARE integer_res bigint;
+  BEGIN
+    SELECT $1::bigint INTO integer_res;
+    IF integer_res = $1 THEN
+      RETURN integer_res;
+    END IF;
+    RAISE EXCEPTION '% cannot be cast to bigint without loss', $1;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_bigint(double precision)
+RETURNS bigint
+AS $$
+
+  DECLARE integer_res bigint;
+  BEGIN
+    SELECT $1::bigint INTO integer_res;
+    IF integer_res = $1 THEN
+      RETURN integer_res;
+    END IF;
+    RAISE EXCEPTION '% cannot be cast to bigint without loss', $1;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_bigint(numeric)
+RETURNS bigint
+AS $$
+
+  DECLARE integer_res bigint;
+  BEGIN
+    SELECT $1::bigint INTO integer_res;
+    IF integer_res = $1 THEN
+      RETURN integer_res;
+    END IF;
+    RAISE EXCEPTION '% cannot be cast to bigint without loss', $1;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_bigint(money)
+RETURNS bigint
+AS $$
+
+  DECLARE integer_res bigint;
+  BEGIN
+    SELECT $1::bigint INTO integer_res;
+    IF integer_res = $1 THEN
+      RETURN integer_res;
+    END IF;
+    RAISE EXCEPTION '% cannot be cast to bigint without loss', $1;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_bigint(boolean)
+RETURNS bigint
+AS $$
+
+BEGIN
+  IF $1 THEN
+    RETURN 1::bigint;
+  END IF;
+  RETURN 0::bigint;
+END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+
+-- mathesar_types.cast_to_integer
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_integer(smallint)
+RETURNS integer
+AS $$
+
+  BEGIN
+    RETURN $1::integer;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_integer(character varying)
+RETURNS integer
+AS $$
+
+  BEGIN
+    RETURN $1::integer;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_integer(bigint)
+RETURNS integer
+AS $$
+
+  BEGIN
+    RETURN $1::integer;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_integer(character)
+RETURNS integer
+AS $$
+
+  BEGIN
+    RETURN $1::integer;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_integer(text)
+RETURNS integer
+AS $$
+
+  BEGIN
+    RETURN $1::integer;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_integer(integer)
+RETURNS integer
+AS $$
+
+  BEGIN
+    RETURN $1::integer;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_integer(real)
+RETURNS integer
+AS $$
+
+  DECLARE integer_res integer;
+  BEGIN
+    SELECT $1::integer INTO integer_res;
+    IF integer_res = $1 THEN
+      RETURN integer_res;
+    END IF;
+    RAISE EXCEPTION '% cannot be cast to integer without loss', $1;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_integer(mathesar_types.mathesar_money)
+RETURNS integer
+AS $$
+
+  DECLARE integer_res integer;
+  BEGIN
+    SELECT $1::integer INTO integer_res;
+    IF integer_res = $1 THEN
+      RETURN integer_res;
+    END IF;
+    RAISE EXCEPTION '% cannot be cast to integer without loss', $1;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_integer(double precision)
+RETURNS integer
+AS $$
+
+  DECLARE integer_res integer;
+  BEGIN
+    SELECT $1::integer INTO integer_res;
+    IF integer_res = $1 THEN
+      RETURN integer_res;
+    END IF;
+    RAISE EXCEPTION '% cannot be cast to integer without loss', $1;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
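+-- Example (illustrative): the same lossless guard protects every numeric source:
+--   SELECT mathesar_types.cast_to_integer(2.0::double precision);  -- 2
+--   SELECT mathesar_types.cast_to_integer(2.5::double precision);  -- ERROR: 2.5 cannot be cast to integer without loss
+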
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_integer(numeric)
+RETURNS integer
+AS $$
+
+  DECLARE integer_res integer;
+  BEGIN
+    SELECT $1::integer INTO integer_res;
+    IF integer_res = $1 THEN
+      RETURN integer_res;
+    END IF;
+    RAISE EXCEPTION '% cannot be cast to integer without loss', $1;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_integer(money)
+RETURNS integer
+AS $$
+
+  DECLARE integer_res integer;
+  BEGIN
+    SELECT $1::integer INTO integer_res;
+    IF integer_res = $1 THEN
+      RETURN integer_res;
+    END IF;
+    RAISE EXCEPTION '% cannot be cast to integer without loss', $1;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_integer(boolean)
+RETURNS integer
+AS $$
+
+BEGIN
+  IF $1 THEN
+    RETURN 1::integer;
+  END IF;
+  RETURN 0::integer;
+END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+
+-- mathesar_types.cast_to_interval
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_interval(interval)
+RETURNS interval
+AS $$
+
+  BEGIN
+    RETURN $1;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_interval(character varying)
+RETURNS interval
+AS $$
+  BEGIN
+    PERFORM $1::numeric;
+    RAISE EXCEPTION '% is a numeric', $1;
+    EXCEPTION
+      WHEN sqlstate '22P02' THEN
+        RETURN $1::interval;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_interval(text)
+RETURNS interval
+AS $$
+  BEGIN
+    PERFORM $1::numeric;
+    RAISE EXCEPTION '% is a numeric', $1;
+    EXCEPTION
+      WHEN sqlstate '22P02' THEN
+        RETURN $1::interval;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_interval(character)
+RETURNS interval
+AS $$
+  BEGIN
+    PERFORM $1::numeric;
+    RAISE EXCEPTION '% is a numeric', $1;
+    EXCEPTION
+      WHEN sqlstate '22P02' THEN
+        RETURN $1::interval;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+
+-- mathesar_types.cast_to_time_without_time_zone
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_time_without_time_zone(text)
+RETURNS time without time zone
+AS $$
+
+  BEGIN
+    RETURN $1::time without time zone;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_time_without_time_zone(character varying)
+RETURNS time without time zone
+AS $$
+
+  BEGIN
+    RETURN $1::time without time zone;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_time_without_time_zone(time without time zone)
+RETURNS time without time zone
+AS $$
+
+  BEGIN
+    RETURN $1::time without time zone;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_time_without_time_zone(time with time zone)
+RETURNS time without time zone
+AS $$
+
+  BEGIN
+    RETURN $1::time without time zone;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+
+-- mathesar_types.cast_to_time_with_time_zone
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_time_with_time_zone(text)
+RETURNS time with time zone
+AS $$
+
+  BEGIN
+    RETURN $1::time with time zone;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
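+-- Example (illustrative): casting time with time zone down to time without time
+-- zone, as defined above, simply drops the UTC offset:
+--   SELECT mathesar_types.cast_to_time_without_time_zone('12:30:00+05:30'::time with time zone);  -- 12:30:00
+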
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_time_with_time_zone(character varying)
+RETURNS time with time zone
+AS $$
+
+  BEGIN
+    RETURN $1::time with time zone;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_time_with_time_zone(time without time zone)
+RETURNS time with time zone
+AS $$
+
+  BEGIN
+    RETURN $1::time with time zone;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_time_with_time_zone(time with time zone)
+RETURNS time with time zone
+AS $$
+
+  BEGIN
+    RETURN $1::time with time zone;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+
+-- mathesar_types.cast_to_timestamp_with_time_zone
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_timestamp_with_time_zone(character varying)
+RETURNS timestamp with time zone
+AS $$
+
+  BEGIN
+    RETURN $1::timestamp with time zone;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_timestamp_with_time_zone(timestamp with time zone)
+RETURNS timestamp with time zone
+AS $$
+
+  BEGIN
+    RETURN $1::timestamp with time zone;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_timestamp_with_time_zone(character)
+RETURNS timestamp with time zone
+AS $$
+
+  BEGIN
+    RETURN $1::timestamp with time zone;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_timestamp_with_time_zone(timestamp without time zone)
+RETURNS timestamp with time zone
+AS $$
+
+  BEGIN
+    RETURN $1::timestamp with time zone;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_timestamp_with_time_zone(text)
+RETURNS timestamp with time zone
+AS $$
+
+  BEGIN
+    RETURN $1::timestamp with time zone;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+
+-- mathesar_types.cast_to_timestamp_without_time_zone
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_timestamp_without_time_zone(timestamp without time zone)
+RETURNS timestamp without time zone
+AS $$
+
+  BEGIN
+    RETURN $1::timestamp without time zone;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_timestamp_without_time_zone(character varying)
+RETURNS timestamp without time zone
+AS $$
+
+DECLARE
+timestamp_value_with_tz NUMERIC;
+timestamp_value NUMERIC;
+date_value NUMERIC;
+BEGIN
+  SET LOCAL TIME ZONE 'UTC';
+  SELECT EXTRACT(EPOCH FROM $1::TIMESTAMP WITH TIME ZONE ) INTO timestamp_value_with_tz;
+  SELECT EXTRACT(EPOCH FROM $1::TIMESTAMP WITHOUT TIME ZONE) INTO timestamp_value;
+  SELECT EXTRACT(EPOCH FROM $1::DATE ) INTO date_value;
+
+  IF (timestamp_value_with_tz = timestamp_value) THEN
+    RETURN $1::timestamp without time zone;
+  END IF;
+
+  RAISE EXCEPTION '% is not a timestamp without time zone', $1;
+END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_timestamp_without_time_zone(text)
+RETURNS timestamp without time zone
+AS $$
+
+DECLARE
+timestamp_value_with_tz NUMERIC;
+timestamp_value NUMERIC;
+date_value NUMERIC;
+BEGIN
+  SET LOCAL TIME ZONE 'UTC';
+  SELECT EXTRACT(EPOCH FROM $1::TIMESTAMP WITH TIME ZONE ) INTO timestamp_value_with_tz;
+  SELECT EXTRACT(EPOCH FROM $1::TIMESTAMP WITHOUT TIME ZONE) INTO timestamp_value;
+  SELECT EXTRACT(EPOCH FROM $1::DATE ) INTO date_value;
+
+  IF (timestamp_value_with_tz = timestamp_value) THEN
+    RETURN $1::timestamp without time zone;
+  END IF;
+
+  RAISE EXCEPTION '% is not a timestamp without time zone', $1;
+END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
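+-- Example (illustrative): the epoch comparison above accepts plain timestamps and
+-- rejects strings carrying an explicit zone offset:
+--   SELECT mathesar_types.cast_to_timestamp_without_time_zone('2023-01-06 10:00:00');        -- accepted
+--   SELECT mathesar_types.cast_to_timestamp_without_time_zone('2023-01-06 10:00:00+05:30');  -- raises
+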
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_timestamp_without_time_zone(character)
+RETURNS timestamp without time zone
+AS $$
+
+DECLARE
+timestamp_value_with_tz NUMERIC;
+timestamp_value NUMERIC;
+date_value NUMERIC;
+BEGIN
+  SET LOCAL TIME ZONE 'UTC';
+  SELECT EXTRACT(EPOCH FROM $1::TIMESTAMP WITH TIME ZONE ) INTO timestamp_value_with_tz;
+  SELECT EXTRACT(EPOCH FROM $1::TIMESTAMP WITHOUT TIME ZONE) INTO timestamp_value;
+  SELECT EXTRACT(EPOCH FROM $1::DATE ) INTO date_value;
+
+  IF (timestamp_value_with_tz = timestamp_value) THEN
+    RETURN $1::timestamp without time zone;
+  END IF;
+
+  RAISE EXCEPTION '% is not a timestamp without time zone', $1;
+END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_timestamp_without_time_zone(date)
+RETURNS timestamp without time zone
+AS $$
+
+DECLARE
+timestamp_value_with_tz NUMERIC;
+timestamp_value NUMERIC;
+date_value NUMERIC;
+BEGIN
+  SET LOCAL TIME ZONE 'UTC';
+  SELECT EXTRACT(EPOCH FROM $1::TIMESTAMP WITH TIME ZONE ) INTO timestamp_value_with_tz;
+  SELECT EXTRACT(EPOCH FROM $1::TIMESTAMP WITHOUT TIME ZONE) INTO timestamp_value;
+  SELECT EXTRACT(EPOCH FROM $1::DATE ) INTO date_value;
+
+  IF (timestamp_value_with_tz = timestamp_value) THEN
+    RETURN $1::timestamp without time zone;
+  END IF;
+
+  RAISE EXCEPTION '% is not a timestamp without time zone', $1;
+END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_timestamp_without_time_zone(timestamp with time zone)
+RETURNS timestamp without time zone
+AS $$
+
+DECLARE
+timestamp_value_with_tz NUMERIC;
+timestamp_value NUMERIC;
+date_value NUMERIC;
+BEGIN
+  SET LOCAL TIME ZONE 'UTC';
+  SELECT EXTRACT(EPOCH FROM $1::TIMESTAMP WITH TIME ZONE ) INTO timestamp_value_with_tz;
+  SELECT EXTRACT(EPOCH FROM $1::TIMESTAMP WITHOUT TIME ZONE) INTO timestamp_value;
+  SELECT EXTRACT(EPOCH FROM $1::DATE ) INTO date_value;
+
+  IF (timestamp_value_with_tz = timestamp_value) THEN
+    RETURN $1::timestamp without time zone;
+  END IF;
+
+  RAISE EXCEPTION '% is not a timestamp without time zone', $1;
+END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+
+-- mathesar_types.cast_to_date
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_date(date)
+RETURNS date
+AS $$
+
+  BEGIN
+    RETURN $1::timestamp with time zone;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_date(character varying)
+RETURNS date
+AS $$
+
+DECLARE
+timestamp_value_with_tz NUMERIC;
+timestamp_value NUMERIC;
+date_value NUMERIC;
+BEGIN
+  SET LOCAL TIME ZONE 'UTC';
+  SELECT EXTRACT(EPOCH FROM $1::TIMESTAMP WITH TIME ZONE ) INTO timestamp_value_with_tz;
+  SELECT EXTRACT(EPOCH FROM $1::TIMESTAMP WITHOUT TIME ZONE) INTO timestamp_value;
+  SELECT EXTRACT(EPOCH FROM $1::DATE ) INTO date_value;
+
+  IF (timestamp_value_with_tz = date_value) THEN
+    RETURN $1::date;
+  END IF;
+
+  RAISE EXCEPTION '% is not a date', $1;
+END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_date(text)
+RETURNS date
+AS $$
+
+DECLARE
+timestamp_value_with_tz NUMERIC;
+timestamp_value NUMERIC;
+date_value NUMERIC;
+BEGIN
+  SET LOCAL TIME ZONE 'UTC';
+  SELECT EXTRACT(EPOCH FROM $1::TIMESTAMP WITH TIME ZONE ) INTO timestamp_value_with_tz;
+  SELECT EXTRACT(EPOCH FROM $1::TIMESTAMP WITHOUT TIME ZONE) INTO timestamp_value;
+  SELECT EXTRACT(EPOCH FROM $1::DATE ) INTO date_value;
+
+  IF (timestamp_value_with_tz = date_value) THEN
+    RETURN $1::date;
+  END IF;
+
+  RAISE EXCEPTION '% is not a date', $1;
+END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
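+-- Example (illustrative): a string must be a pure date to pass the check above:
+--   SELECT mathesar_types.cast_to_date('2023-01-06');           -- accepted
+--   SELECT mathesar_types.cast_to_date('2023-01-06 10:00:00');  -- ERROR: ... is not a date
+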
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_date(character)
+RETURNS date
+AS $$
+
+DECLARE
+timestamp_value_with_tz NUMERIC;
+timestamp_value NUMERIC;
+date_value NUMERIC;
+BEGIN
+  SET LOCAL TIME ZONE 'UTC';
+  SELECT EXTRACT(EPOCH FROM $1::TIMESTAMP WITH TIME ZONE ) INTO timestamp_value_with_tz;
+  SELECT EXTRACT(EPOCH FROM $1::TIMESTAMP WITHOUT TIME ZONE) INTO timestamp_value;
+  SELECT EXTRACT(EPOCH FROM $1::DATE ) INTO date_value;
+
+  IF (timestamp_value_with_tz = date_value) THEN
+    RETURN $1::date;
+  END IF;
+
+  RAISE EXCEPTION '% is not a date', $1;
+END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_date(timestamp without time zone)
+RETURNS date
+AS $$
+
+DECLARE
+timestamp_value_with_tz NUMERIC;
+timestamp_value NUMERIC;
+date_value NUMERIC;
+BEGIN
+  SET LOCAL TIME ZONE 'UTC';
+  SELECT EXTRACT(EPOCH FROM $1::TIMESTAMP WITH TIME ZONE ) INTO timestamp_value_with_tz;
+  SELECT EXTRACT(EPOCH FROM $1::TIMESTAMP WITHOUT TIME ZONE) INTO timestamp_value;
+  SELECT EXTRACT(EPOCH FROM $1::DATE ) INTO date_value;
+
+  IF (timestamp_value_with_tz = date_value) THEN
+    RETURN $1::date;
+  END IF;
+
+  RAISE EXCEPTION '% is not a date', $1;
+END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_date(timestamp with time zone)
+RETURNS date
+AS $$
+
+DECLARE
+timestamp_value_with_tz NUMERIC;
+timestamp_value NUMERIC;
+date_value NUMERIC;
+BEGIN
+  SET LOCAL TIME ZONE 'UTC';
+  SELECT EXTRACT(EPOCH FROM $1::TIMESTAMP WITH TIME ZONE ) INTO timestamp_value_with_tz;
+  SELECT EXTRACT(EPOCH FROM $1::TIMESTAMP WITHOUT TIME ZONE) INTO timestamp_value;
+  SELECT EXTRACT(EPOCH FROM $1::DATE ) INTO date_value;
+
+  IF (timestamp_value_with_tz = date_value) THEN
+    RETURN $1::date;
+  END IF;
+
+  RAISE EXCEPTION '% is not a date', $1;
+END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+
+-- mathesar_types.cast_to_mathesar_money
+
+CREATE OR REPLACE FUNCTION mathesar_types.get_mathesar_money_array(text) RETURNS text[]
+AS $$
+  DECLARE
+    raw_arr text[];
+    actual_number_arr text[];
+    group_divider_arr text[];
+    decimal_point_arr text[];
+    actual_number text;
+    group_divider text;
+    decimal_point text;
+  BEGIN
+    SELECT regexp_matches($1, '^(?:(?:[^.,0-9]+)([0-9]{4,}(?:([,.])[0-9]+)?|[0-9]{1,3}(?:([,.])[0-9]{1,2}|[0-9]{4,})?|[0-9]{1,3}(,)[0-9]{3}(\.)[0-9]+|[0-9]{1,3}(\.)[0-9]{3}(,)[0-9]+|[0-9]{1,3}(?:(,)[0-9]{3}){2,}(?:(\.)[0-9]+)?|[0-9]{1,3}(?:(\.)[0-9]{3}){2,}(?:(,)[0-9]+)?|[0-9]{1,3}(?:( )[0-9]{3})+(?:([,.])[0-9]+)?|[0-9]{1,2}(?:(,)[0-9]{2})+,[0-9]{3}(?:(\.)[0-9]+)?)(?:[^.,0-9]+)?|(?:[^.,0-9]+)?([0-9]{4,}(?:([,.])[0-9]+)?|[0-9]{1,3}(?:([,.])[0-9]{1,2}|[0-9]{4,})?|[0-9]{1,3}(,)[0-9]{3}(\.)[0-9]+|[0-9]{1,3}(\.)[0-9]{3}(,)[0-9]+|[0-9]{1,3}(?:(,)[0-9]{3}){2,}(?:(\.)[0-9]+)?|[0-9]{1,3}(?:(\.)[0-9]{3}){2,}(?:(,)[0-9]+)?|[0-9]{1,3}(?:( )[0-9]{3})+(?:([,.])[0-9]+)?|[0-9]{1,2}(?:(,)[0-9]{2})+,[0-9]{3}(?:(\.)[0-9]+)?)(?:[^.,0-9]+))$') INTO raw_arr;
+    IF raw_arr IS NULL THEN
+      RETURN NULL;
+    END IF;
+    SELECT array_remove(ARRAY[raw_arr[1],raw_arr[16]], null) INTO actual_number_arr;
+    SELECT array_remove(ARRAY[raw_arr[4],raw_arr[6],raw_arr[8],raw_arr[10],raw_arr[12],raw_arr[14],raw_arr[19],raw_arr[21],raw_arr[23],raw_arr[25],raw_arr[27],raw_arr[29]], null) INTO group_divider_arr;
+    SELECT array_remove(ARRAY[raw_arr[2],raw_arr[3],raw_arr[5],raw_arr[7],raw_arr[9],raw_arr[11],raw_arr[13],raw_arr[15],raw_arr[17],raw_arr[18],raw_arr[20],raw_arr[22],raw_arr[24],raw_arr[26],raw_arr[28],raw_arr[30]], null) INTO decimal_point_arr;
+    SELECT actual_number_arr[1] INTO actual_number;
+    SELECT group_divider_arr[1] INTO group_divider;
+    SELECT decimal_point_arr[1] INTO decimal_point;
+    RETURN ARRAY[actual_number, group_divider, decimal_point, replace($1, actual_number, '')];
+  END;
+$$ LANGUAGE plpgsql;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_mathesar_money(mathesar_types.mathesar_money)
+RETURNS mathesar_types.mathesar_money
+AS $$
+
+  BEGIN
+    RETURN $1::mathesar_types.mathesar_money;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_mathesar_money(smallint)
+RETURNS mathesar_types.mathesar_money
+AS $$
+
+  BEGIN
+    RETURN $1::numeric::mathesar_types.mathesar_money;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_mathesar_money(real)
+RETURNS mathesar_types.mathesar_money
+AS $$
+
+  BEGIN
+    RETURN $1::numeric::mathesar_types.mathesar_money;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_mathesar_money(bigint)
+RETURNS mathesar_types.mathesar_money
+AS $$
+
+  BEGIN
+    RETURN $1::numeric::mathesar_types.mathesar_money;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_mathesar_money(double precision)
+RETURNS mathesar_types.mathesar_money
+AS $$
+
+  BEGIN
+    RETURN $1::numeric::mathesar_types.mathesar_money;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_mathesar_money(numeric)
+RETURNS mathesar_types.mathesar_money
+AS $$
+
+  BEGIN
+    RETURN $1::numeric::mathesar_types.mathesar_money;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_mathesar_money(integer)
+RETURNS mathesar_types.mathesar_money
+AS $$
+
+  BEGIN
+    RETURN $1::numeric::mathesar_types.mathesar_money;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_mathesar_money(character varying)
+RETURNS mathesar_types.mathesar_money
+AS $$
+
+  DECLARE decimal_point text;
+  DECLARE is_negative boolean;
+  DECLARE money_arr text[];
+  DECLARE money_num text;
+  BEGIN
+    SELECT mathesar_types.get_mathesar_money_array($1::text) INTO money_arr;
+    IF money_arr IS NULL THEN
+      RAISE EXCEPTION '% cannot be cast to mathesar_types.mathesar_money', $1;
+    END IF;
+    SELECT money_arr[1] INTO money_num;
+    SELECT ltrim(to_char(1, 'D'), ' ') INTO decimal_point;
+    SELECT $1::text ~ '^.*(-|\(.+\)).*$' INTO is_negative;
+    IF money_arr[2] IS NOT NULL THEN
+      SELECT regexp_replace(money_num, money_arr[2], '', 'gq') INTO money_num;
+    END IF;
+    IF money_arr[3] IS NOT NULL THEN
+      SELECT regexp_replace(money_num, money_arr[3], decimal_point, 'q') INTO money_num;
+    END IF;
+    IF is_negative THEN
+      RETURN ('-' || money_num)::mathesar_types.mathesar_money;
+    END IF;
+    RETURN money_num::mathesar_types.mathesar_money;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
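+-- Example (illustrative, assuming a locale whose decimal separator is '.'):
+-- the helper infers grouping and decimal characters before normalising, so
+--   SELECT mathesar_types.cast_to_mathesar_money('$1,234.50');  -- 1234.50
+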
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_mathesar_money(text)
+RETURNS mathesar_types.mathesar_money
+AS $$
+
+  DECLARE decimal_point text;
+  DECLARE is_negative boolean;
+  DECLARE money_arr text[];
+  DECLARE money_num text;
+  BEGIN
+    SELECT mathesar_types.get_mathesar_money_array($1::text) INTO money_arr;
+    IF money_arr IS NULL THEN
+      RAISE EXCEPTION '% cannot be cast to mathesar_types.mathesar_money', $1;
+    END IF;
+    SELECT money_arr[1] INTO money_num;
+    SELECT ltrim(to_char(1, 'D'), ' ') INTO decimal_point;
+    SELECT $1::text ~ '^.*(-|\(.+\)).*$' INTO is_negative;
+    IF money_arr[2] IS NOT NULL THEN
+      SELECT regexp_replace(money_num, money_arr[2], '', 'gq') INTO money_num;
+    END IF;
+    IF money_arr[3] IS NOT NULL THEN
+      SELECT regexp_replace(money_num, money_arr[3], decimal_point, 'q') INTO money_num;
+    END IF;
+    IF is_negative THEN
+      RETURN ('-' || money_num)::mathesar_types.mathesar_money;
+    END IF;
+    RETURN money_num::mathesar_types.mathesar_money;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_mathesar_money(money)
+RETURNS mathesar_types.mathesar_money
+AS $$
+
+  DECLARE decimal_point text;
+  DECLARE is_negative boolean;
+  DECLARE money_arr text[];
+  DECLARE money_num text;
+  BEGIN
+    SELECT mathesar_types.get_mathesar_money_array($1::text) INTO money_arr;
+    IF money_arr IS NULL THEN
+      RAISE EXCEPTION '% cannot be cast to mathesar_types.mathesar_money', $1;
+    END IF;
+    SELECT money_arr[1] INTO money_num;
+    SELECT ltrim(to_char(1, 'D'), ' ') INTO decimal_point;
+    SELECT $1::text ~ '^.*(-|\(.+\)).*$' INTO is_negative;
+    IF money_arr[2] IS NOT NULL THEN
+      SELECT regexp_replace(money_num, money_arr[2], '', 'gq') INTO money_num;
+    END IF;
+    IF money_arr[3] IS NOT NULL THEN
+      SELECT regexp_replace(money_num, money_arr[3], decimal_point, 'q') INTO money_num;
+    END IF;
+    IF is_negative THEN
+      RETURN ('-' || money_num)::mathesar_types.mathesar_money;
+    END IF;
+    RETURN money_num::mathesar_types.mathesar_money;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_mathesar_money(character)
+RETURNS mathesar_types.mathesar_money
+AS $$
+
+  DECLARE decimal_point text;
+  DECLARE is_negative boolean;
+  DECLARE money_arr text[];
+  DECLARE money_num text;
+  BEGIN
+    SELECT mathesar_types.get_mathesar_money_array($1::text) INTO money_arr;
+    IF money_arr IS NULL THEN
+      RAISE EXCEPTION '% cannot be cast to mathesar_types.mathesar_money', $1;
+    END IF;
+    SELECT money_arr[1] INTO money_num;
+    SELECT ltrim(to_char(1, 'D'), ' ') INTO decimal_point;
+    SELECT $1::text ~ '^.*(-|\(.+\)).*$' INTO is_negative;
+    IF money_arr[2] IS NOT NULL THEN
+      SELECT regexp_replace(money_num, money_arr[2], '', 'gq') INTO money_num;
+    END IF;
+    IF money_arr[3] IS NOT NULL THEN
+      SELECT regexp_replace(money_num, money_arr[3], decimal_point, 'q') INTO money_num;
+    END IF;
+    IF is_negative THEN
+      RETURN ('-' || money_num)::mathesar_types.mathesar_money;
+    END IF;
+    RETURN money_num::mathesar_types.mathesar_money;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+
+-- mathesar_types.cast_to_money
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_money(mathesar_types.mathesar_money)
+RETURNS money
+AS $$
+
+  BEGIN
+    RETURN $1::money;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_money(money)
+RETURNS money
+AS $$
+
+  BEGIN
+    RETURN $1::money;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_money(smallint)
+RETURNS money
+AS $$
+
+  BEGIN
+    RETURN $1::numeric::money;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_money(real)
+RETURNS money
+AS $$
+
+  BEGIN
+    RETURN $1::numeric::money;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_money(bigint)
+RETURNS money
+AS $$
+
+  BEGIN
+    RETURN $1::numeric::money;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_money(double precision)
+RETURNS money
+AS $$
+
+  BEGIN
+    RETURN $1::numeric::money;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_money(numeric)
+RETURNS money
+AS $$
+
+  BEGIN
+    RETURN $1::numeric::money;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_money(integer)
+RETURNS money
+AS $$
+
+  BEGIN
+    RETURN $1::numeric::money;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_money(character varying)
+RETURNS money
+AS $$
+
+  DECLARE currency text;
+  BEGIN
+    SELECT to_char(1, 'L') INTO currency;
+    IF ($1 LIKE '%' || currency) OR ($1 LIKE currency || '%') THEN
+      RETURN $1::money;
+    END IF;
+    RAISE EXCEPTION '% cannot be cast to money as currency symbol is missing', $1;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_money(text)
+RETURNS money
+AS $$
+
+  DECLARE currency text;
+  BEGIN
+    SELECT to_char(1, 'L') INTO currency;
+    IF ($1 LIKE '%' || currency) OR ($1 LIKE currency || '%') THEN
+      RETURN $1::money;
+    END IF;
+    RAISE EXCEPTION '% cannot be cast to money as currency symbol is missing', $1;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_money(character)
+RETURNS money
+AS $$
+
+  DECLARE currency text;
+  BEGIN
+    SELECT to_char(1, 'L') INTO currency;
+    IF ($1 LIKE '%' || currency) OR ($1 LIKE currency || '%') THEN
+      RETURN $1::money;
+    END IF;
+    RAISE EXCEPTION '% cannot be cast to money as currency symbol is missing', $1;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+
+-- mathesar_types.cast_to_multicurrency_money
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_multicurrency_money(mathesar_types.multicurrency_money)
+RETURNS mathesar_types.multicurrency_money
+AS $$
+
+  BEGIN
+    RETURN $1::mathesar_types.multicurrency_money;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_multicurrency_money(smallint)
+RETURNS mathesar_types.multicurrency_money
+AS $$
+
+  BEGIN
+    RETURN ROW($1, 'USD')::mathesar_types.multicurrency_money;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_multicurrency_money(real)
+RETURNS mathesar_types.multicurrency_money
+AS $$
+
+  BEGIN
+    RETURN ROW($1, 'USD')::mathesar_types.multicurrency_money;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_multicurrency_money(mathesar_types.mathesar_money)
+RETURNS mathesar_types.multicurrency_money
+AS $$
+
+  BEGIN
+    RETURN ROW($1, 'USD')::mathesar_types.multicurrency_money;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_multicurrency_money(bigint)
+RETURNS mathesar_types.multicurrency_money
+AS $$
+
+  BEGIN
+    RETURN ROW($1, 'USD')::mathesar_types.multicurrency_money;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
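+-- Example (illustrative): bare numbers receive a default 'USD' currency tag:
+--   SELECT mathesar_types.cast_to_multicurrency_money(250::bigint);  -- (250,USD)
+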
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_multicurrency_money(double precision)
+RETURNS mathesar_types.multicurrency_money
+AS $$
+
+  BEGIN
+    RETURN ROW($1, 'USD')::mathesar_types.multicurrency_money;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_multicurrency_money(numeric)
+RETURNS mathesar_types.multicurrency_money
+AS $$
+
+  BEGIN
+    RETURN ROW($1, 'USD')::mathesar_types.multicurrency_money;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_multicurrency_money(integer)
+RETURNS mathesar_types.multicurrency_money
+AS $$
+
+  BEGIN
+    RETURN ROW($1, 'USD')::mathesar_types.multicurrency_money;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_multicurrency_money(character varying)
+RETURNS mathesar_types.multicurrency_money
+AS $$
+
+  BEGIN
+    RETURN ROW($1::numeric, 'USD')::mathesar_types.multicurrency_money;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_multicurrency_money(text)
+RETURNS mathesar_types.multicurrency_money
+AS $$
+
+  BEGIN
+    RETURN ROW($1::numeric, 'USD')::mathesar_types.multicurrency_money;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_multicurrency_money(money)
+RETURNS mathesar_types.multicurrency_money
+AS $$
+
+  BEGIN
+    RETURN ROW($1::numeric, 'USD')::mathesar_types.multicurrency_money;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_multicurrency_money(character)
+RETURNS mathesar_types.multicurrency_money
+AS $$
+
+  BEGIN
+    RETURN ROW($1::numeric, 'USD')::mathesar_types.multicurrency_money;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+
+-- mathesar_types.cast_to_character_varying
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(time without time zone)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(bigint)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(double precision)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(mathesar_types.multicurrency_money)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(mathesar_types.uri)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(time with time zone)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(integer)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
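+-- Note: every cast_to_character_varying variant is uniform; it returns $1::text
+-- and relies on the source type's standard text output, e.g.
+--   SELECT mathesar_types.cast_to_character_varying(interval '1 day');  -- '1 day'
+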
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(real)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(mathesar_types.mathesar_money)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(tsvector)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(jsonb)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying("char")
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(interval)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(macaddr)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(smallint)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(timestamp with time zone)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(inet)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(boolean)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(int4range)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(mathesar_types.mathesar_json_object)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(tstzrange)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(regclass)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(character)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(tsrange)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(numrange)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(cidr)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(character varying)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(numeric)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(mathesar_types.email)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(bit)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(money)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(int8range)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(oid)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(json)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(daterange)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(timestamp without time zone)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(bytea)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(date)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(mathesar_types.mathesar_json_array)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(text)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character_varying(uuid)
+RETURNS character varying
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+
+-- mathesar_types.cast_to_character
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(time without time zone)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(bigint)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(double precision)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(mathesar_types.multicurrency_money)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(mathesar_types.uri)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(time with time zone)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(integer)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(real)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(mathesar_types.mathesar_money)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(tsvector)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(jsonb)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character("char")
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(interval)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(macaddr)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(smallint)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(timestamp with time zone)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(inet)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(boolean)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(int4range)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(mathesar_types.mathesar_json_object)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(tstzrange)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(regclass)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(character)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(tsrange)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(numrange)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(cidr)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(character varying)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(numeric)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(mathesar_types.email)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(bit)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(money)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(int8range)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(oid)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(json)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(daterange)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(timestamp without time zone)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(bytea)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(date)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(mathesar_types.mathesar_json_array)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(text)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_character(uuid)
+RETURNS character
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+
+-- mathesar_types.cast_to__double_quote_char_double_quote_
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(time without time zone)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(bigint)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(double precision)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(mathesar_types.multicurrency_money)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(mathesar_types.uri)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(time with time zone)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(integer)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(real)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(mathesar_types.mathesar_money)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(tsvector)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(jsonb)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_("char")
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(interval)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(macaddr)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
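+-- Note: the suffix _double_quote_char_double_quote_ encodes the quoted type name
+-- "char" (PostgreSQL's single-byte char) in a plain identifier. Example:
+--   SELECT mathesar_types.cast_to__double_quote_char_double_quote_('a'::"char");  -- 'a'
+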
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(smallint)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(timestamp with time zone)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(inet)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(boolean)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(int4range)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(mathesar_types.mathesar_json_object)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(tstzrange)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(regclass)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(character)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(tsrange)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(numrange)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(cidr)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(character varying)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(numeric)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(mathesar_types.email)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(bit)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(money)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(int8range)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(oid)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(json)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(daterange)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(timestamp without time zone)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(bytea)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(date)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(mathesar_types.mathesar_json_array)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(text)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to__double_quote_char_double_quote_(uuid)
+RETURNS "char"
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+
+-- mathesar_types.cast_to_text
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(time without time zone)
+RETURNS text
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(bigint)
+RETURNS text
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(double precision)
+RETURNS text
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(mathesar_types.multicurrency_money)
+RETURNS text
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(mathesar_types.uri)
+RETURNS text
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(time with time zone)
+RETURNS text
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
+CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(integer)
+RETURNS text
+AS $$
+
+  BEGIN
+    RETURN $1::text;
+  END;
+
+$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT;
+
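+-- Note: the cast_to_text family mirrors cast_to_character_varying above; each
+-- variant returns $1::text for its source type.
+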
+RETURNS text +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(mathesar_types.mathesar_money) +RETURNS text +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(tsvector) +RETURNS text +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(jsonb) +RETURNS text +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text("char") +RETURNS text +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(interval) +RETURNS text +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(macaddr) +RETURNS text +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(smallint) +RETURNS text +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(timestamp with time zone) +RETURNS text +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(inet) +RETURNS text +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(boolean) +RETURNS text +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(int4range) +RETURNS text +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(mathesar_types.mathesar_json_object) +RETURNS text +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(tstzrange) +RETURNS text +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(regclass) +RETURNS text +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(character) +RETURNS text +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(tsrange) +RETURNS text +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(numrange) +RETURNS text +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(cidr) +RETURNS text +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(character varying) +RETURNS text +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION 
mathesar_types.cast_to_text(numeric) +RETURNS text +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(mathesar_types.email) +RETURNS text +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(bit) +RETURNS text +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(money) +RETURNS text +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(int8range) +RETURNS text +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(oid) +RETURNS text +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(json) +RETURNS text +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(daterange) +RETURNS text +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(timestamp without time zone) +RETURNS text +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(bytea) +RETURNS text +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(date) +RETURNS text +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(mathesar_types.mathesar_json_array) +RETURNS text +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(text) +RETURNS text +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_text(uuid) +RETURNS text +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +-- mathesar_types.cast_to_name + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(time without time zone) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(bigint) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(double precision) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(mathesar_types.multicurrency_money) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(mathesar_types.uri) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(time with time zone) +RETURNS name +AS $$ + + BEGIN + RETURN 
$1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(integer) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(real) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(mathesar_types.mathesar_money) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(tsvector) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(jsonb) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name("char") +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(interval) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(macaddr) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(smallint) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(timestamp with time zone) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(inet) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(boolean) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(int4range) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(mathesar_types.mathesar_json_object) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(tstzrange) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(regclass) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(character) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(tsrange) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(numrange) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(cidr) +RETURNS name +AS $$ + + 
BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(character varying) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(numeric) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(mathesar_types.email) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(bit) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(money) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(int8range) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(oid) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(json) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(daterange) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(timestamp without time zone) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(bytea) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(date) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(mathesar_types.mathesar_json_array) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(text) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_name(uuid) +RETURNS name +AS $$ + + BEGIN + RETURN $1::text; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +-- mathesar_types.cast_to_uri + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_uri(character varying) +RETURNS mathesar_types.uri +AS $$ + + DECLARE uri_res mathesar_types.uri := 'https://centerofci.org'; + DECLARE uri_tld text; + BEGIN + RETURN $1::mathesar_types.uri; + EXCEPTION WHEN SQLSTATE '23514' THEN + SELECT lower(('http://' || $1)::mathesar_types.uri) INTO uri_res; + SELECT (regexp_match(mathesar_types.uri_authority(uri_res), '(?<=\.)(?:.(?!\.))+$'))[1] + INTO uri_tld; + IF EXISTS(SELECT 1 FROM mathesar_types.top_level_domains WHERE tld = uri_tld) THEN + RETURN uri_res; + END IF; + RAISE EXCEPTION '% is not a mathesar_types.uri', $1; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_uri(text) +RETURNS 
mathesar_types.uri +AS $$ + + DECLARE uri_res mathesar_types.uri := 'https://centerofci.org'; + DECLARE uri_tld text; + BEGIN + RETURN $1::mathesar_types.uri; + EXCEPTION WHEN SQLSTATE '23514' THEN + SELECT lower(('http://' || $1)::mathesar_types.uri) INTO uri_res; + SELECT (regexp_match(mathesar_types.uri_authority(uri_res), '(?<=\.)(?:.(?!\.))+$'))[1] + INTO uri_tld; + IF EXISTS(SELECT 1 FROM mathesar_types.top_level_domains WHERE tld = uri_tld) THEN + RETURN uri_res; + END IF; + RAISE EXCEPTION '% is not a mathesar_types.uri', $1; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_uri(mathesar_types.uri) +RETURNS mathesar_types.uri +AS $$ + + DECLARE uri_res mathesar_types.uri := 'https://centerofci.org'; + DECLARE uri_tld text; + BEGIN + RETURN $1::mathesar_types.uri; + EXCEPTION WHEN SQLSTATE '23514' THEN + SELECT lower(('http://' || $1)::mathesar_types.uri) INTO uri_res; + SELECT (regexp_match(mathesar_types.uri_authority(uri_res), '(?<=\.)(?:.(?!\.))+$'))[1] + INTO uri_tld; + IF EXISTS(SELECT 1 FROM mathesar_types.top_level_domains WHERE tld = uri_tld) THEN + RETURN uri_res; + END IF; + RAISE EXCEPTION '% is not a mathesar_types.uri', $1; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_uri(character) +RETURNS mathesar_types.uri +AS $$ + + DECLARE uri_res mathesar_types.uri := 'https://centerofci.org'; + DECLARE uri_tld text; + BEGIN + RETURN $1::mathesar_types.uri; + EXCEPTION WHEN SQLSTATE '23514' THEN + SELECT lower(('http://' || $1)::mathesar_types.uri) INTO uri_res; + SELECT (regexp_match(mathesar_types.uri_authority(uri_res), '(?<=\.)(?:.(?!\.))+$'))[1] + INTO uri_tld; + IF EXISTS(SELECT 1 FROM mathesar_types.top_level_domains WHERE tld = uri_tld) THEN + RETURN uri_res; + END IF; + RAISE EXCEPTION '% is not a mathesar_types.uri', $1; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +-- mathesar_types.cast_to_numeric + +CREATE OR REPLACE FUNCTION mathesar_types.get_numeric_array(text) RETURNS text[] +AS $$ + DECLARE + raw_arr text[]; + actual_number_arr text[]; + group_divider_arr text[]; + decimal_point_arr text[]; + actual_number text; + group_divider text; + decimal_point text; + BEGIN + SELECT regexp_matches($1, '^(?:[+-]?([0-9]{4,}(?:([,.])[0-9]+)?|[0-9]{1,3}(?:([,.])[0-9]{1,2}|[0-9]{4,})?|[0-9]{1,3}(,)[0-9]{3}(\.)[0-9]+|[0-9]{1,3}(\.)[0-9]{3}(,)[0-9]+|[0-9]{1,3}(?:(,)[0-9]{3}){2,}(?:(\.)[0-9]+)?|[0-9]{1,3}(?:(\.)[0-9]{3}){2,}(?:(,)[0-9]+)?|[0-9]{1,3}(?:( )[0-9]{3})+(?:([,.])[0-9]+)?|[0-9]{1,2}(?:(,)[0-9]{2})+,[0-9]{3}(?:(\.)[0-9]+)?|[0-9]{1,3}(?:(\'')[0-9]{3})+(?:([.])[0-9]+)?))$') INTO raw_arr; + IF raw_arr IS NULL THEN + RETURN NULL; + END IF; + SELECT array_remove(ARRAY[raw_arr[1]], null) INTO actual_number_arr; + SELECT array_remove(ARRAY[raw_arr[4],raw_arr[6],raw_arr[8],raw_arr[10],raw_arr[12],raw_arr[14],raw_arr[16]], null) INTO group_divider_arr; + SELECT array_remove(ARRAY[raw_arr[2],raw_arr[3],raw_arr[5],raw_arr[7],raw_arr[9],raw_arr[11],raw_arr[13],raw_arr[15],raw_arr[17]], null) INTO decimal_point_arr; + SELECT actual_number_arr[1] INTO actual_number; + SELECT group_divider_arr[1] INTO group_divider; + SELECT decimal_point_arr[1] INTO decimal_point; + RETURN ARRAY[actual_number, group_divider, decimal_point]; + END; +$$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_numeric(smallint) +RETURNS numeric +AS $$ + + BEGIN + RETURN $1::numeric; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE 
OR REPLACE FUNCTION mathesar_types.cast_to_numeric(real) +RETURNS numeric +AS $$ + + BEGIN + RETURN $1::numeric; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_numeric(bigint) +RETURNS numeric +AS $$ + + BEGIN + RETURN $1::numeric; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_numeric(double precision) +RETURNS numeric +AS $$ + + BEGIN + RETURN $1::numeric; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_numeric(numeric) +RETURNS numeric +AS $$ + + BEGIN + RETURN $1::numeric; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_numeric(money) +RETURNS numeric +AS $$ + + BEGIN + RETURN $1::numeric; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_numeric(integer) +RETURNS numeric +AS $$ + + BEGIN + RETURN $1::numeric; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_numeric(character varying) +RETURNS numeric +AS $$ + +DECLARE decimal_point text; +DECLARE is_negative boolean; +DECLARE numeric_arr text[]; +DECLARE numeric text; +BEGIN + SELECT mathesar_types.get_numeric_array($1::text) INTO numeric_arr; + IF numeric_arr IS NULL THEN + RAISE EXCEPTION '% cannot be cast to numeric', $1; + END IF; + SELECT numeric_arr[1] INTO numeric; + SELECT ltrim(to_char(1, 'D'), ' ') INTO decimal_point; + SELECT $1::text ~ '^-.*$' INTO is_negative; + IF numeric_arr[2] IS NOT NULL THEN + SELECT regexp_replace(numeric, numeric_arr[2], '', 'gq') INTO numeric; + END IF; + IF numeric_arr[3] IS NOT NULL THEN + SELECT regexp_replace(numeric, numeric_arr[3], decimal_point, 'q') INTO numeric; + END IF; + IF is_negative THEN + RETURN ('-' || numeric)::numeric; + END IF; + RETURN numeric::numeric; +END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_numeric(text) +RETURNS numeric +AS $$ + +DECLARE decimal_point text; +DECLARE is_negative boolean; +DECLARE numeric_arr text[]; +DECLARE numeric text; +BEGIN + SELECT mathesar_types.get_numeric_array($1::text) INTO numeric_arr; + IF numeric_arr IS NULL THEN + RAISE EXCEPTION '% cannot be cast to numeric', $1; + END IF; + SELECT numeric_arr[1] INTO numeric; + SELECT ltrim(to_char(1, 'D'), ' ') INTO decimal_point; + SELECT $1::text ~ '^-.*$' INTO is_negative; + IF numeric_arr[2] IS NOT NULL THEN + SELECT regexp_replace(numeric, numeric_arr[2], '', 'gq') INTO numeric; + END IF; + IF numeric_arr[3] IS NOT NULL THEN + SELECT regexp_replace(numeric, numeric_arr[3], decimal_point, 'q') INTO numeric; + END IF; + IF is_negative THEN + RETURN ('-' || numeric)::numeric; + END IF; + RETURN numeric::numeric; +END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_numeric(character) +RETURNS numeric +AS $$ + +DECLARE decimal_point text; +DECLARE is_negative boolean; +DECLARE numeric_arr text[]; +DECLARE numeric text; +BEGIN + SELECT mathesar_types.get_numeric_array($1::text) INTO numeric_arr; + IF numeric_arr IS NULL THEN + RAISE EXCEPTION '% cannot be cast to numeric', $1; + END IF; + SELECT numeric_arr[1] INTO numeric; + SELECT ltrim(to_char(1, 'D'), ' ') INTO decimal_point; + SELECT $1::text ~ '^-.*$' INTO is_negative; + IF numeric_arr[2] IS NOT NULL THEN + SELECT regexp_replace(numeric, numeric_arr[2], '', 
'gq') INTO numeric; + END IF; + IF numeric_arr[3] IS NOT NULL THEN + SELECT regexp_replace(numeric, numeric_arr[3], decimal_point, 'q') INTO numeric; + END IF; + IF is_negative THEN + RETURN ('-' || numeric)::numeric; + END IF; + RETURN numeric::numeric; +END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_numeric(boolean) +RETURNS numeric +AS $$ + +BEGIN + IF $1 THEN + RETURN 1::numeric; + END IF; + RETURN 0::numeric; +END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +-- mathesar_types.cast_to_jsonb + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_jsonb(character varying) +RETURNS jsonb +AS $$ + + BEGIN + RETURN $1::jsonb; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_jsonb(json) +RETURNS jsonb +AS $$ + + BEGIN + RETURN $1::jsonb; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_jsonb(character) +RETURNS jsonb +AS $$ + + BEGIN + RETURN $1::jsonb; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_jsonb(jsonb) +RETURNS jsonb +AS $$ + + BEGIN + RETURN $1::jsonb; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_jsonb(mathesar_types.mathesar_json_array) +RETURNS jsonb +AS $$ + + BEGIN + RETURN $1::jsonb; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_jsonb(text) +RETURNS jsonb +AS $$ + + BEGIN + RETURN $1::jsonb; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_jsonb(mathesar_types.mathesar_json_object) +RETURNS jsonb +AS $$ + + BEGIN + RETURN $1::jsonb; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +-- mathesar_types.cast_to_mathesar_json_array + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_mathesar_json_array(character varying) +RETURNS mathesar_types.mathesar_json_array +AS $$ + + BEGIN + RETURN $1::mathesar_types.mathesar_json_array; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_mathesar_json_array(json) +RETURNS mathesar_types.mathesar_json_array +AS $$ + + BEGIN + RETURN $1::mathesar_types.mathesar_json_array; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_mathesar_json_array(character) +RETURNS mathesar_types.mathesar_json_array +AS $$ + + BEGIN + RETURN $1::mathesar_types.mathesar_json_array; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_mathesar_json_array(jsonb) +RETURNS mathesar_types.mathesar_json_array +AS $$ + + BEGIN + RETURN $1::mathesar_types.mathesar_json_array; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_mathesar_json_array(mathesar_types.mathesar_json_array) +RETURNS mathesar_types.mathesar_json_array +AS $$ + + BEGIN + RETURN $1::mathesar_types.mathesar_json_array; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_mathesar_json_array(text) +RETURNS mathesar_types.mathesar_json_array +AS $$ + + BEGIN + RETURN $1::mathesar_types.mathesar_json_array; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION 
mathesar_types.cast_to_mathesar_json_array(mathesar_types.mathesar_json_object) +RETURNS mathesar_types.mathesar_json_array +AS $$ + + BEGIN + RETURN $1::mathesar_types.mathesar_json_array; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +-- mathesar_types.cast_to_mathesar_json_object + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_mathesar_json_object(character varying) +RETURNS mathesar_types.mathesar_json_object +AS $$ + + BEGIN + RETURN $1::mathesar_types.mathesar_json_object; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_mathesar_json_object(json) +RETURNS mathesar_types.mathesar_json_object +AS $$ + + BEGIN + RETURN $1::mathesar_types.mathesar_json_object; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_mathesar_json_object(character) +RETURNS mathesar_types.mathesar_json_object +AS $$ + + BEGIN + RETURN $1::mathesar_types.mathesar_json_object; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_mathesar_json_object(jsonb) +RETURNS mathesar_types.mathesar_json_object +AS $$ + + BEGIN + RETURN $1::mathesar_types.mathesar_json_object; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_mathesar_json_object(mathesar_types.mathesar_json_array) +RETURNS mathesar_types.mathesar_json_object +AS $$ + + BEGIN + RETURN $1::mathesar_types.mathesar_json_object; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_mathesar_json_object(text) +RETURNS mathesar_types.mathesar_json_object +AS $$ + + BEGIN + RETURN $1::mathesar_types.mathesar_json_object; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_mathesar_json_object(mathesar_types.mathesar_json_object) +RETURNS mathesar_types.mathesar_json_object +AS $$ + + BEGIN + RETURN $1::mathesar_types.mathesar_json_object; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + + +-- mathesar_types.cast_to_json + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_json(character varying) +RETURNS json +AS $$ + + BEGIN + RETURN $1::json; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_json(json) +RETURNS json +AS $$ + + BEGIN + RETURN $1::json; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_json(character) +RETURNS json +AS $$ + + BEGIN + RETURN $1::json; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_json(jsonb) +RETURNS json +AS $$ + + BEGIN + RETURN $1::json; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_json(mathesar_types.mathesar_json_array) +RETURNS json +AS $$ + + BEGIN + RETURN $1::json; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_json(text) +RETURNS json +AS $$ + + BEGIN + RETURN $1::json; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; + +CREATE OR REPLACE FUNCTION mathesar_types.cast_to_json(mathesar_types.mathesar_json_object) +RETURNS json +AS $$ + + BEGIN + RETURN $1::json; + END; + +$$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; diff --git a/db/sql/50_msar_permissions.sql b/db/sql/50_msar_permissions.sql new file mode 100644 index 
0000000000..9b5631e425 --- /dev/null +++ b/db/sql/50_msar_permissions.sql @@ -0,0 +1,35 @@ +---------------------------------------------------------------------------------------------------- +---------------------------------------------------------------------------------------------------- +-- FUNCTIONS/COMMANDS RELATED TO GRANTING APPROPRIATE PERMISSIONS FOR msar, __msar AND mathesar_types +-- SCHEMAS TO PUBLIC. +-- +-- !!! DO NOT ADD ANY FUNCTIONS PAST THIS POINT !!! +---------------------------------------------------------------------------------------------------- +---------------------------------------------------------------------------------------------------- + +CREATE OR REPLACE FUNCTION +msar.grant_usage_on_custom_mathesar_types_to_public() RETURNS void AS $$ +BEGIN + EXECUTE string_agg( + format( + 'GRANT USAGE ON TYPE %1$I.%2$I TO PUBLIC', + pgn.nspname, + pgt.typname + ), + E';\n' + ) || E';\n' + FROM pg_catalog.pg_type AS pgt + JOIN pg_catalog.pg_namespace pgn ON pgn.oid = pgt.typnamespace + WHERE (pgn.nspname = 'msar' + OR pgn.nspname = '__msar' + OR pgn.nspname = 'mathesar_types') + AND (pgt.typtype = 'c' OR pgt.typtype = 'd') + AND pgt.typcategory != 'A'; +END; +$$ LANGUAGE plpgsql; + + +GRANT USAGE ON SCHEMA __msar, msar, mathesar_types TO PUBLIC; +GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA msar, __msar, mathesar_types TO PUBLIC; +GRANT SELECT ON ALL TABLES IN SCHEMA msar, __msar, mathesar_types TO PUBLIC; +SELECT msar.grant_usage_on_custom_mathesar_types_to_public(); diff --git a/db/sql/README.md b/db/sql/README.md new file mode 100644 index 0000000000..08861b9904 --- /dev/null +++ b/db/sql/README.md @@ -0,0 +1,47 @@ +# SQL code + +A substantial amount of Mathesar's application logic is implemented directly in the PostgreSQL database layer. This directory holds the code for that logic, written in PL/pgSQL. + +Also see our [SQL code standards](./STANDARDS.md) when making changes to the SQL code. + + +## Schemas + +Mathesar installs multiple schemas into the PostgreSQL database for its own use. We use these internal schemas to hold our custom functions and types. + +### msar + +This is the main schema where we define Mathesar-specific functionality. + +### __msar + +This is a legacy schema that is now **deprecated**. Try to avoid using it if you can. + +It was originally intended for private use within the Mathesar SQL layer (not to be called by the service layer). So if you do use this schema, don't call its functions from within the service layer. + +### mathesar_types + +This schema holds types which the user might use in their own tables, as well as types for our internal use. + + +## Testing + +SQL code is tested using [pgTAP](https://pgtap.org/). + +- Run all tests: + + ``` + docker exec mathesar_dev_db /bin/bash /sql/run_tests.sh -v + ``` + +- Run tests whose names contain `foo_bar`: + + ``` + docker exec mathesar_dev_db /bin/bash /sql/run_tests.sh -v -x foo_bar + ``` + + +## When modifying the set of Mathesar-internal schemas + +The names of all schemas managed by Mathesar in this SQL are also duplicated in [constants.py](../constants.py). Any changes to these schema names (e.g. adding a new internal schema) must be propagated there too.
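For a concrete feel for the pieces above: once installed, the cast functions and the grant helper are ordinary schema-qualified objects, so they can be exercised from any `psql` session against the dev database. A minimal sketch with illustrative inputs (the output comments assume a locale whose decimal point is `.`):

```sql
-- Cast functions from 40_msar_types.sql are plain functions:
SELECT mathesar_types.cast_to_text(42);             -- '42'
SELECT mathesar_types.cast_to_numeric('1,234.56');  -- 1234.56 (the ',' group divider is stripped)

-- The grant helper from 50_msar_permissions.sql may be re-run after adding new custom types:
SELECT msar.grant_usage_on_custom_mathesar_types_to_public();
```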
+ diff --git a/db/sql/STANDARDS.md b/db/sql/STANDARDS.md new file mode 100644 index 0000000000..93f12bf518 --- /dev/null +++ b/db/sql/STANDARDS.md @@ -0,0 +1,96 @@ +# SQL Code Standards + +## Naming conventions + +Because function signatures are used informationally in command-generated tables, horizontal space needs to be conserved. As a compromise between readability and terseness, we use the following conventions in variable naming: + +| Object | Naming abbreviation | +| -- | -- | +| attribute | `att` | +| schema | `sch` | +| table | `tab` | +| column | `col` | +| constraint | `con` | +| object | `obj` | +| relation | `rel` | + +Textual names have the suffix `_name`, and numeric identifiers have the suffix `_id`. + +Examples: + +- The OID of a table is `tab_id` +- The name of a column is `col_name` +- The attnum of a column is `col_id` + +## Casing + +We use `snake_case` for basically everything: schema names, function names, variables, and types. + +## Code documentation + +Every function should have a docstring-style code documentation block. Follow the syntax of existing functions when writing new ones. + +## Quoting, escaping, SQL injection, and security + +As of mid-2024, Mathesar is in the midst of a gradual transition from one pattern of quoting to another. + +- **The old pattern** is used for all functions within the (deprecated) `__msar` schema and will eventually be refactored out. + + In this pattern, if the name of a database object is accepted as a function argument, stored as an intermediate variable, or returned from a function, then that name is _quoted_ in preparation for it to eventually be used in an SQL statement. For example, a table name of `foo bar` would be passed around as `"foo bar"`. + +- **The new pattern** is used for all functions within the `msar` schema, and will be used going forward. + + In this pattern all names are passed around unquoted for as long as possible. Like above, this applies to names in function arguments, intermediate variables, and return values. They are only quoted at the latest possible point in their execution path, i.e. when they are put into SQL. + + One way to think about this pattern is: + + **If it _can_ be unquoted, then it _should_ be unquoted.** + + For example, if you're dealing with a plain table name such as `foo bar`, then definitely leave it unquoted. + + To home in on an edge case, let's say you need to qualify that table name with a schema name too. In this case, try to handle and store both values (schema name and table name) separately (unquoted) as much as possible. You can use separate variables, separate arguments, or a composite type for return values. As a last resort, you can store the qualified name quoted in an SQL fragment string like `"my schema"."foo bar"`. We have some code like this already, but it's not ideal: because of the dot in that SQL fragment, there is no way to leave the values unquoted. With fragments of SQL like this, take care to use descriptive naming and helpful code comments to be extra clear about when a string represents an SQL fragment. But in general, try to avoid passing around SQL fragments if you can. Prefer to pass around raw unquoted values, or better yet, unique identifiers like OIDs when possible. + +From [OWASP](https://owasp.org/www-project-proactive-controls/v3/en/c4-encode-escape-data): + +> Output encoding is best applied **just before** the content is passed to the target interpreter.
If this defense is performed too early in the processing of a request then the encoding or escaping may interfere with the use of the content in other parts of the program. + +## System catalog qualification + +Always qualify system catalog tables by prefixing them with `pg_catalog.`. If you don't, then user-defined tables can shadow the system catalog tables, breaking core functionality. + +## Casting OIDs to JSON + +Always cast OID values to `bigint` before putting them in JSON (or jsonb). + +- _Don't_ leave OID values in JSON without casting. + + This is because (strangely!) raw OID values become _strings_ in JSON unless you cast them. + + ```sql + SELECT jsonb_build_object('foo', 42::oid); -- ❌ Bad + ``` + + > `{"foo": "42"}` + + If you keep OID values as strings in JSON, it can cause bugs. For example, a client may later feed a received OID value back to the DB layer when making a modification to a DB object. If the client sends a stringified OID back to the DB layer, it might get treated as a _name_ instead of an OID due to function overloading. + +- _Don't_ cast OID values to `integer`. + + This is because the [`oid` type](https://www.postgresql.org/docs/current/datatype-oid.html) is an _unsigned_ 32-bit integer whereas the `integer` type is a _signed_ 32-bit integer. That means it's possible for a database to have OID values which don't fit into the `integer` type. + + For example, putting a large OID value into JSON by casting it to an integer will cause overflow: + + ```sql + SELECT jsonb_build_object('foo', 3333333333::oid::integer); -- ❌ Bad + ``` + + > `{"foo": -961633963}` + + Instead, cast it to `bigint`: + + ```sql + SELECT jsonb_build_object('foo', 3333333333::oid::bigint); -- ✅ Good + ``` + + > `{"foo": 3333333333}` + diff --git a/db/sql/install.py b/db/sql/install.py index b3736f2910..0e8263a76a 100644 --- a/db/sql/install.py +++ b/db/sql/install.py @@ -1,14 +1,25 @@ import os -from db.connection import load_file_with_engine +from db.connection import load_file_with_conn +from db.types.custom import uri FILE_DIR = os.path.abspath(os.path.dirname(__file__)) -MSAR_SQL = os.path.join(FILE_DIR, '0_msar.sql') -MSAR_AGGREGATE_SQL = os.path.join(FILE_DIR, '3_msar_custom_aggregates.sql') +MSAR_SQL = os.path.join(FILE_DIR, '00_msar.sql') +MSAR_JOIN_SQL = os.path.join(FILE_DIR, '10_msar_joinable_tables.sql') +MSAR_AGGREGATE_SQL = os.path.join(FILE_DIR, '30_msar_custom_aggregates.sql') +MSAR_TYPES = os.path.join(FILE_DIR, '40_msar_types.sql') +MSAR_PERMS = os.path.join(FILE_DIR, '50_msar_permissions.sql') -def install(engine): - """Install SQL pieces using the given engine.""" +def install(conn): + """Install SQL pieces using the given conn.""" with open(MSAR_SQL) as file_handle: - load_file_with_engine(engine, file_handle) + load_file_with_conn(conn, file_handle) + with open(MSAR_JOIN_SQL) as file_handle: + load_file_with_conn(conn, file_handle) with open(MSAR_AGGREGATE_SQL) as custom_aggregates: - load_file_with_engine(engine, custom_aggregates) + load_file_with_conn(conn, custom_aggregates) + with open(MSAR_TYPES) as file_handle: + load_file_with_conn(conn, file_handle) + uri.install_tld_lookup_table(conn) + with open(MSAR_PERMS) as file_handle: + load_file_with_conn(conn, file_handle) diff --git a/db/sql/test_00_msar.sql b/db/sql/test_00_msar.sql new file mode 100644 index 0000000000..3bf9e86f56 --- /dev/null +++ b/db/sql/test_00_msar.sql @@ -0,0 +1,5472 @@ +DROP EXTENSION IF EXISTS pgtap CASCADE; +CREATE EXTENSION IF NOT EXISTS pgtap; + +-- msar.drop_columns
------------------------------------------------------------------------------- + +CREATE OR REPLACE FUNCTION __setup_drop_columns() RETURNS SETOF TEXT AS $$ +BEGIN + CREATE TABLE atable (dodrop1 integer, dodrop2 integer, dontdrop text); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_drop_columns_oid() RETURNS SETOF TEXT AS $$ +DECLARE + rel_id oid; +BEGIN + PERFORM __setup_drop_columns(); + rel_id := 'atable'::regclass::oid; + PERFORM msar.drop_columns(rel_id, 1, 2); + RETURN NEXT has_column( + 'atable', 'dontdrop', 'Keeps correct columns' + ); + RETURN NEXT hasnt_column( + 'atable', 'dodrop1', 'Drops correct columns 1' + ); + RETURN NEXT hasnt_column( + 'atable', 'dodrop2', 'Drops correct columns 2' + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_drop_columns_ne_oid() RETURNS SETOF TEXT AS $$ +BEGIN + CREATE TABLE "12345" (bleh text, bleh2 numeric); + PERFORM msar.drop_columns(12345, 1); + RETURN NEXT has_column( + '12345', 'bleh', 'Doesn''t drop columns of stupidly-named table' + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_drop_columns_names() RETURNS SETOF TEXT AS $$ +BEGIN + PERFORM __setup_drop_columns(); + PERFORM msar.drop_columns('public', 'atable', 'dodrop1', 'dodrop2'); + RETURN NEXT has_column( + 'atable', 'dontdrop', 'Dropper keeps correct columns' + ); + RETURN NEXT hasnt_column( + 'atable', 'dodrop1', 'Dropper drops correct columns 1' + ); + RETURN NEXT hasnt_column( + 'atable', 'dodrop2', 'Dropper drops correct columns 2' + ); +END; +$$ LANGUAGE plpgsql; + + +-- msar.drop_table --------------------------------------------------------------------------------- + +CREATE OR REPLACE FUNCTION __setup_drop_tables() RETURNS SETOF TEXT AS $$ +BEGIN + CREATE TABLE dropme (id SERIAL PRIMARY KEY, col1 integer); +END; +$$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION test_drop_table_oid() RETURNS SETOF TEXT AS $$ +DECLARE + rel_id oid; +BEGIN + PERFORM __setup_drop_tables(); + rel_id := 'dropme'::regclass::oid; + PERFORM msar.drop_table(tab_id => rel_id, cascade_ => false); + RETURN NEXT hasnt_table('dropme', 'Drops table'); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_drop_table_oid_restricted_fkey() RETURNS SETOF TEXT AS $$ +DECLARE + rel_id oid; +BEGIN + PERFORM __setup_drop_tables(); + rel_id := 'dropme'::regclass::oid; + CREATE TABLE + dependent (id SERIAL PRIMARY KEY, col1 integer REFERENCES dropme); + RETURN NEXT throws_ok( + format('SELECT msar.drop_table(tab_id => %s, cascade_ => false);', rel_id), + '2BP01', + 'cannot drop table dropme because other objects depend on it', + 'Table dropper throws for dependent objects' + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_drop_table_oid_cascade_fkey() RETURNS SETOF TEXT AS $$ +DECLARE + rel_id oid; +BEGIN + PERFORM __setup_drop_tables(); + rel_id := 'dropme'::regclass::oid; + CREATE TABLE + dependent (id SERIAL PRIMARY KEY, col1 integer REFERENCES dropme); + PERFORM msar.drop_table(tab_id => rel_id, cascade_ => true); + RETURN NEXT hasnt_table('dropme', 'Drops table with dependent using CASCADE'); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_drop_table_name() RETURNS SETOF TEXT AS $$ +BEGIN + PERFORM __setup_drop_tables(); + PERFORM msar.drop_table( + sch_name => 'public', + tab_name => 'dropme', + cascade_ => false, + if_exists => false + ); + RETURN NEXT hasnt_table('dropme', 'Drops table'); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_drop_table_name_missing_if_exists() RETURNS SETOF 
TEXT AS $$ +BEGIN + PERFORM __setup_drop_tables(); + PERFORM msar.drop_table( + sch_name => 'public', + tab_name => 'dropmenew', + cascade_ => false, + if_exists => true + ); + RETURN NEXT has_table('dropme', 'Drops table with IF EXISTS'); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_drop_table_name_missing_no_if_exists() RETURNS SETOF TEXT AS $$ +BEGIN + RETURN NEXT throws_ok( + 'SELECT msar.drop_table(''public'', ''doesntexist'', false, false);', + '42P01', + 'table "doesntexist" does not exist', + 'Table dropper throws for missing table' + ); +END; +$$ LANGUAGE plpgsql; + + +-- msar.build_type_text ---------------------------------------------------------------------------- + +CREATE OR REPLACE FUNCTION test_build_type_text() RETURNS SETOF TEXT AS $$/* +Note that many type building tests are in the column adding section, to make sure the strings the +function writes are as expected, and also valid type definitions. +*/ + +BEGIN + RETURN NEXT is(msar.build_type_text('{}'), 'text'); + RETURN NEXT is(msar.build_type_text(null), 'text'); + RETURN NEXT is(msar.build_type_text('{"name": "varchar"}'), 'character varying'); + CREATE DOMAIN msar.testtype AS text CHECK (value LIKE '%test'); + RETURN NEXT is( + msar.build_type_text('{"schema": "msar", "name": "testtype"}'), 'msar.testtype' + ); +END; +$$ LANGUAGE plpgsql; + + +-- __msar.process_col_def_jsonb ---------------------------------------------------------------------- + +CREATE OR REPLACE FUNCTION test_process_col_def_jsonb() RETURNS SETOF TEXT AS $f$ +BEGIN + RETURN NEXT is( + __msar.process_col_def_jsonb(0, '[{}, {}]'::jsonb, false), + ARRAY[ + ('"Column 1"', 'text', null, null, false, null), + ('"Column 2"', 'text', null, null, false, null) + ]::__msar.col_def[], + 'Empty columns should result in defaults' + ); + RETURN NEXT is( + __msar.process_col_def_jsonb(0, '[{"name": "id"}]'::jsonb, false), + null, + 'Column definition processing should ignore "id" column' + ); + RETURN NEXT is( + __msar.process_col_def_jsonb(0, '[{}, {}]'::jsonb, false, true), + ARRAY[ + ('id', 'integer', true, null, true, 'Mathesar default ID column'), + ('"Column 1"', 'text', null, null, false, null), + ('"Column 2"', 'text', null, null, false, null) + ]::__msar.col_def[], + 'Column definition processing add "id" column' + ); + RETURN NEXT is( + __msar.process_col_def_jsonb(0, '[{"description": "Some comment"}]'::jsonb, false), + ARRAY[ + ('"Column 1"', 'text', null, null, false, '''Some comment''') + ]::__msar.col_def[], + 'Comments should be sanitized' + ); +END; +$f$ LANGUAGE plpgsql; + + +-- msar.add_columns -------------------------------------------------------------------------------- + +CREATE OR REPLACE FUNCTION __setup_add_columns() RETURNS SETOF TEXT AS $$ +BEGIN + CREATE TABLE add_col_testable (id serial primary key, col1 integer, col2 varchar); +END; +$$ LANGUAGE plpgsql; + + +-- TODO: Figure out a way to parameterize these +CREATE OR REPLACE FUNCTION test_add_columns_fullspec_text() RETURNS SETOF TEXT AS $f$ +DECLARE + col_create_arr jsonb := $j$[ + {"name": "tcol", "type": {"name": "text"}, "not_null": true, "default": "my super default"} + ]$j$; +BEGIN + PERFORM __setup_add_columns(); + RETURN NEXT is( + msar.add_columns('add_col_testable'::regclass::oid, col_create_arr), '{4}'::smallint[] + ); + RETURN NEXT col_not_null('add_col_testable', 'tcol'); + RETURN NEXT col_type_is('add_col_testable', 'tcol', 'text'); + RETURN NEXT col_default_is('add_col_testable', 'tcol', 'my super default'); +END; +$f$ LANGUAGE plpgsql; + + 
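The test above exercises the full public signature of `msar.add_columns`. For reference, a minimal direct call looks like the sketch below; `my_table` and the `note` column are hypothetical names used only for illustration:

```sql
-- Returns the attnums of the newly added columns as a smallint[],
-- e.g. '{2}' if my_table previously had a single column.
SELECT msar.add_columns(
  'my_table'::regclass::oid,
  '[{"name": "note", "type": {"name": "text"}, "not_null": false}]'::jsonb
);
```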
+CREATE OR REPLACE FUNCTION test_add_columns_minspec_text() RETURNS SETOF TEXT AS $f$ +/* +This tests the default settings. When not given, the default column should be nullable and have no +default value. The name should be "Column <n>", where <n> is the attnum of the added column. +*/ +DECLARE + col_create_arr jsonb := '[{"type": {"name": "text"}}]'; +BEGIN + PERFORM __setup_add_columns(); + PERFORM msar.add_columns('add_col_testable'::regclass::oid, col_create_arr); + RETURN NEXT col_is_null('add_col_testable', 'Column 4'); + RETURN NEXT col_type_is('add_col_testable', 'Column 4', 'text'); + RETURN NEXT col_hasnt_default('add_col_testable', 'Column 4'); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_add_columns_comment() RETURNS SETOF TEXT AS $f$ +DECLARE + col_name text := 'tcol'; + description text := 'Some; comment with a semicolon'; + tab_id integer; + col_id integer; + col_create_arr jsonb; +BEGIN + PERFORM __setup_add_columns(); + tab_id := 'add_col_testable'::regclass::oid; + col_create_arr := format('[{"name": "%s", "description": "%s"}]', col_name, description); + PERFORM msar.add_columns(tab_id, col_create_arr); + col_id := msar.get_attnum(tab_id, col_name); + RETURN NEXT is( + msar.col_description(tab_id, col_id), + description + ); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_add_columns_multi_default_name() RETURNS SETOF TEXT AS $f$ +/* +This tests the default settings. When not given, the default column should be nullable and have no +default value. The name should be "Column <n>", where <n> is the attnum of the added column. +*/ +DECLARE + col_create_arr jsonb := '[{"type": {"name": "text"}}, {"type": {"name": "numeric"}}]'; +BEGIN + PERFORM __setup_add_columns(); + RETURN NEXT is( + msar.add_columns('add_col_testable'::regclass::oid, col_create_arr), '{4, 5}'::smallint[] + ); + RETURN NEXT col_type_is('add_col_testable', 'Column 4', 'text'); + RETURN NEXT col_type_is('add_col_testable', 'Column 5', 'numeric'); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_add_columns_numeric_def() RETURNS SETOF TEXT AS $f$ +DECLARE + col_create_arr jsonb := '[{"type": {"name": "numeric"}, "default": 3.14159}]'; +BEGIN + PERFORM __setup_add_columns(); + PERFORM msar.add_columns('add_col_testable'::regclass::oid, col_create_arr); + RETURN NEXT col_type_is('add_col_testable', 'Column 4', 'numeric'); + RETURN NEXT col_default_is('add_col_testable', 'Column 4', 3.14159); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_add_columns_numeric_prec() RETURNS SETOF TEXT AS $f$ +DECLARE + col_create_arr jsonb := '[{"type": {"name": "numeric", "options": {"precision": 3}}}]'; +BEGIN + PERFORM __setup_add_columns(); + PERFORM msar.add_columns('add_col_testable'::regclass::oid, col_create_arr); + RETURN NEXT col_type_is('add_col_testable', 'Column 4', 'numeric(3,0)'); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_add_columns_numeric_prec_scale() RETURNS SETOF TEXT AS $f$ +DECLARE + col_create_arr jsonb := $j$[ + {"type": {"name": "numeric", "options": {"precision": 3, "scale": 2}}} + ]$j$; +BEGIN + PERFORM __setup_add_columns(); + PERFORM msar.add_columns('add_col_testable'::regclass::oid, col_create_arr); + RETURN NEXT col_type_is('add_col_testable', 'Column 4', 'numeric(3,2)'); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_add_columns_caps_numeric() RETURNS SETOF TEXT AS $f$ +DECLARE + col_create_arr jsonb := '[{"type": {"name": "NUMERIC"}}]'; +BEGIN + PERFORM __setup_add_columns(); + PERFORM
msar.add_columns('add_col_testable'::regclass::oid, col_create_arr); + RETURN NEXT col_type_is('add_col_testable', 'Column 4', 'numeric'); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_add_columns_varchar_length() RETURNS SETOF TEXT AS $f$ +DECLARE + col_create_arr jsonb := '[{"type": {"name": "varchar", "options": {"length": 128}}}]'; +BEGIN + PERFORM __setup_add_columns(); + PERFORM msar.add_columns('add_col_testable'::regclass::oid, col_create_arr); + RETURN NEXT col_type_is('add_col_testable', 'Column 4', 'character varying(128)'); +END; +$f$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION test_add_columns_interval_precision() RETURNS SETOF TEXT AS $f$ +DECLARE + col_create_arr jsonb := '[{"type": {"name": "interval", "options": {"precision": 6}}}]'; +BEGIN + PERFORM __setup_add_columns(); + PERFORM msar.add_columns('add_col_testable'::regclass::oid, col_create_arr); + RETURN NEXT col_type_is('add_col_testable', 'Column 4', 'interval(6)'); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_add_columns_interval_fields() RETURNS SETOF TEXT AS $f$ +DECLARE + col_create_arr jsonb := '[{"type": {"name": "interval", "options": {"fields": "year"}}}]'; +BEGIN + PERFORM __setup_add_columns(); + PERFORM msar.add_columns('add_col_testable'::regclass::oid, col_create_arr); + RETURN NEXT col_type_is('add_col_testable', 'Column 4', 'interval year'); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_add_columns_interval_fields_prec() RETURNS SETOF TEXT AS $f$ +DECLARE + col_create_arr jsonb := $j$ + [{"type": {"name": "interval", "options": {"fields": "second", "precision": 3}}}] + $j$; +BEGIN + PERFORM __setup_add_columns(); + PERFORM msar.add_columns('add_col_testable'::regclass::oid, col_create_arr); + RETURN NEXT col_type_is('add_col_testable', 'Column 4', 'interval second(3)'); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_add_columns_timestamp_prec() RETURNS SETOF TEXT AS $f$ +DECLARE + col_create_arr jsonb := $j$ + [{"type": {"name": "timestamp", "options": {"precision": 3}}}] + $j$; +BEGIN + PERFORM __setup_add_columns(); + PERFORM msar.add_columns('add_col_testable'::regclass::oid, col_create_arr); + RETURN NEXT col_type_is('add_col_testable', 'Column 4', 'timestamp(3) without time zone'); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_add_columns_timestamp_raw_default() RETURNS SETOF TEXT AS $f$ +/* +This test will fail if the default is being sanitized, but will succeed if it's not. +*/ +DECLARE + col_create_arr jsonb := '[{"type": {"name": "timestamp"}, "default": "now()::timestamp"}]'; +BEGIN + PERFORM __setup_add_columns(); + PERFORM msar.add_columns('add_col_testable'::regclass::oid, col_create_arr, raw_default => true); + RETURN NEXT col_type_is('add_col_testable', 'Column 4', 'timestamp without time zone'); + RETURN NEXT col_default_is( + 'add_col_testable', 'Column 4', '(now())::timestamp without time zone' + ); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_add_columns_sanitize_default() RETURNS SETOF TEXT AS $f$ +/* +This test will succeed if the default is being sanitized, but will fail if it's not. + +It's important to check that we're careful with SQL submitted from python. 
+*/ +DECLARE + col_create_arr jsonb := $j$ + [{"type": {"name": "text"}, "default": "null; drop table add_col_testable"}] + $j$; +BEGIN + PERFORM __setup_add_columns(); + PERFORM msar.add_columns('add_col_testable'::regclass::oid, col_create_arr, raw_default => false); + RETURN NEXT has_table('add_col_testable'); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_add_columns_errors() RETURNS SETOF TEXT AS $f$ +BEGIN + PERFORM __setup_add_columns(); + RETURN NEXT throws_ok( + format( + 'SELECT msar.add_columns(tab_id => %s, col_defs => ''%s'');', + 'add_col_testable'::regclass::oid, + '[{"type": {"name": "taxt"}}]'::jsonb + ), + '42704', + 'type "taxt" does not exist' + ); + RETURN NEXT CASE WHEN pg_version_num() < 150000 + THEN throws_ok( + format( + 'SELECT msar.add_columns(tab_id => %s, col_defs => ''%s'');', + 'add_col_testable'::regclass::oid, + '[{"type": {"name": "numeric", "options": {"scale": 23, "precision": 3}}}]'::jsonb + ), + '22023', + 'NUMERIC scale 23 must be between 0 and precision 3' + ) + ELSE skip('Numeric scale can be negative or greater than precision as of v15') + END; +END; +$f$ LANGUAGE plpgsql; + + +-- msar.copy_column -------------------------------------------------------------------------------- + +CREATE OR REPLACE FUNCTION __setup_copy_column() RETURNS SETOF TEXT AS $$ +BEGIN + CREATE TABLE copy_coltest ( + id SERIAL PRIMARY KEY, + col1 varchar, + col2 varchar NOT NULL, + col3 numeric(5, 3) DEFAULT 5, + col4 timestamp without time zone DEFAULT NOW(), + col5 timestamp without time zone NOT NULL DEFAULT NOW(), + col6 interval second(3), + "col space" varchar + ); + ALTER TABLE copy_coltest ADD UNIQUE (col1, col2); + INSERT INTO copy_coltest VALUES + (DEFAULT, 'abc', 'def', 5.234, '1999-01-08 04:05:06', '1999-01-09 04:05:06', '4:05:06', 'ghi'), + (DEFAULT, 'jkl', 'mno', null, null, '1999-02-08 04:05:06', '3 4:05:07', 'pqr'), + (DEFAULT, null, 'stu', DEFAULT, DEFAULT, DEFAULT, null, 'vwx') + ; +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_copy_column_copies_unique() RETURNS SETOF TEXT AS $f$ +BEGIN + PERFORM __setup_copy_column(); + PERFORM msar.copy_column( + 'copy_coltest'::regclass::oid, 2::smallint, 'col1 supercopy', true, true + ); + RETURN NEXT col_type_is('copy_coltest', 'col1 supercopy', 'character varying'); + RETURN NEXT col_is_null('copy_coltest', 'col1 supercopy'); + RETURN NEXT col_is_unique('copy_coltest', ARRAY['col1', 'col2']); + RETURN NEXT col_is_unique('copy_coltest', ARRAY['col1 supercopy', 'col2']); + RETURN NEXT results_eq( + 'SELECT "col1 supercopy" FROM copy_coltest ORDER BY id', + $v$VALUES ('abc'::varchar), ('jkl'::varchar), (null)$v$ + ); + RETURN NEXT lives_ok( + $u$UPDATE copy_coltest SET "col1 supercopy"='abc' WHERE "col1 supercopy"='jkl'$u$, + 'Copied col should not have a single column unique constraint' + ); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_copy_column_copies_unique_and_nnull() RETURNS SETOF TEXT AS $f$ +BEGIN + PERFORM __setup_copy_column(); + PERFORM msar.copy_column( + 'copy_coltest'::regclass::oid, 3::smallint, null, true, true + ); + RETURN NEXT col_type_is('copy_coltest', 'col2 1', 'character varying'); + RETURN NEXT col_not_null('copy_coltest', 'col2 1'); + RETURN NEXT col_is_unique('copy_coltest', ARRAY['col1', 'col2']); + RETURN NEXT col_is_unique('copy_coltest', ARRAY['col1', 'col2 1']); + RETURN NEXT results_eq( + 'SELECT "col2 1" FROM copy_coltest', + $v$VALUES ('def'::varchar), ('mno'::varchar), ('stu'::varchar)$v$ + ); + RETURN NEXT lives_ok( + 
$u$UPDATE copy_coltest SET "col2 1"='def' WHERE "col2 1"='mno'$u$, + 'Copied col should not have a single column unique constraint' + ); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_copy_column_false_copy_data_and_con() RETURNS SETOF TEXT AS $f$ +BEGIN + PERFORM __setup_copy_column(); + PERFORM msar.copy_column( + 'copy_coltest'::regclass::oid, 3::smallint, null, false, false + ); + RETURN NEXT col_type_is('copy_coltest', 'col2 1', 'character varying'); + RETURN NEXT col_is_null('copy_coltest', 'col2 1'); + RETURN NEXT col_is_unique('copy_coltest', ARRAY['col1', 'col2']); + RETURN NEXT results_eq( + 'SELECT "col2 1" FROM copy_coltest', + $v$VALUES (null::varchar), (null::varchar), (null::varchar)$v$ + ); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_copy_column_num_options_static_default() RETURNS SETOF TEXT AS $f$ +BEGIN + PERFORM __setup_copy_column(); + PERFORM msar.copy_column( + 'copy_coltest'::regclass::oid, 4::smallint, null, true, false + ); + RETURN NEXT col_type_is('copy_coltest', 'col3 1', 'numeric(5,3)'); + RETURN NEXT col_is_null('copy_coltest', 'col3 1'); + RETURN NEXT col_default_is('copy_coltest', 'col3 1', '5'); + RETURN NEXT results_eq( + 'SELECT "col3 1" FROM copy_coltest', + $v$VALUES (5.234), (null), (5)$v$ + ); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_copy_column_nullable_dynamic_default() RETURNS SETOF TEXT AS $f$ +BEGIN + PERFORM __setup_copy_column(); + PERFORM msar.copy_column( + 'copy_coltest'::regclass::oid, 5::smallint, null, true, false + ); + RETURN NEXT col_type_is('copy_coltest', 'col4 1', 'timestamp without time zone'); + RETURN NEXT col_is_null('copy_coltest', 'col4 1'); + RETURN NEXT col_default_is('copy_coltest', 'col4 1', 'now()'); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_copy_column_non_null_dynamic_default() RETURNS SETOF TEXT AS $f$ +BEGIN + PERFORM __setup_copy_column(); + PERFORM msar.copy_column( + 'copy_coltest'::regclass::oid, 6::smallint, null, true, true + ); + RETURN NEXT col_type_is('copy_coltest', 'col5 1', 'timestamp without time zone'); + RETURN NEXT col_not_null('copy_coltest', 'col5 1'); + RETURN NEXT col_default_is('copy_coltest', 'col5 1', 'now()'); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_copy_column_interval_notation() RETURNS SETOF TEXT AS $f$ +BEGIN + PERFORM __setup_copy_column(); + PERFORM msar.copy_column( + 'copy_coltest'::regclass::oid, 7::smallint, null, false, false + ); + RETURN NEXT col_type_is('copy_coltest', 'col6 1', 'interval second(3)'); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_copy_column_space_name() RETURNS SETOF TEXT AS $f$ +BEGIN + PERFORM __setup_copy_column(); + PERFORM msar.copy_column( + 'copy_coltest'::regclass::oid, 8::smallint, null, false, false + ); + RETURN NEXT col_type_is('copy_coltest', 'col space 1', 'character varying'); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_copy_column_pkey() RETURNS SETOF TEXT AS $f$ +BEGIN + PERFORM __setup_copy_column(); + PERFORM msar.copy_column( + 'copy_coltest'::regclass::oid, 1::smallint, null, true, true + ); + RETURN NEXT col_type_is('copy_coltest', 'id 1', 'integer'); + RETURN NEXT col_not_null('copy_coltest', 'id 1'); + RETURN NEXT col_default_is( + 'copy_coltest', 'id 1', $d$nextval('copy_coltest_id_seq'::regclass)$d$ + ); + RETURN NEXT col_is_pk('copy_coltest', 'id'); + RETURN NEXT col_isnt_pk('copy_coltest', 'id 1'); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION 
test_copy_column_increment_name() RETURNS SETOF TEXT AS $f$ +BEGIN + PERFORM __setup_copy_column(); + PERFORM msar.copy_column( + 'copy_coltest'::regclass::oid, 2::smallint, null, true, true + ); + RETURN NEXT has_column('copy_coltest', 'col1 1'); + PERFORM msar.copy_column( + 'copy_coltest'::regclass::oid, 2::smallint, null, true, true + ); + RETURN NEXT has_column('copy_coltest', 'col1 2'); +END; +$f$ LANGUAGE plpgsql; + +-- msar.add_constraints ---------------------------------------------------------------------------- + +CREATE OR REPLACE FUNCTION __setup_add_pkey() RETURNS SETOF TEXT AS $$ +BEGIN + CREATE TABLE add_pkeytest (col1 serial, col2 serial, col3 text); + INSERT INTO add_pkeytest (col1, col2, col3) VALUES + (DEFAULT, DEFAULT, 'abc'), + (DEFAULT, DEFAULT, 'def'), + (DEFAULT, DEFAULT, 'abc'), + (DEFAULT, DEFAULT, 'def'), + (DEFAULT, DEFAULT, 'abc'), + (DEFAULT, DEFAULT, 'def'), + (DEFAULT, DEFAULT, 'abc'); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_add_constraint_pkey_id_fullspec() RETURNS SETOF TEXT AS $f$ +DECLARE + con_create_arr jsonb := $j$[ + {"name": "mysuperkey", "type": "p", "columns": [1], "deferrable": true} + ]$j$; + created_name text; + deferrable_ boolean; +BEGIN + PERFORM __setup_add_pkey(); + PERFORM msar.add_constraints('add_pkeytest'::regclass::oid, con_create_arr); + RETURN NEXT col_is_pk('add_pkeytest', 'col1'); + created_name := conname FROM pg_constraint + WHERE conrelid='add_pkeytest'::regclass::oid AND conkey='{1}'; + RETURN NEXT is(created_name, 'mysuperkey'); + deferrable_ := condeferrable FROM pg_constraint WHERE conname='mysuperkey'; + RETURN NEXT is(deferrable_, true); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_add_constraint_pkey_id_defname() RETURNS SETOF TEXT AS $f$ +DECLARE + con_create_arr jsonb := '[{"type": "p", "columns": [1]}]'; + created_name text; +BEGIN + PERFORM __setup_add_pkey(); + PERFORM msar.add_constraints('add_pkeytest'::regclass::oid, con_create_arr); + RETURN NEXT col_is_pk('add_pkeytest', 'col1'); + created_name := conname FROM pg_constraint + WHERE conrelid='add_pkeytest'::regclass::oid AND conkey='{1}'; + RETURN NEXT is(created_name, 'add_pkeytest_pkey'); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_add_constraint_pkey_id_multicol() RETURNS SETOF TEXT AS $f$ +DECLARE + con_create_arr jsonb := '[{"type": "p", "columns": [1, 2]}]'; + created_name text; +BEGIN + PERFORM __setup_add_pkey(); + PERFORM msar.add_constraints('add_pkeytest'::regclass::oid, con_create_arr); + RETURN NEXT col_is_pk('add_pkeytest', ARRAY['col1', 'col2']); + created_name := conname FROM pg_constraint + WHERE conrelid='add_pkeytest'::regclass::oid AND conkey='{1, 2}'; + RETURN NEXT is(created_name, 'add_pkeytest_pkey'); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_add_constraint_pkey_tab_name_singlecol() RETURNS SETOF TEXT AS $f$ +DECLARE + con_create_arr jsonb := '[{"type": "p", "columns": [1]}]'; +BEGIN + PERFORM __setup_add_pkey(); + PERFORM msar.add_constraints('public', 'add_pkeytest', con_create_arr); + RETURN NEXT col_is_pk('add_pkeytest', 'col1'); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_add_constraint_pkey_col_name_singlecol() RETURNS SETOF TEXT AS $f$ +DECLARE + con_create_arr jsonb := '[{"type": "p", "columns": ["col1"]}]'; +BEGIN + PERFORM __setup_add_pkey(); + PERFORM msar.add_constraints('add_pkeytest'::regclass::oid, con_create_arr); + RETURN NEXT col_is_pk('add_pkeytest', 'col1'); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR 
REPLACE FUNCTION test_add_constraint_pkey_col_name_multicol() RETURNS SETOF TEXT AS $f$ +DECLARE + con_create_arr jsonb := '[{"type": "p", "columns": ["col1", "col2"]}]'; +BEGIN + PERFORM __setup_add_pkey(); + PERFORM msar.add_constraints('add_pkeytest'::regclass::oid, con_create_arr); + RETURN NEXT col_is_pk('add_pkeytest', ARRAY['col1', 'col2']); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_add_constraint_pkey_col_mix_multicol() RETURNS SETOF TEXT AS $f$ +DECLARE + con_create_arr jsonb := '[{"type": "p", "columns": [1, "col2"]}]'; +BEGIN + PERFORM __setup_add_pkey(); + PERFORM msar.add_constraints('add_pkeytest'::regclass::oid, con_create_arr); + RETURN NEXT col_is_pk('add_pkeytest', ARRAY['col1', 'col2']); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION __setup_add_fkey() RETURNS SETOF TEXT AS $$ +BEGIN + CREATE TABLE add_fk_users (id serial primary key, fname TEXT, lname TEXT, phoneno TEXT); + INSERT INTO add_fk_users (fname, lname, phoneno) VALUES + ('alice', 'smith', '123 4567'), + ('bob', 'jones', '234 5678'), + ('eve', 'smith', '345 6789'); + CREATE TABLE add_fk_comments (id serial primary key, user_id integer, comment text); + INSERT INTO add_fk_comments (user_id, comment) VALUES + (1, 'aslfkjasfdlkjasdfl'), + (2, 'aslfkjasfdlkjasfl'), + (3, 'aslfkjasfdlkjsfl'), + (1, 'aslfkjasfdlkasdfl'), + (2, 'aslfkjasfkjasdfl'), + (2, 'aslfkjasflkjasdfl'), + (3, 'aslfkjasfdjasdfl'), + (1, 'aslfkjasfkjasdfl'), + (1, 'fkjasfkjasdfl'); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_add_constraint_fkey_id_fullspec() RETURNS SETOF TEXT AS $f$ +DECLARE + con_create_arr jsonb; +BEGIN + PERFORM __setup_add_fkey(); + con_create_arr := format( + $j$[ + { + "name": "superfkey", + "type": "f", + "columns": [2], + "fkey_relation_id": %s, + "fkey_columns": [1], + "fkey_update_action": "a", + "fkey_delete_action": "a", + "fkey_match_type": "f" + } + ]$j$, 'add_fk_users'::regclass::oid + ); + PERFORM msar.add_constraints('add_fk_comments'::regclass::oid, con_create_arr); + RETURN NEXT fk_ok( + 'public', 'add_fk_comments', 'user_id', 'public', 'add_fk_users', 'id' + ); + RETURN NEXT results_eq( + $h$ + SELECT conname, confupdtype, confdeltype, confmatchtype + FROM pg_constraint WHERE conname='superfkey' + $h$, + $w$VALUES ('superfkey'::name, 'a'::"char", 'a'::"char", 'f'::"char")$w$ + ); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION fkey_options_eq("char", "char", "char") RETURNS TEXT AS $f$ +DECLARE + con_create_arr jsonb; +BEGIN + PERFORM __setup_add_fkey(); + con_create_arr := format( + $j$[ + { + "name": "superfkey", + "type": "f", + "columns": [2], + "fkey_relation_id": %s, + "fkey_update_action": "%s", + "fkey_delete_action": "%s", + "fkey_match_type": "%s" + } + ]$j$, + 'add_fk_users'::regclass::oid, $1, $2, $3 + ); + PERFORM msar.add_constraints('add_fk_comments'::regclass::oid, con_create_arr); + RETURN results_eq( + $h$ + SELECT conname, confupdtype, confdeltype, confmatchtype + FROM pg_constraint WHERE conname='superfkey' + $h$, + format( + $w$VALUES ('superfkey'::name, '%s'::"char", '%s'::"char", '%s'::"char")$w$, + $1, $2, $3 + ), + format('Should have confupdtype %s, confdeltype %s, and confmatchtype %s', $1, $2, $3) + ); +END; +$f$ LANGUAGE plpgsql; + + +-- Options for fkey delete, update action and match type +-- a = no action, r = restrict, c = cascade, n = set null, d = set default +-- f = full, s = simple +-- Note that partial match is not implemented. 
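+--
+-- As a quick illustration (hypothetical, not executed by the suite): under these codes, a
+-- call like fkey_options_eq('c', 'n', 's') asserts that msar.add_constraints produced a
+-- constraint equivalent to
+--   FOREIGN KEY (user_id) REFERENCES add_fk_users (id)
+--     MATCH SIMPLE ON UPDATE CASCADE ON DELETE SET NULL
+-- since pg_constraint stores confupdtype, confdeltype, and confmatchtype as exactly these
+-- single-character codes.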
+
+
+CREATE OR REPLACE FUNCTION test_add_constraints_fkey_opts_aas() RETURNS SETOF TEXT AS $f$
+BEGIN
+  PERFORM fkey_options_eq('a', 'a', 's');
+  RETURN NEXT fk_ok(
+    'public', 'add_fk_comments', 'user_id', 'public', 'add_fk_users', 'id'
+  );
+END;
+$f$ LANGUAGE plpgsql;
+
+
+CREATE OR REPLACE FUNCTION test_add_constraints_fkey_opts_arf() RETURNS SETOF TEXT AS $f$
+BEGIN
+  PERFORM fkey_options_eq('a', 'r', 'f');
+  RETURN NEXT fk_ok(
+    'public', 'add_fk_comments', 'user_id', 'public', 'add_fk_users', 'id'
+  );
+END;
+$f$ LANGUAGE plpgsql;
+
+
+CREATE OR REPLACE FUNCTION test_add_constraints_fkey_opts_rrf() RETURNS SETOF TEXT AS $f$
+BEGIN
+  PERFORM fkey_options_eq('r', 'r', 'f');
+  RETURN NEXT fk_ok(
+    'public', 'add_fk_comments', 'user_id', 'public', 'add_fk_users', 'id'
+  );
+END;
+$f$ LANGUAGE plpgsql;
+
+
+CREATE OR REPLACE FUNCTION test_add_constraints_fkey_opts_ccf() RETURNS SETOF TEXT AS $f$
+BEGIN
+  PERFORM fkey_options_eq('c', 'c', 'f');
+  RETURN NEXT fk_ok(
+    'public', 'add_fk_comments', 'user_id', 'public', 'add_fk_users', 'id'
+  );
+END;
+$f$ LANGUAGE plpgsql;
+
+
+CREATE OR REPLACE FUNCTION test_add_constraints_fkey_opts_nnf() RETURNS SETOF TEXT AS $f$
+BEGIN
+  PERFORM fkey_options_eq('n', 'n', 'f');
+  RETURN NEXT fk_ok(
+    'public', 'add_fk_comments', 'user_id', 'public', 'add_fk_users', 'id'
+  );
+END;
+$f$ LANGUAGE plpgsql;
+
+
+CREATE OR REPLACE FUNCTION test_add_constraints_fkey_opts_ddf() RETURNS SETOF TEXT AS $f$
+BEGIN
+  PERFORM fkey_options_eq('d', 'd', 'f');
+  RETURN NEXT fk_ok(
+    'public', 'add_fk_comments', 'user_id', 'public', 'add_fk_users', 'id'
+  );
+END;
+$f$ LANGUAGE plpgsql;
+
+
+CREATE OR REPLACE FUNCTION __setup_add_unique() RETURNS SETOF TEXT AS $$
+BEGIN
+  CREATE TABLE add_unique_con (id serial primary key, col1 integer, col2 integer, col3 integer);
+  INSERT INTO add_unique_con (col1, col2, col3) VALUES
+    (1, 1, 1),
+    (2, 2, 3),
+    (3, 3, 3);
+END;
+$$ LANGUAGE plpgsql;
+
+
+CREATE OR REPLACE FUNCTION test_add_constraints_unique_single() RETURNS SETOF TEXT AS $f$
+DECLARE
+  con_create_arr jsonb := '[{"name": "myuniqcons", "type": "u", "columns": [2]}]';
+BEGIN
+  PERFORM __setup_add_unique();
+  PERFORM msar.add_constraints('add_unique_con'::regclass::oid, con_create_arr);
+  RETURN NEXT col_is_unique('add_unique_con', ARRAY['col1']);
+END;
+$f$ LANGUAGE plpgsql;
+
+
+CREATE OR REPLACE FUNCTION test_add_constraints_unique_multicol() RETURNS SETOF TEXT AS $f$
+DECLARE
+  con_create_arr jsonb := '[{"name": "myuniqcons", "type": "u", "columns": [2, 3]}]';
+BEGIN
+  PERFORM __setup_add_unique();
+  PERFORM msar.add_constraints('add_unique_con'::regclass::oid, con_create_arr);
+  RETURN NEXT col_is_unique('add_unique_con', ARRAY['col1', 'col2']);
+END;
+$f$ LANGUAGE plpgsql;
+
+
+CREATE OR REPLACE FUNCTION test_add_constraint_duplicate_name() RETURNS SETOF TEXT AS $f$
+DECLARE
+  con_create_arr jsonb := '[{"name": "myuniqcons", "type": "u", "columns": [2]}]';
+  con_create_arr2 jsonb := '[{"name": "myuniqcons", "type": "u", "columns": [3]}]';
+BEGIN
+  PERFORM __setup_add_unique();
+  PERFORM msar.add_constraints('add_unique_con'::regclass::oid, con_create_arr);
+  RETURN NEXT throws_ok(
+    format(
+      'SELECT msar.add_constraints(%s, ''%s'');', 'add_unique_con'::regclass::oid, con_create_arr2
+    ),
+    '42P07',
'relation "myuniqcons" already exists', + 'Throws error for duplicate constraint name' + ); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION __setup_copy_unique() RETURNS SETOF TEXT AS $$ +BEGIN + CREATE TABLE copy_unique_con + (id serial primary key, col1 integer, col2 integer, col3 integer, col4 integer); + ALTER TABLE copy_unique_con ADD CONSTRAINT olduniqcon UNIQUE (col1, col2, col3); + INSERT INTO copy_unique_con (col1, col2, col3, col4) VALUES + (1, 2, 5, 9), + (2, 3, 6, 0), + (3, 4, 8, 1); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_copy_constraint() RETURNS SETOF TEXT AS $f$ +DECLARE + orig_oid oid; +BEGIN + PERFORM __setup_copy_unique(); + orig_oid := oid + FROM pg_constraint + WHERE conrelid='copy_unique_con'::regclass::oid AND conname='olduniqcon'; + PERFORM msar.copy_constraint(orig_oid, 4::smallint, 5::smallint); + RETURN NEXT col_is_unique('copy_unique_con', ARRAY['col1', 'col2', 'col4']); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_add_constraint_errors() RETURNS SETOF TEXT AS $f$ +DECLARE + con_create_arr jsonb := '[{"type": "p", "columns": [7]}]'::jsonb; +BEGIN + PERFORM __setup_add_pkey(); + RETURN NEXT throws_ok( + format( + 'SELECT msar.add_constraints(%s, ''%s'');', + 'add_pkeytest'::regclass::oid, + '[{"type": "p", "columns": [7]}]'::jsonb + ), + '42601', + 'syntax error at end of input', + 'Throws error for nonexistent attnum' + ); + RETURN NEXT throws_ok( + format( + 'SELECT msar.add_constraints(%s, ''%s'');', 234, '[{"type": "p", "columns": [1]}]'::jsonb + ), + '42601', + 'syntax error at or near "234"', + 'Throws error for nonexistent table ID' + ); + RETURN NEXT throws_ok( + format( + 'SELECT msar.add_constraints(%s, ''%s'');', + 'add_pkeytest'::regclass::oid, + '[{"type": "k", "columns": [1]}]'::jsonb + ), + '42601', + 'syntax error at end of input', + 'Throws error for nonexistent constraint type' + ); + RETURN NEXT throws_ok( + format( + 'SELECT msar.add_constraints(%s, ''%s'');', + 'add_pkeytest'::regclass::oid, + '[{"type": "p", "columns": [1, "col1"]}]'::jsonb + ), + '42701', + 'column "col1" appears twice in primary key constraint', + 'Throws error for nonexistent duplicate pkey col' + ); +END; +$f$ LANGUAGE plpgsql; + + +-- msar.drop_constraint --------------------------------------------------------------------------- + +CREATE OR REPLACE FUNCTION __setup_drop_constraint() RETURNS SETOF TEXT AS $$ +BEGIN + CREATE TABLE category( + id serial primary key, + item_category text, + CONSTRAINT uq_cat UNIQUE(item_category) + ); + CREATE TABLE orders ( + id serial primary key, + item_name text, + price integer, + category_id integer, + CONSTRAINT fk_cat FOREIGN KEY(category_id) REFERENCES category(id) + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_drop_constraint() RETURNS SETOF TEXT AS $$ +BEGIN + PERFORM __setup_drop_constraint(); + PERFORM msar.drop_constraint( + sch_name => 'public', + tab_name => 'category', + con_name => 'uq_cat' + ); + PERFORM msar.drop_constraint( + sch_name => 'public', + tab_name => 'orders', + con_name => 'fk_cat' + ); + /* There isn't a col_isnt_unique function in pgTAP so we are improvising + by adding 2 same values here.*/ + INSERT INTO category(item_category) VALUES ('tech'),('tech'); + RETURN NEXT col_isnt_fk('orders', 'category_id'); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_drop_constraint_using_oid() RETURNS SETOF TEXT AS $$ +DECLARE + uq_cat_oid oid; + fk_cat_oid oid; +BEGIN + PERFORM __setup_drop_constraint(); + uq_cat_oid := 
+    oid FROM pg_constraint WHERE conname='uq_cat';
+  fk_cat_oid := oid FROM pg_constraint WHERE conname='fk_cat';
+  PERFORM msar.drop_constraint(
+    tab_id => 'category'::regclass::oid,
+    con_id => uq_cat_oid
+  );
+  PERFORM msar.drop_constraint(
+    tab_id => 'orders'::regclass::oid,
+    con_id => fk_cat_oid
+  );
+  /* pgTAP has no col_isnt_unique assertion, so we improvise: inserting two identical
+  values only succeeds if the unique constraint is really gone. */
+  INSERT INTO category(item_category) VALUES ('tech'),('tech');
+  RETURN NEXT col_isnt_fk('orders', 'category_id');
+END;
+$$ LANGUAGE plpgsql;
+
+
+-- msar.create_link -------------------------------------------------------------------------------
+
+CREATE OR REPLACE FUNCTION __setup_link_tables() RETURNS SETOF TEXT AS $$
+BEGIN
+  CREATE TABLE actors (id SERIAL PRIMARY KEY, actor_name text);
+  INSERT INTO actors(actor_name) VALUES
+    ('Cillian Murphy'),
+    ('Leonardo DiCaprio'),
+    ('Margot Robbie'),
+    ('Ryan Gosling'),
+    ('Ana de Armas');
+  CREATE TABLE movies (id SERIAL PRIMARY KEY, movie_name text);
+  INSERT INTO movies(movie_name) VALUES
+    ('The Wolf of Wall Street'),
+    ('Inception'),
+    ('Oppenheimer'),
+    ('Barbie'),
+    ('Blade Runner 2049');
+END;
+$$ LANGUAGE plpgsql;
+
+
+CREATE OR REPLACE FUNCTION test_add_foreign_key_column() RETURNS SETOF TEXT AS $$
+BEGIN
+  PERFORM __setup_link_tables();
+  PERFORM msar.add_foreign_key_column(
+    frel_id => 'actors'::regclass::oid,
+    rel_id => 'movies'::regclass::oid,
+    col_name => 'act_id'
+  );
+  RETURN NEXT has_column('movies', 'act_id');
+  RETURN NEXT col_type_is('movies', 'act_id', 'integer');
+  RETURN NEXT col_is_fk('movies', 'act_id');
+END;
+$$ LANGUAGE plpgsql;
+
+
+CREATE OR REPLACE FUNCTION test_create_one_to_one_link() RETURNS SETOF TEXT AS $$
+BEGIN
+  PERFORM __setup_link_tables();
+  PERFORM msar.add_foreign_key_column(
+    frel_id => 'actors'::regclass::oid,
+    rel_id => 'movies'::regclass::oid,
+    col_name => 'act_id',
+    unique_link => true
+  );
+  RETURN NEXT has_column('movies', 'act_id');
+  RETURN NEXT col_type_is('movies', 'act_id', 'integer');
+  RETURN NEXT col_is_fk('movies', 'act_id');
+  RETURN NEXT col_is_unique('movies', 'act_id');
+END;
+$$ LANGUAGE plpgsql;
+
+
+CREATE OR REPLACE FUNCTION test_add_mapping_table() RETURNS SETOF TEXT AS $$
+BEGIN
+  PERFORM __setup_link_tables();
+  PERFORM msar.add_mapping_table(
+    sch_id => 'public'::regnamespace::oid,
+    tab_name => 'movies_actors',
+    mapping_columns => jsonb_build_array(
+      jsonb_build_object('column_name', 'movie_id', 'referent_table_oid', 'movies'::regclass::oid),
+      jsonb_build_object('column_name', 'actor_id', 'referent_table_oid', 'actors'::regclass::oid)
+    )
+  );
+  RETURN NEXT has_table('public'::name, 'movies_actors'::name);
+  RETURN NEXT has_column('movies_actors', 'movie_id');
+  RETURN NEXT col_type_is('movies_actors', 'movie_id', 'integer');
+  RETURN NEXT col_is_fk('movies_actors', 'movie_id');
+  RETURN NEXT has_column('movies_actors', 'actor_id');
+  RETURN NEXT col_type_is('movies_actors', 'actor_id', 'integer');
+  RETURN NEXT col_is_fk('movies_actors', 'actor_id');
+END;
+$$ LANGUAGE plpgsql;
+
+
+-- msar.schema_ddl --------------------------------------------------------------------------------
+
+CREATE OR REPLACE FUNCTION test_create_schema_without_description() RETURNS SETOF TEXT AS $$
+DECLARE sch_oid oid;
+BEGIN
+  SELECT msar.create_schema('foo bar', NULL) ->> 'oid' INTO sch_oid;
+  RETURN NEXT has_schema('foo bar');
+  RETURN NEXT is(sch_oid, msar.get_schema_oid('foo bar'));
+  RETURN NEXT is(obj_description(sch_oid), NULL);
+END;
+$$ LANGUAGE plpgsql;
+
+
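+-- A minimal sketch of the call pattern exercised above (an illustrative comment only,
+-- using an assumed scratch schema name):
+--   SELECT (msar.create_schema('scratch', NULL) ->> 'oid')::oid;
+-- i.e., the function returns a jsonb object whose 'oid' key is the new schema's oid,
+-- matching msar.get_schema_oid('scratch').
+
+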
+CREATE OR REPLACE FUNCTION test_create_schema_with_description() RETURNS SETOF TEXT AS $$ +DECLARE sch_oid oid; +BEGIN + SELECT msar.create_schema('foo bar', NULL, 'yay') ->> 'oid' INTO sch_oid; + RETURN NEXT has_schema('foo bar'); + RETURN NEXT is(sch_oid, msar.get_schema_oid('foo bar')); + RETURN NEXT is(obj_description(sch_oid), 'yay'); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_create_schema_that_already_exists() RETURNS SETOF TEXT AS $t$ +DECLARE sch_oid oid; +BEGIN + SELECT msar.create_schema('foo bar', NULL) ->> 'oid' INTO sch_oid; + RETURN NEXT throws_ok($$SELECT msar.create_schema('foo bar', NULL)$$, '42P06'); + RETURN NEXT is(msar.create_schema_if_not_exists('foo bar'), sch_oid); +END; +$t$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION __setup_drop_schema() RETURNS SETOF TEXT AS $$ +BEGIN + CREATE SCHEMA drop_test_schema; +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_drop_schema_using_name() RETURNS SETOF TEXT AS $$ +BEGIN + PERFORM __setup_drop_schema(); + PERFORM msar.drop_schema( + sch_name => 'drop_test_schema', + cascade_ => false + ); + RETURN NEXT hasnt_schema('drop_test_schema'); + RETURN NEXT throws_ok( + $d$ + SELECT msar.drop_schema( + sch_name => 'drop_non_existing_schema', + cascade_ => false + ) + $d$, + '3F000' + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_drop_schema_using_oid() RETURNS SETOF TEXT AS $$ +BEGIN + PERFORM __setup_drop_schema(); + PERFORM msar.drop_schema( + sch_id => 'drop_test_schema'::regnamespace::oid, + cascade_ => false + ); + RETURN NEXT hasnt_schema('drop_test_schema'); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_drop_schema_using_invalid_oid() RETURNS SETOF TEXT AS $$ +BEGIN + PERFORM __setup_drop_schema(); + RETURN NEXT throws_ok( + $d$ + SELECT msar.drop_schema( + sch_id => 0, + cascade_ => false + ) + $d$, + '3F000' + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION __setup_schema_with_dependent_obj() RETURNS SETOF TEXT AS $$ +BEGIN + CREATE SCHEMA schema1; + CREATE TABLE schema1.actors ( + id SERIAL PRIMARY KEY, + actor_name TEXT + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_drop_schema_cascade() RETURNS SETOF TEXT AS $$ +BEGIN + PERFORM __setup_schema_with_dependent_obj(); + PERFORM msar.drop_schema( + sch_name => 'schema1', + cascade_ => true + ); + RETURN NEXT hasnt_schema('schema1'); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_drop_schema_restricted() RETURNS SETOF TEXT AS $$ +BEGIN + PERFORM __setup_schema_with_dependent_obj(); + RETURN NEXT throws_ok( + $d$ + SELECT msar.drop_schema( + sch_name => 'schema1', + cascade_ => false + ) + $d$, + '2BP01' + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_patch_schema() RETURNS SETOF TEXT AS $$ +DECLARE sch_oid oid; +BEGIN + CREATE SCHEMA foo; + SELECT msar.get_schema_oid('foo') INTO sch_oid; + + PERFORM msar.patch_schema('foo', '{"name": "altered"}'); + RETURN NEXT hasnt_schema('foo'); + RETURN NEXT has_schema('altered'); + RETURN NEXT is(obj_description(sch_oid), NULL); + RETURN NEXT is(msar.get_schema_name(sch_oid), 'altered'); + + PERFORM msar.patch_schema(sch_oid, '{"description": "yay"}'); + RETURN NEXT is(obj_description(sch_oid), 'yay'); + + -- Description is removed when NULL is passed. + PERFORM msar.patch_schema(sch_oid, '{"description": null}'); + RETURN NEXT is(obj_description(sch_oid), NULL); + + -- Description is removed when an empty string is passed. 
+ PERFORM msar.patch_schema(sch_oid, '{"description": ""}'); + RETURN NEXT is(obj_description(sch_oid), NULL); + + PERFORM msar.patch_schema(sch_oid, '{"name": "NEW", "description": "WOW"}'); + RETURN NEXT has_schema('NEW'); + RETURN NEXT is(msar.get_schema_name(sch_oid), 'NEW'); + RETURN NEXT is(obj_description(sch_oid), 'WOW'); + + -- Patching should be idempotent + PERFORM msar.patch_schema(sch_oid, '{"name": "NEW", "description": "WOW"}'); + RETURN NEXT has_schema('NEW'); + RETURN NEXT is(msar.get_schema_name(sch_oid), 'NEW'); + RETURN NEXT is(obj_description(sch_oid), 'WOW'); +END; +$$ LANGUAGE plpgsql; + + +-- msar.alter_table + +CREATE OR REPLACE FUNCTION __setup_alter_table() RETURNS SETOF TEXT AS $$ +BEGIN + CREATE TABLE alter_this_table(id INT GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, col1 TEXT); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_rename_table() RETURNS SETOF TEXT AS $$ +BEGIN + PERFORM __setup_alter_table(); + PERFORM msar.rename_table( + sch_name =>'public', + old_tab_name => 'alter_this_table', + new_tab_name => 'renamed_table' + ); + RETURN NEXT hasnt_table('alter_this_table'); + RETURN NEXT has_table('renamed_table'); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_rename_table_with_same_name() RETURNS SETOF TEXT AS $$ +BEGIN + PERFORM __setup_alter_table(); + PERFORM msar.rename_table( + sch_name =>'public', + old_tab_name => 'alter_this_table', + new_tab_name => 'alter_this_table' + ); + RETURN NEXT has_table('alter_this_table'); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_rename_table_using_oid() RETURNS SETOF TEXT AS $$ +BEGIN + PERFORM __setup_alter_table(); + PERFORM msar.rename_table( + tab_id => 'alter_this_table'::regclass::oid, + new_tab_name => 'renamed_table' + ); + RETURN NEXT hasnt_table('alter_this_table'); + RETURN NEXT has_table('renamed_table'); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_comment_on_table() RETURNS SETOF TEXT AS $$ +BEGIN + PERFORM __setup_alter_table(); + PERFORM msar.comment_on_table( + sch_name =>'public', + tab_name => 'alter_this_table', + comment_ => 'This is a comment!' + ); + RETURN NEXT is(obj_description('alter_this_table'::regclass::oid), 'This is a comment!'); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_comment_on_table_using_oid() RETURNS SETOF TEXT AS $$ +BEGIN + PERFORM __setup_alter_table(); + PERFORM msar.comment_on_table( + tab_id => 'alter_this_table'::regclass::oid, + comment_ => 'This is a comment!' + ); + RETURN NEXT is(obj_description('alter_this_table'::regclass::oid), 'This is a comment!'); +END; +$$ LANGUAGE plpgsql; + + +-- msar.infer_table_column_data_types -------------------------------------------------------------- + +CREATE OR REPLACE FUNCTION __setup_type_inference() RETURNS SETOF TEXT AS $$ +BEGIN +CREATE TABLE "Types Test" ( +-- We can't test custom types in this context. 
+  id integer PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY,
+  "Empty" text,
+  "Boolean" text,
+  "Date" text,
+  "Numeric" text,
+  "Interval" text,
+  "Text" text
+);
+INSERT INTO "Types Test"
+  ("Boolean", "Date", "Numeric", "Interval", "Text")
+VALUES
+  ('0', '2000-01-01', '0', '3 days', 'cat'),
+  ('1', '6/23/2004', '3.14', '3 hours', 'bat'),
+  ('t', 'May-2007-29', '-234.22', '3 minutes', 'rat'),
+  ('false', '20200909', '1', '3 seconds', 'mat');
+END;
+$$ LANGUAGE plpgsql;
+
+
+CREATE OR REPLACE FUNCTION test_infer_table_column_data_types() RETURNS SETOF TEXT AS $f$
+BEGIN
+  PERFORM __setup_type_inference();
+  RETURN NEXT is(
+    msar.infer_table_column_data_types('"Types Test"'::regclass),
+    jsonb_build_object(
+      1, 'integer',
+      2, 'text',
+      3, 'boolean',
+      4, 'date',
+      5, 'numeric',
+      6, 'interval',
+      7, 'text'
+    )
+  );
+END;
+$f$ LANGUAGE plpgsql;
+
+
+-- msar.add_mathesar_table
+
+CREATE OR REPLACE FUNCTION __setup_create_table() RETURNS SETOF TEXT AS $f$
+BEGIN
+  CREATE SCHEMA tab_create_schema;
+END;
+$f$ LANGUAGE plpgsql;
+
+
+CREATE OR REPLACE FUNCTION test_add_mathesar_table_minimal_id_col() RETURNS SETOF TEXT AS $f$
+BEGIN
+  PERFORM __setup_create_table();
+  PERFORM msar.add_mathesar_table(
+    'tab_create_schema'::regnamespace::oid, 'anewtable', null, null, null, null
+  );
+  RETURN NEXT col_is_pk(
+    'tab_create_schema', 'anewtable', 'id', 'id column should be pkey'
+  );
+  RETURN NEXT results_eq(
+    $q$SELECT attidentity
+    FROM pg_attribute
+    WHERE attrelid='tab_create_schema.anewtable'::regclass::oid and attname='id'$q$,
+    $v$VALUES ('d'::"char")$v$,
+    'id column should be generated by default as identity'
+  );
+END;
+$f$ LANGUAGE plpgsql;
+
+
+CREATE OR REPLACE FUNCTION test_add_mathesar_table_badname() RETURNS SETOF TEXT AS $f$
+DECLARE
+  badname text := '"new"''dsf'' \t"';
+BEGIN
+  PERFORM __setup_create_table();
+  PERFORM msar.add_mathesar_table(
+    'tab_create_schema'::regnamespace::oid, badname, null, null, null, null
+  );
+  RETURN NEXT has_table('tab_create_schema'::name, badname::name);
+END;
+$f$ LANGUAGE plpgsql;
+
+
+CREATE OR REPLACE FUNCTION test_add_mathesar_table_noname() RETURNS SETOF TEXT AS $f$
+DECLARE
+  generated_name text := 'Table 1';
+BEGIN
+  PERFORM __setup_create_table();
+  PERFORM msar.add_mathesar_table(
+    'tab_create_schema'::regnamespace::oid, null, null, null, null, null
+  );
+  RETURN NEXT has_table('tab_create_schema'::name, generated_name::name);
+END;
+$f$ LANGUAGE plpgsql;
+
+
+CREATE OR REPLACE FUNCTION test_add_mathesar_table_noname_avoid_collision()
+RETURNS SETOF TEXT AS $f$
+DECLARE
+  generated_name text := 'Table 3';
+BEGIN
+  PERFORM __setup_create_table();
+  PERFORM msar.add_mathesar_table(
+    'tab_create_schema'::regnamespace::oid, null, null, null, null, null
+  );
+  PERFORM msar.add_mathesar_table(
+    'tab_create_schema'::regnamespace::oid, null, null, null, null, null
+  );
+  RETURN NEXT has_table('tab_create_schema'::name, 'Table 1'::name);
+  RETURN NEXT has_table('tab_create_schema'::name, 'Table 2'::name);
+  PERFORM msar.drop_table(
+    sch_name => 'tab_create_schema',
+    tab_name => 'Table 1',
+    cascade_ => false,
+    if_exists => false
+  );
+  RETURN NEXT hasnt_table('tab_create_schema'::name, 'Table 1'::name);
+  PERFORM msar.add_mathesar_table(
+    'tab_create_schema'::regnamespace::oid, null, null, null, null, null
+  );
+  RETURN NEXT has_table('tab_create_schema'::name, generated_name::name);
+END;
+$f$ LANGUAGE plpgsql;
+
+
+CREATE OR REPLACE FUNCTION test_add_mathesar_table_columns() RETURNS SETOF TEXT AS $f$
+DECLARE
+  col_defs jsonb := $j$[
+    {"name": "mycolumn", "type": {"name": "numeric"}},
+    {},
+    {"type": {"name": "varchar", "options": {"length": 128}}}
+  ]$j$;
+BEGIN
+  PERFORM __setup_create_table();
+  PERFORM msar.add_mathesar_table(
+    'tab_create_schema'::regnamespace::oid,
+    'cols_table',
+    col_defs,
+    null, null, null
+  );
+  RETURN NEXT col_is_pk(
+    'tab_create_schema', 'cols_table', 'id', 'id column should be pkey'
+  );
+  RETURN NEXT col_type_is(
+    'tab_create_schema'::name, 'cols_table'::name, 'mycolumn'::name, 'numeric'
+  );
+  RETURN NEXT col_type_is(
+    'tab_create_schema'::name, 'cols_table'::name, 'Column 3'::name, 'character varying(128)'
+  );
+END;
+$f$ LANGUAGE plpgsql;
+
+
+CREATE OR REPLACE FUNCTION test_get_preview() RETURNS SETOF TEXT AS $f$
+DECLARE
+  col_cast_def jsonb := $j$[
+    {
+      "attnum": 1,
+      "type": {"name": "integer"}
+    },
+    {
+      "attnum":2,
+      "type": {"name": "numeric", "options": {"precision":5, "scale":2}}
+    }
+  ]$j$;
+  want_records jsonb := $j$[
+    {"id": 1, "length": 2.00},
+    {"id": 2, "length": 3.00},
+    {"id": 3, "length": 4.00},
+    {"id": 4, "length": 5.22}
+  ]
+  $j$;
+  have_records jsonb;
+BEGIN
+  PERFORM __setup_create_table();
+  CREATE TABLE tab_create_schema.foo(id INTEGER GENERATED BY DEFAULT AS IDENTITY, length FLOAT8);
+  INSERT INTO tab_create_schema.foo(length) VALUES (2), (3), (4), (5.2225);
+  have_records := msar.get_preview(
+    tab_id => 'tab_create_schema.foo'::regclass::oid,
+    col_cast_def => col_cast_def,
+    rec_limit => NULL
+  );
+  RETURN NEXT is(have_records, want_records);
+END;
+$f$ LANGUAGE plpgsql;
+
+
+CREATE OR REPLACE FUNCTION test_add_mathesar_table_comment() RETURNS SETOF TEXT AS $f$
+DECLARE
+  comment_ text := $c$my "Super;";'; DROP SCHEMA tab_create_schema;'$c$;
+BEGIN
+  PERFORM __setup_create_table();
+  PERFORM msar.add_mathesar_table(
+    'tab_create_schema'::regnamespace::oid, 'cols_table', null, null, null, comment_
+  );
+  RETURN NEXT col_is_pk(
+    'tab_create_schema', 'cols_table', 'id', 'id column should be pkey'
+  );
+  RETURN NEXT is(
+    obj_description('tab_create_schema.cols_table'::regclass::oid),
+    comment_,
+    'created table should have specified description (comment)'
+  );
+END;
+$f$ LANGUAGE plpgsql;
+
+
+CREATE OR REPLACE FUNCTION __setup_column_alter() RETURNS SETOF TEXT AS $$
+BEGIN
+  CREATE TABLE col_alters (
+    id integer GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
+    col1 text NOT NULL,
+    col2 numeric DEFAULT 5,
+    "Col sp" text,
+    col_opts numeric(5, 3),
+    coltim timestamp DEFAULT now()
+  );
+END;
+$$ LANGUAGE plpgsql;
+
+
+CREATE OR REPLACE FUNCTION test_process_col_alter_jsonb() RETURNS SETOF TEXT AS $f$/*
+These don't actually modify the table, so we can run multiple checks in the same test function.
+
+We only need to test null/empty behavior here, since the main functionality is covered by the
+msar.alter_columns tests.
+
+It's debatable whether this test should continue to exist, but it was useful for initial
+development, and runs quickly.
+*/ +DECLARE + tab_id oid; +BEGIN + PERFORM __setup_column_alter(); + tab_id := 'col_alters'::regclass::oid; + RETURN NEXT is(msar.process_col_alter_jsonb(tab_id, '[{"attnum": 2}]'), null); + RETURN NEXT is(msar.process_col_alter_jsonb(tab_id, '[{"attnum": 2, "name": "blah"}]'), null); + RETURN NEXT is(msar.process_col_alter_jsonb(tab_id, '[]'), null); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_alter_columns_single_name() RETURNS SETOF TEXT AS $f$ +DECLARE + col_alters_jsonb jsonb := '[{"attnum": 2, "name": "blah"}]'; +BEGIN + PERFORM __setup_column_alter(); + RETURN NEXT is(msar.alter_columns('col_alters'::regclass::oid, col_alters_jsonb), ARRAY[2]); + RETURN NEXT columns_are( + 'col_alters', + ARRAY['id', 'blah', 'col2', 'Col sp', 'col_opts', 'coltim'] + ); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_alter_columns_multi_names() RETURNS SETOF TEXT AS $f$ +DECLARE + col_alters_jsonb jsonb := $j$[ + {"attnum": 2, "name": "new space"}, + {"attnum": 4, "name": "nospace"} + ]$j$; +BEGIN + PERFORM __setup_column_alter(); + RETURN NEXT is(msar.alter_columns('col_alters'::regclass::oid, col_alters_jsonb), ARRAY[2, 4]); + RETURN NEXT columns_are( + 'col_alters', + ARRAY['id', 'new space', 'col2', 'nospace', 'col_opts', 'coltim'] + ); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_alter_columns_type() RETURNS SETOF TEXT AS $f$ +DECLARE + col_alters_jsonb jsonb := $j$[ + {"attnum": 2, "type": {"name": "varchar", "options": {"length": 48}}}, + {"attnum": 3, "type": {"name": "integer"}}, + {"attnum": 4, "type": {"name": "integer"}} + ]$j$; +BEGIN + PERFORM __setup_column_alter(); + RETURN NEXT is(msar.alter_columns('col_alters'::regclass::oid, col_alters_jsonb), ARRAY[2, 3, 4]); + RETURN NEXT col_type_is('col_alters', 'col1', 'character varying(48)'); + RETURN NEXT col_type_is('col_alters', 'col2', 'integer'); + RETURN NEXT col_default_is('col_alters', 'col2', 5); + RETURN NEXT col_type_is('col_alters', 'Col sp', 'integer'); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_alter_columns_type_options() RETURNS SETOF TEXT AS $f$ +DECLARE + col_alters_jsonb jsonb := $j$[ + {"attnum": 5, "type": {"options": {"precision": 4}}} + ]$j$; +BEGIN + PERFORM __setup_column_alter(); + RETURN NEXT is(msar.alter_columns('col_alters'::regclass::oid, col_alters_jsonb), ARRAY[5]); + RETURN NEXT col_type_is('col_alters', 'col_opts', 'numeric(4,0)'); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_alter_columns_drop() RETURNS SETOF TEXT AS $f$ +DECLARE + col_alters_jsonb jsonb := $j$[ + {"attnum": 2, "delete": true}, + {"attnum": 5, "delete": true} + ]$j$; +BEGIN + PERFORM __setup_column_alter(); + RETURN NEXT is(msar.alter_columns('col_alters'::regclass::oid, col_alters_jsonb), ARRAY[2, 5]); + RETURN NEXT columns_are('col_alters', ARRAY['id', 'col2', 'Col sp', 'coltim']); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_alter_columns_nullable() RETURNS SETOF TEXT AS $f$ +DECLARE + col_alters_jsonb jsonb := $j$[ + {"attnum": 2, "not_null": false}, + {"attnum": 5, "not_null": true} + ]$j$; +BEGIN + PERFORM __setup_column_alter(); + RETURN NEXT is(msar.alter_columns('col_alters'::regclass::oid, col_alters_jsonb), ARRAY[2, 5]); + RETURN NEXT col_is_null('col_alters', 'col1'); + RETURN NEXT col_not_null('col_alters', 'col_opts'); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_alter_columns_leaves_defaults() RETURNS SETOF TEXT AS $f$ +DECLARE + col_alters_jsonb jsonb := $j$[ + {"attnum": 3, 
"type": {"name": "integer"}}, + {"attnum": 6, "type": {"name": "date"}} + ]$j$; +BEGIN + PERFORM __setup_column_alter(); + RETURN NEXT is(msar.alter_columns('col_alters'::regclass::oid, col_alters_jsonb), ARRAY[3, 6]); + RETURN NEXT col_default_is('col_alters', 'col2', '5'); + RETURN NEXT col_default_is('col_alters', 'coltim', '(now())::date'); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_alter_columns_drops_defaults() RETURNS SETOF TEXT AS $f$ +DECLARE + col_alters_jsonb jsonb := $j$[ + {"attnum": 3, "default": null}, + {"attnum": 6, "type": {"name": "date"}, "default": null} + ]$j$; +BEGIN + PERFORM __setup_column_alter(); + RETURN NEXT is(msar.alter_columns('col_alters'::regclass::oid, col_alters_jsonb), ARRAY[3, 6]); + RETURN NEXT col_hasnt_default('col_alters', 'col2'); + RETURN NEXT col_hasnt_default('col_alters', 'coltim'); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_alter_columns_sets_defaults() RETURNS SETOF TEXT AS $f$ +DECLARE + col_alters_jsonb jsonb := $j$[ + {"attnum": 2, "default": "test34"}, + {"attnum": 3, "default": 8}, + {"attnum": 5, "type": {"name": "integer"}, "default": 7}, + {"attnum": 6, "type": {"name": "text"}, "default": "test12"} + ]$j$; +BEGIN + PERFORM __setup_column_alter(); + RETURN NEXT is( + msar.alter_columns('col_alters'::regclass::oid, col_alters_jsonb), + ARRAY[2, 3, 5, 6] + ); + RETURN NEXT col_default_is('col_alters', 'col1', 'test34'); + RETURN NEXT col_default_is('col_alters', 'col2', '8'); + RETURN NEXT col_default_is('col_alters', 'col_opts', '7'); + RETURN NEXT col_default_is('col_alters', 'coltim', 'test12'); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_alter_columns_combo() RETURNS SETOF TEXT AS $f$ +DECLARE + col_alters_jsonb jsonb := $j$[ + { + "attnum": 2, + "name": "nullab numeric", + "not_null": false, + "type": {"name": "numeric", "options": {"precision": 8, "scale": 4}}, + "description": "This is; a comment with a semicolon!" + }, + {"attnum": 3, "name": "newcol2"}, + {"attnum": 4, "delete": true}, + {"attnum": 5, "not_null": true}, + {"attnum": 6, "name": "timecol", "not_null": true} + ]$j$; +BEGIN + PERFORM __setup_column_alter(); + RETURN NEXT is( + msar.alter_columns('col_alters'::regclass::oid, col_alters_jsonb), ARRAY[2, 3, 4, 5, 6] + ); + RETURN NEXT columns_are( + 'col_alters', ARRAY['id', 'nullab numeric', 'newcol2', 'col_opts', 'timecol'] + ); + RETURN NEXT col_is_null('col_alters', 'nullab numeric'); + RETURN NEXT col_type_is('col_alters', 'nullab numeric', 'numeric(8,4)'); + -- This test checks that nothing funny happened when dropping column 4 + RETURN NEXT col_type_is('col_alters', 'col_opts', 'numeric(5,3)'); + RETURN NEXT col_not_null('col_alters', 'col_opts'); + RETURN NEXT col_not_null('col_alters', 'timecol'); + RETURN NEXT is(msar.col_description('col_alters'::regclass::oid, 2), 'This is; a comment with a semicolon!'); + RETURN NEXT is(msar.col_description('col_alters'::regclass::oid, 3), NULL); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_comment_on_column() RETURNS SETOF TEXT AS $$ +DECLARE + change1 jsonb := $j$[ + { + "attnum": 2, + "description": "change1col2description" + }, + { + "attnum": 3, + "name": "change1col3name" + } + ]$j$; + change2 jsonb := $j$[ + { + "attnum": 2, + "description": "change2col2description" + }, + { + "attnum": 3, + "description": "change2col3description" + } + ]$j$; + -- Below change should not affect the description. 
+ change3 jsonb := $j$[ + { + "attnum": 2, + "name": "change3col2name" + }, + { + "attnum": 3, + "name": "change3col3name" + } + ]$j$; + change4 jsonb := $j$[ + { + "attnum": 2, + "name": "change4col2name", + "description": null + }, + { + "attnum": 3, + "name": "change4col3name" + } + ]$j$; +BEGIN + PERFORM __setup_column_alter(); + RETURN NEXT is(msar.col_description('col_alters'::regclass::oid, 2), NULL); + PERFORM msar.alter_columns('col_alters'::regclass::oid, change1); + RETURN NEXT is(msar.col_description('col_alters'::regclass::oid, 2), 'change1col2description'); + PERFORM msar.alter_columns('col_alters'::regclass::oid, change2); + RETURN NEXT is(msar.col_description('col_alters'::regclass::oid, 2), 'change2col2description'); + PERFORM msar.alter_columns('col_alters'::regclass::oid, change3); + RETURN NEXT is(msar.col_description('col_alters'::regclass::oid, 2), 'change2col2description'); + RETURN NEXT is(msar.col_description('col_alters'::regclass::oid, 3), 'change2col3description'); + PERFORM msar.alter_columns('col_alters'::regclass::oid, change4); + RETURN NEXT is(msar.col_description('col_alters'::regclass::oid, 2), NULL); + RETURN NEXT is(msar.col_description('col_alters'::regclass::oid, 3), 'change2col3description'); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION __setup_roster() RETURNS SETOF TEXT AS $$ +BEGIN +CREATE TABLE "Roster" ( + id integer PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, + "Student Name" text, + "Teacher" text, + "Teacher Email" text, + "Subject" varchar(20), + "Grade" integer +); +INSERT INTO "Roster" + ("Student Name", "Teacher", "Teacher Email", "Subject", "Grade") +VALUES + ('Stephanie Norris', 'James Jones', 'jamesjones@gmail.com', 'Physics', 43), + ('Stephanie Norris', 'Brooke Bowen', 'brookebowen@yahoo.com', 'P.E.', 37), + ('Stephanie Norris', 'Deanna Juarez', 'deannajuarez@hotmail.com', 'Chemistry', 55), + ('Stephanie Norris', 'Joseph Hill', 'josephhill@gmail.com', 'Biology', 41), + ('Stephanie Norris', 'Julie Garza', 'juliegarza@yahoo.com', 'Physics', 62), + ('Shannon Ramos', 'James Jones', 'jamesjones@gmail.com', 'Math', 44), + ('Shannon Ramos', 'Anna Cortez', 'annacortez@yahoo.com', 'Reading', 56), + ('Shannon Ramos', 'Jennifer Anderson', 'jenniferanderson@yahoo.com', 'Art', 31), + ('Shannon Ramos', 'Amber Hudson', 'amberhudson@hotmail.com', 'Art', 77), + ('Shannon Ramos', 'Michael Harding', 'michaelharding@yahoo.com', 'Music', 40), + ('Tyler Harris', 'James Jones', 'jamesjones@gmail.com', 'Math', 92), + ('Tyler Harris', 'James Mccarthy', 'jamesmccarthy@yahoo.com', 'History', 87), + ('Tyler Harris', 'Brett Bennett', 'brettbennett@gmail.com', 'Reading', 30), + ('Tyler Harris', 'Stephanie Ross', 'stephanieross@yahoo.com', 'Art', 66), + ('Tyler Harris', 'Barbara Riley', 'barbarariley@hotmail.com', 'Chemistry', 81), + ('Lee Henderson', 'Barbara Riley', 'barbarariley@hotmail.com', 'Chemistry', 59), + ('Lee Henderson', 'Krista Ramirez', 'kristaramirez@yahoo.com', 'History', 33), + ('Lee Henderson', 'Brett Bennett', 'brettbennett@gmail.com', 'Reading', 82), + ('Lee Henderson', 'Michael Harding', 'michaelharding@yahoo.com', 'Art', 95), + ('Lee Henderson', 'Danny Davis', 'dannydavis@yahoo.com', 'Reading', 93), + ('Amber Swanson', 'Whitney Figueroa', 'whitneyfigueroa@gmail.com', 'Math', 67), + ('Amber Swanson', 'Michael Harding', 'michaelharding@yahoo.com', 'Art', 62), + ('Amber Swanson', 'Julie Garza', 'juliegarza@yahoo.com', 'Math', 65), + ('Amber Swanson', 'Brooke Bowen', 'brookebowen@yahoo.com', 'History', 47), + ('Amber Swanson', 
'Jason Aguilar', 'jasonaguilar@gmail.com', 'Chemistry', 44), + ('Jeffrey Juarez', 'Brett Bennett', 'brettbennett@gmail.com', 'Writing', 65), + ('Jeffrey Juarez', 'Amber Hudson', 'amberhudson@hotmail.com', 'Art', 57), + ('Jeffrey Juarez', 'Jason Aguilar', 'jasonaguilar@gmail.com', 'Chemistry', 47), + ('Jeffrey Juarez', 'Deanna Juarez', 'deannajuarez@hotmail.com', 'Biology', 73), + ('Jeffrey Juarez', 'Danny Davis', 'dannydavis@yahoo.com', 'Reading', 49), + ('Jennifer Carlson', 'Barbara Riley', 'barbarariley@hotmail.com', 'Biology', 61), + ('Jennifer Carlson', 'Jennifer Anderson', 'jenniferanderson@yahoo.com', 'Art', 68), + ('Jennifer Carlson', 'Brooke Bowen', 'brookebowen@yahoo.com', 'History', 68), + ('Jennifer Carlson', 'Whitney Figueroa', 'whitneyfigueroa@gmail.com', 'Physics', 43), + ('Jennifer Carlson', 'James Mccarthy', 'jamesmccarthy@yahoo.com', 'History', 80), + ('Chelsea Smith', 'Barbara Riley', 'barbarariley@hotmail.com', 'Chemistry', 37), + ('Chelsea Smith', 'Whitney Figueroa', 'whitneyfigueroa@gmail.com', 'Physics', 95), + ('Chelsea Smith', 'Stephanie Ross', 'stephanieross@yahoo.com', 'Art', 49), + ('Chelsea Smith', 'Joseph Hill', 'josephhill@gmail.com', 'Biology', 75), + ('Chelsea Smith', 'Brooke Bowen', 'brookebowen@yahoo.com', 'P.E.', 100), + ('Dana Webb', 'Deanna Juarez', 'deannajuarez@hotmail.com', 'Biology', 87), + ('Dana Webb', 'Michael Harding', 'michaelharding@yahoo.com', 'Music', 87), + ('Dana Webb', 'Barbara Riley', 'barbarariley@hotmail.com', 'Chemistry', 78), + ('Dana Webb', 'Teresa Chambers', 'teresachambers@hotmail.com', 'Math', 34), + ('Dana Webb', 'Danny Davis', 'dannydavis@yahoo.com', 'Reading', 83), + ('Philip Taylor', 'Amber Hudson', 'amberhudson@hotmail.com', 'Music', 39), + ('Philip Taylor', 'Brett Bennett', 'brettbennett@gmail.com', 'Reading', 48), + ('Philip Taylor', 'Joseph Hill', 'josephhill@gmail.com', 'Biology', 84), + ('Philip Taylor', 'Joseph Hill', 'josephhill@gmail.com', 'Chemistry', 26), + ('Philip Taylor', 'Teresa Chambers', 'teresachambers@hotmail.com', 'Math', 92), + ('Christopher Bell', 'Danny Davis', 'dannydavis@hotmail.com', 'Writing', 96), + ('Christopher Bell', 'James Mccarthy', 'jamesmccarthy@yahoo.com', 'History', 74), + ('Christopher Bell', 'Barbara Riley', 'barbarariley@hotmail.com', 'Biology', 64), + ('Christopher Bell', 'Amber Hudson', 'amberhudson@hotmail.com', 'Music', 83), + ('Christopher Bell', 'Stephanie Ross', 'stephanieross@yahoo.com', 'Art', 90), + ('Stacy Barnett', 'Barbara Riley', 'barbarariley@hotmail.com', 'Biology', 55), + ('Stacy Barnett', 'Danny Davis', 'dannydavis@yahoo.com', 'Reading', 99), + ('Stacy Barnett', 'Stephanie Ross', 'stephanieross@yahoo.com', 'Art', 70), + ('Stacy Barnett', 'Teresa Chambers', 'teresachambers@gmail.com', 'Physics', 78), + ('Stacy Barnett', 'Jean Hayes DVM', 'jeanhayesdvm@hotmail.com', 'P.E.', 72), + ('Mary Carroll', 'Brooke Bowen', 'brookebowen@yahoo.com', 'History', 73), + ('Mary Carroll', 'Stephanie Ross', 'stephanieross@yahoo.com', 'Art', 87), + ('Mary Carroll', 'Grant Mcdonald', 'grantmcdonald@gmail.com', 'Writing', 37), + ('Mary Carroll', 'Krista Ramirez', 'kristaramirez@yahoo.com', 'P.E.', 98), + ('Mary Carroll', 'Brett Bennett', 'brettbennett@gmail.com', 'Writing', 57), + ('Susan Hoover', 'Deanna Juarez', 'deannajuarez@hotmail.com', 'Chemistry', 41), + ('Susan Hoover', 'Brett Bennett', 'brettbennett@gmail.com', 'Reading', 77), + ('Susan Hoover', 'Amber Hudson', 'amberhudson@hotmail.com', 'Music', 48), + ('Susan Hoover', 'Krista Ramirez', 'kristaramirez@yahoo.com', 'History', 41), + 
('Susan Hoover', 'Stephanie Ross', 'stephanieross@yahoo.com', 'Art', 89), + ('Jennifer Park', 'Danny Davis', 'dannydavis@yahoo.com', 'Reading', 96), + ('Jennifer Park', 'James Mccarthy', 'jamesmccarthy@yahoo.com', 'History', 25), + ('Jennifer Park', 'Deanna Juarez', 'deannajuarez@hotmail.com', 'Chemistry', 43), + ('Jennifer Park', 'Jason Aguilar', 'jasonaguilar@gmail.com', 'Biology', 50), + ('Jennifer Park', 'Barbara Riley', 'barbarariley@hotmail.com', 'Chemistry', 82), + ('Jennifer Ortiz', 'Barbara Riley', 'barbarariley@hotmail.com', 'Chemistry', 94), + ('Jennifer Ortiz', 'Jason Aguilar', 'jasonaguilar@gmail.com', 'Chemistry', 26), + ('Jennifer Ortiz', 'Teresa Chambers', 'teresachambers@hotmail.com', 'Math', 28), + ('Jennifer Ortiz', 'Barbara Riley', 'barbarariley@hotmail.com', 'Biology', 33), + ('Jennifer Ortiz', 'Anna Cortez', 'annacortez@yahoo.com', 'Writing', 98), + ('Robert Lamb', 'Krista Ramirez', 'kristaramirez@yahoo.com', 'History', 89), + ('Robert Lamb', 'Deanna Juarez', 'deannajuarez@hotmail.com', 'Chemistry', 99), + ('Robert Lamb', 'Barbara Riley', 'barbarariley@hotmail.com', 'Chemistry', 55), + ('Robert Lamb', 'Anna Cortez', 'annacortez@yahoo.com', 'Writing', 32), + ('Robert Lamb', 'Jason Aguilar', 'jasonaguilar@gmail.com', 'Biology', 83), + ('Judy Martinez', 'Danny Davis', 'dannydavis@hotmail.com', 'Writing', 99), + ('Judy Martinez', 'Grant Mcdonald', 'grantmcdonald@gmail.com', 'Writing', 59), + ('Judy Martinez', 'Grant Mcdonald', 'grantmcdonald@hotmail.com', 'Reading', 66), + ('Judy Martinez', 'Jean Hayes DVM', 'jeanhayesdvm@hotmail.com', 'P.E.', 83), + ('Judy Martinez', 'Teresa Chambers', 'teresachambers@hotmail.com', 'Math', 75), + ('Christy Meyer', 'Teresa Chambers', 'teresachambers@hotmail.com', 'Math', 60), + ('Christy Meyer', 'Barbara Riley', 'barbarariley@hotmail.com', 'Chemistry', 90), + ('Christy Meyer', 'Brett Bennett', 'brettbennett@gmail.com', 'Writing', 72), + ('Christy Meyer', 'Joseph Hill', 'josephhill@gmail.com', 'Biology', 37), + ('Christy Meyer', 'Stephanie Ross', 'stephanieross@yahoo.com', 'Art', 78), + ('Evelyn Anderson', 'Brett Bennett', 'brettbennett@gmail.com', 'Writing', 64), + ('Evelyn Anderson', 'Jean Hayes DVM', 'jeanhayesdvm@hotmail.com', 'History', 68), + ('Evelyn Anderson', 'Danny Davis', 'dannydavis@yahoo.com', 'Reading', 49), + ('Evelyn Anderson', 'Amber Hudson', 'amberhudson@hotmail.com', 'Art', 42), + ('Evelyn Anderson', 'Krista Ramirez', 'kristaramirez@yahoo.com', 'History', 95), + ('Bethany Bell', 'Michael Harding', 'michaelharding@yahoo.com', 'Art', 36), + ('Bethany Bell', 'Julie Garza', 'juliegarza@yahoo.com', 'Physics', 62), + ('Bethany Bell', 'James Mccarthy', 'jamesmccarthy@yahoo.com', 'History', 50), + ('Bethany Bell', 'Grant Mcdonald', 'grantmcdonald@gmail.com', 'Writing', 93), + ('Bethany Bell', 'Deanna Juarez', 'deannajuarez@hotmail.com', 'Chemistry', 73), + ('Leslie Hart', 'Grant Mcdonald', 'grantmcdonald@gmail.com', 'Writing', 45), + ('Leslie Hart', 'Amber Hudson', 'amberhudson@hotmail.com', 'Music', 79), + ('Leslie Hart', 'Krista Ramirez', 'kristaramirez@yahoo.com', 'P.E.', 57), + ('Leslie Hart', 'Stephanie Ross', 'stephanieross@yahoo.com', 'Music', 76), + ('Leslie Hart', 'James Jones', 'jamesjones@gmail.com', 'Math', 75), + ('Carolyn Durham', 'James Mccarthy', 'jamesmccarthy@yahoo.com', 'P.E.', 60), + ('Carolyn Durham', 'Stephanie Ross', 'stephanieross@yahoo.com', 'Music', 28), + ('Carolyn Durham', 'Barbara Riley', 'barbarariley@hotmail.com', 'Biology', 25), + ('Carolyn Durham', 'Grant Mcdonald', 'grantmcdonald@hotmail.com', 
'Reading', 49), + ('Carolyn Durham', 'Whitney Figueroa', 'whitneyfigueroa@gmail.com', 'Physics', 69), + ('Daniel Martin', 'Michael Harding', 'michaelharding@yahoo.com', 'Music', 60), + ('Daniel Martin', 'Krista Ramirez', 'kristaramirez@yahoo.com', 'P.E.', 32), + ('Daniel Martin', 'Anna Cortez', 'annacortez@yahoo.com', 'Reading', 75), + ('Daniel Martin', 'Julie Garza', 'juliegarza@yahoo.com', 'Physics', 78), + ('Daniel Martin', 'Barbara Riley', 'barbarariley@hotmail.com', 'Biology', 74), + ('Jessica Jackson', 'Danny Davis', 'dannydavis@hotmail.com', 'Writing', 34), + ('Jessica Jackson', 'Whitney Figueroa', 'whitneyfigueroa@gmail.com', 'Math', 78), + ('Jessica Jackson', 'Jason Aguilar', 'jasonaguilar@gmail.com', 'Chemistry', 67), + ('Jessica Jackson', 'Joseph Hill', 'josephhill@gmail.com', 'Biology', 68), + ('Jessica Jackson', 'James Mccarthy', 'jamesmccarthy@yahoo.com', 'History', 88), + ('Stephanie Mendez', 'Brooke Bowen', 'brookebowen@yahoo.com', 'History', 93), + ('Stephanie Mendez', 'Michael Harding', 'michaelharding@yahoo.com', 'Art', 73), + ('Stephanie Mendez', 'Jennifer Anderson', 'jenniferanderson@yahoo.com', 'Art', 27), + ('Stephanie Mendez', 'Teresa Chambers', 'teresachambers@gmail.com', 'Physics', 41), + ('Stephanie Mendez', 'Grant Mcdonald', 'grantmcdonald@hotmail.com', 'Reading', 98), + ('Kevin Griffith', 'Joseph Hill', 'josephhill@gmail.com', 'Chemistry', 54), + ('Kevin Griffith', 'Michael Harding', 'michaelharding@yahoo.com', 'Music', 57), + ('Kevin Griffith', 'Barbara Riley', 'barbarariley@hotmail.com', 'Chemistry', 92), + ('Kevin Griffith', 'Stephanie Ross', 'stephanieross@yahoo.com', 'Art', 82), + ('Kevin Griffith', 'Krista Ramirez', 'kristaramirez@yahoo.com', 'History', 48), + ('Debra Johnson', 'Barbara Riley', 'barbarariley@hotmail.com', 'Biology', 38), + ('Debra Johnson', 'Krista Ramirez', 'kristaramirez@yahoo.com', 'P.E.', 44), + ('Debra Johnson', 'Jean Hayes DVM', 'jeanhayesdvm@hotmail.com', 'History', 32), + ('Debra Johnson', 'Teresa Chambers', 'teresachambers@hotmail.com', 'Math', 32), + ('Debra Johnson', 'Michael Harding', 'michaelharding@yahoo.com', 'Art', 41), + ('Mark Frazier', 'Joseph Hill', 'josephhill@gmail.com', 'Biology', 78), + ('Mark Frazier', 'Amber Hudson', 'amberhudson@hotmail.com', 'Art', 25), + ('Mark Frazier', 'Julie Garza', 'juliegarza@yahoo.com', 'Math', 93), + ('Mark Frazier', 'Danny Davis', 'dannydavis@yahoo.com', 'Reading', 98), + ('Mark Frazier', 'Jennifer Anderson', 'jenniferanderson@yahoo.com', 'Music', 75), + ('Jessica Jones', 'Anna Cortez', 'annacortez@yahoo.com', 'Reading', 34), + ('Jessica Jones', 'Michael Harding', 'michaelharding@yahoo.com', 'Art', 46), + ('Jessica Jones', 'Grant Mcdonald', 'grantmcdonald@gmail.com', 'Writing', 95), + ('Jessica Jones', 'James Mccarthy', 'jamesmccarthy@yahoo.com', 'History', 41), + ('Jessica Jones', 'Deanna Juarez', 'deannajuarez@hotmail.com', 'Chemistry', 97), + ('Brandon Robinson', 'James Mccarthy', 'jamesmccarthy@yahoo.com', 'P.E.', 38), + ('Brandon Robinson', 'Jason Aguilar', 'jasonaguilar@gmail.com', 'Chemistry', 64), + ('Brandon Robinson', 'Grant Mcdonald', 'grantmcdonald@gmail.com', 'Writing', 53), + ('Brandon Robinson', 'Joseph Hill', 'josephhill@gmail.com', 'Chemistry', 56), + ('Brandon Robinson', 'Anna Cortez', 'annacortez@yahoo.com', 'Reading', 39), + ('Timothy Lowe', 'Krista Ramirez', 'kristaramirez@yahoo.com', 'P.E.', 43), + ('Timothy Lowe', 'Stephanie Ross', 'stephanieross@yahoo.com', 'Music', 74), + ('Timothy Lowe', 'James Mccarthy', 'jamesmccarthy@yahoo.com', 'History', 62), + ('Timothy 
Lowe', 'Teresa Chambers', 'teresachambers@hotmail.com', 'Math', 99), + ('Timothy Lowe', 'Grant Mcdonald', 'grantmcdonald@gmail.com', 'Writing', 76), + ('Samantha Rivera', 'James Jones', 'jamesjones@gmail.com', 'Math', 38), + ('Samantha Rivera', 'Joseph Hill', 'josephhill@gmail.com', 'Biology', 34), + ('Samantha Rivera', 'Stephanie Ross', 'stephanieross@yahoo.com', 'Art', 55), + ('Samantha Rivera', 'Jean Hayes DVM', 'jeanhayesdvm@hotmail.com', 'P.E.', 91), + ('Samantha Rivera', 'Danny Davis', 'dannydavis@yahoo.com', 'Reading', 35), + ('Matthew Brown', 'Jennifer Anderson', 'jenniferanderson@yahoo.com', 'Art', 37), + ('Matthew Brown', 'Whitney Figueroa', 'whitneyfigueroa@gmail.com', 'Math', 59), + ('Matthew Brown', 'James Jones', 'jamesjones@gmail.com', 'Math', 83), + ('Matthew Brown', 'Jason Aguilar', 'jasonaguilar@gmail.com', 'Chemistry', 100), + ('Matthew Brown', 'Michael Harding', 'michaelharding@yahoo.com', 'Music', 40), + ('Mary Gonzalez', 'Deanna Juarez', 'deannajuarez@hotmail.com', 'Chemistry', 30), + ('Mary Gonzalez', 'Krista Ramirez', 'kristaramirez@yahoo.com', 'P.E.', 50), + ('Mary Gonzalez', 'Jean Hayes DVM', 'jeanhayesdvm@hotmail.com', 'History', 52), + ('Mary Gonzalez', 'Brooke Bowen', 'brookebowen@yahoo.com', 'P.E.', 94), + ('Mary Gonzalez', 'James Jones', 'jamesjones@gmail.com', 'Physics', 39), + ('Mr. Patrick Weber MD', 'James Mccarthy', 'jamesmccarthy@yahoo.com', 'P.E.', 58), + ('Mr. Patrick Weber MD', 'Brooke Bowen', 'brookebowen@yahoo.com', 'History', 31), + ('Mr. Patrick Weber MD', 'Jennifer Anderson', 'jenniferanderson@yahoo.com', 'Art', 73), + ('Mr. Patrick Weber MD', 'Michael Harding', 'michaelharding@yahoo.com', 'Music', 72), + ('Mr. Patrick Weber MD', 'Julie Garza', 'juliegarza@yahoo.com', 'Math', 51), + ('Jill Walker', 'Stephanie Ross', 'stephanieross@yahoo.com', 'Music', 43), + ('Jill Walker', 'Brett Bennett', 'brettbennett@gmail.com', 'Writing', 80), + ('Jill Walker', 'Michael Harding', 'michaelharding@yahoo.com', 'Art', 25), + ('Jill Walker', 'Whitney Figueroa', 'whitneyfigueroa@gmail.com', 'Math', 39), + ('Jill Walker', 'James Mccarthy', 'jamesmccarthy@yahoo.com', 'History', 70), + ('Jacob Higgins', 'Teresa Chambers', 'teresachambers@gmail.com', 'Physics', 95), + ('Jacob Higgins', 'Barbara Riley', 'barbarariley@hotmail.com', 'Chemistry', 88), + ('Jacob Higgins', 'Brooke Bowen', 'brookebowen@yahoo.com', 'History', 47), + ('Jacob Higgins', 'Grant Mcdonald', 'grantmcdonald@hotmail.com', 'Reading', 59), + ('Jacob Higgins', 'Jason Aguilar', 'jasonaguilar@gmail.com', 'Chemistry', 53), + ('Paula Thompson', 'Jason Aguilar', 'jasonaguilar@gmail.com', 'Biology', 52), + ('Paula Thompson', 'Anna Cortez', 'annacortez@yahoo.com', 'Reading', 42), + ('Paula Thompson', 'Whitney Figueroa', 'whitneyfigueroa@gmail.com', 'Physics', 98), + ('Paula Thompson', 'Amber Hudson', 'amberhudson@hotmail.com', 'Art', 28), + ('Paula Thompson', 'Deanna Juarez', 'deannajuarez@hotmail.com', 'Chemistry', 53), + ('Tyler Phelps', 'Amber Hudson', 'amberhudson@hotmail.com', 'Music', 33), + ('Tyler Phelps', 'Brett Bennett', 'brettbennett@gmail.com', 'Writing', 91), + ('Tyler Phelps', 'Deanna Juarez', 'deannajuarez@hotmail.com', 'Chemistry', 81), + ('Tyler Phelps', 'Joseph Hill', 'josephhill@gmail.com', 'Chemistry', 30), + ('Tyler Phelps', 'James Mccarthy', 'jamesmccarthy@yahoo.com', 'History', 86), + ('John Schaefer', 'Whitney Figueroa', 'whitneyfigueroa@gmail.com', 'Physics', 44), + ('John Schaefer', 'Joseph Hill', 'josephhill@gmail.com', 'Biology', 69), + ('John Schaefer', 'Anna Cortez', 
'annacortez@yahoo.com', 'Writing', 80), + ('John Schaefer', 'Danny Davis', 'dannydavis@yahoo.com', 'Reading', 69), + ('John Schaefer', 'Joseph Hill', 'josephhill@gmail.com', 'Chemistry', 45), + ('Eric Kerr', 'Brooke Bowen', 'brookebowen@yahoo.com', 'P.E.', 45), + ('Eric Kerr', 'Teresa Chambers', 'teresachambers@hotmail.com', 'Math', 90), + ('Eric Kerr', 'Krista Ramirez', 'kristaramirez@yahoo.com', 'P.E.', 50), + ('Eric Kerr', 'Anna Cortez', 'annacortez@yahoo.com', 'Writing', 92), + ('Eric Kerr', 'Stephanie Ross', 'stephanieross@yahoo.com', 'Art', 77), + ('Mikayla Miller', 'Julie Garza', 'juliegarza@yahoo.com', 'Physics', 61), + ('Mikayla Miller', 'Brett Bennett', 'brettbennett@gmail.com', 'Writing', 30), + ('Mikayla Miller', 'Jennifer Anderson', 'jenniferanderson@yahoo.com', 'Art', 88), + ('Mikayla Miller', 'Deanna Juarez', 'deannajuarez@hotmail.com', 'Biology', 68), + ('Mikayla Miller', 'Anna Cortez', 'annacortez@yahoo.com', 'Writing', 41), + ('Alejandro Lam', 'Stephanie Ross', 'stephanieross@yahoo.com', 'Music', 48), + ('Alejandro Lam', 'Michael Harding', 'michaelharding@yahoo.com', 'Art', 40), + ('Alejandro Lam', 'Krista Ramirez', 'kristaramirez@yahoo.com', 'P.E.', 40), + ('Alejandro Lam', 'Deanna Juarez', 'deannajuarez@hotmail.com', 'Chemistry', 49), + ('Alejandro Lam', 'Barbara Riley', 'barbarariley@hotmail.com', 'Chemistry', 49), + ('Katelyn Ray', 'Danny Davis', 'dannydavis@yahoo.com', 'Reading', 60), + ('Katelyn Ray', 'Grant Mcdonald', 'grantmcdonald@hotmail.com', 'Reading', 65), + ('Katelyn Ray', 'Julie Garza', 'juliegarza@yahoo.com', 'Math', 82), + ('Katelyn Ray', 'Barbara Riley', 'barbarariley@hotmail.com', 'Chemistry', 70), + ('Katelyn Ray', 'Jason Aguilar', 'jasonaguilar@gmail.com', 'Biology', 59), + ('Carla Rivera', 'Amber Hudson', 'amberhudson@hotmail.com', 'Music', 67), + ('Carla Rivera', 'Julie Garza', 'juliegarza@yahoo.com', 'Physics', 70), + ('Carla Rivera', 'Amber Hudson', 'amberhudson@hotmail.com', 'Art', 94), + ('Carla Rivera', 'Anna Cortez', 'annacortez@yahoo.com', 'Reading', 36), + ('Carla Rivera', 'Michael Harding', 'michaelharding@yahoo.com', 'Art', 51), + ('Larry Alexander', 'Krista Ramirez', 'kristaramirez@yahoo.com', 'History', 57), + ('Larry Alexander', 'Joseph Hill', 'josephhill@gmail.com', 'Chemistry', 97), + ('Larry Alexander', 'Jennifer Anderson', 'jenniferanderson@yahoo.com', 'Art', 89), + ('Larry Alexander', 'Teresa Chambers', 'teresachambers@hotmail.com', 'Math', 66), + ('Larry Alexander', 'Brooke Bowen', 'brookebowen@yahoo.com', 'History', 92), + ('Michael Knox', 'Stephanie Ross', 'stephanieross@yahoo.com', 'Art', 72), + ('Michael Knox', 'Krista Ramirez', 'kristaramirez@yahoo.com', 'History', 65), + ('Michael Knox', 'James Mccarthy', 'jamesmccarthy@yahoo.com', 'History', 49), + ('Michael Knox', 'Barbara Riley', 'barbarariley@hotmail.com', 'Chemistry', 29), + ('Michael Knox', 'Jason Aguilar', 'jasonaguilar@gmail.com', 'Chemistry', 83), + ('Alexander Brown', 'Jennifer Anderson', 'jenniferanderson@yahoo.com', 'Music', 89), + ('Alexander Brown', 'Deanna Juarez', 'deannajuarez@hotmail.com', 'Chemistry', 94), + ('Alexander Brown', 'Anna Cortez', 'annacortez@yahoo.com', 'Writing', 93), + ('Alexander Brown', 'Whitney Figueroa', 'whitneyfigueroa@gmail.com', 'Math', 35), + ('Alexander Brown', 'Whitney Figueroa', 'whitneyfigueroa@gmail.com', 'Physics', 71), + ('Anne Sloan', 'Jennifer Anderson', 'jenniferanderson@yahoo.com', 'Art', 38), + ('Anne Sloan', 'Brooke Bowen', 'brookebowen@yahoo.com', 'P.E.', 69), + ('Anne Sloan', 'Danny Davis', 'dannydavis@yahoo.com', 
'Reading', 86), + ('Anne Sloan', 'Anna Cortez', 'annacortez@yahoo.com', 'Writing', 39), + ('Anne Sloan', 'James Mccarthy', 'jamesmccarthy@yahoo.com', 'P.E.', 96); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_extract_columns_data() RETURNS SETOF TEXT AS $f$ +BEGIN + PERFORM __setup_roster(); + CREATE TABLE roster_snapshot AS SELECT * FROM "Roster" ORDER BY id; + PERFORM msar.extract_columns_from_table('"Roster"'::regclass::oid, ARRAY[3, 4], 'Teachers', null); + RETURN NEXT columns_are('Teachers', ARRAY['id', 'Teacher', 'Teacher Email']); + RETURN NEXT columns_are('Roster', ARRAY['id', 'Student Name', 'Subject', 'Grade', 'Teachers_id']); + RETURN NEXT fk_ok('Roster', 'Teachers_id', 'Teachers', 'id'); + RETURN NEXT set_eq( + 'SELECT "Teacher", "Teacher Email" FROM "Teachers"', + 'SELECT DISTINCT "Teacher", "Teacher Email" FROM roster_snapshot', + 'Extracted data should be unique tuples' + ); + RETURN NEXT results_eq( + 'SELECT "Student Name", "Subject", "Grade" FROM "Roster" ORDER BY id', + 'SELECT "Student Name", "Subject", "Grade" FROM roster_snapshot ORDER BY id', + 'Remainder data should be unchanged' + ); + RETURN NEXT results_eq( + $q$ + SELECT r.id, "Student Name", "Teacher", "Teacher Email", "Subject", "Grade" + FROM "Roster" r LEFT JOIN "Teachers" t ON r."Teachers_id"=t.id ORDER BY r.id + $q$, + 'SELECT * FROM roster_snapshot ORDER BY id', + 'Joining extracted data should recover original' + ); + RETURN NEXT lives_ok( + $i$ + INSERT INTO "Teachers" ("Teacher", "Teacher Email") VALUES ('Miyagi', 'miyagi@karatekid.com') + $i$, + 'The new id column should be incremented to avoid collision' + ); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION __setup_extract_fkey_cols() RETURNS SETOF TEXT AS $$ +BEGIN +CREATE TABLE "Referent" ( + id integer PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, + "Teacher" text, + "Teacher Email" text +); +CREATE TABLE "Referrer" ( + id integer PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, + "Student Name" text, + "Subject" varchar(20), + "Grade" integer, + "Referent_id" integer REFERENCES "Referent" (id) +); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_extract_columns_keeps_fkey() RETURNS SETOF TEXT AS $f$ +BEGIN + PERFORM __setup_extract_fkey_cols(); + PERFORM msar.extract_columns_from_table( + '"Referrer"'::regclass::oid, ARRAY[3, 5], 'Classes', 'Class' + ); + RETURN NEXT columns_are('Referent', ARRAY['id', 'Teacher', 'Teacher Email']); + RETURN NEXT columns_are('Referrer', ARRAY['id', 'Student Name', 'Grade', 'Class']); + RETURN NEXT columns_are('Classes', ARRAY['id', 'Subject', 'Referent_id']); + RETURN NEXT fk_ok('Referrer', 'Class', 'Classes', 'id'); + RETURN NEXT fk_ok('Classes', 'Referent_id', 'Referent', 'id'); +END; +$f$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION __setup_dynamic_defaults() RETURNS SETOF TEXT AS $$ +BEGIN + CREATE TABLE defaults_test ( + id integer PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, + col1 integer DEFAULT 5, + col2 integer DEFAULT 3::integer, + col3 timestamp DEFAULT NOW(), + col4 date DEFAULT '2023-01-01', + col5 date DEFAULT CURRENT_DATE + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_is_possibly_dynamic() RETURNS SETOF TEXT AS $$ +DECLARE + tab_id oid; +BEGIN + PERFORM __setup_dynamic_defaults(); + tab_id := 'defaults_test'::regclass::oid; + RETURN NEXT is(msar.is_default_possibly_dynamic(tab_id, 1), true); + RETURN NEXT is(msar.is_default_possibly_dynamic(tab_id, 2), false); + RETURN NEXT is(msar.is_default_possibly_dynamic(tab_id, 3), 
false); + RETURN NEXT is(msar.is_default_possibly_dynamic(tab_id, 4), true); + RETURN NEXT is(msar.is_default_possibly_dynamic(tab_id, 5), false); + RETURN NEXT is(msar.is_default_possibly_dynamic(tab_id, 6), true); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION __setup_is_pkey_col_tests() RETURNS SETOF TEXT AS $$ +BEGIN + CREATE TABLE simple_pkey (col1 text, col2 text PRIMARY KEY, col3 integer); + CREATE TABLE multi_pkey (col1 text, col2 text, col3 integer); + ALTER TABLE multi_pkey ADD PRIMARY KEY (col1, col2); + CREATE TABLE no_pkey (col1 text, col2 text, col3 integer); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_is_pkey_col() RETURNS SETOF TEXT AS $$ +BEGIN + PERFORM __setup_is_pkey_col_tests(); + RETURN NEXT is(msar.is_pkey_col('simple_pkey'::regclass::oid, 1), false); + RETURN NEXT is(msar.is_pkey_col('simple_pkey'::regclass::oid, 2), true); + RETURN NEXT is(msar.is_pkey_col('simple_pkey'::regclass::oid, 3), false); + RETURN NEXT is(msar.is_pkey_col('multi_pkey'::regclass::oid, 1), true); + RETURN NEXT is(msar.is_pkey_col('multi_pkey'::regclass::oid, 2), true); + RETURN NEXT is(msar.is_pkey_col('multi_pkey'::regclass::oid, 3), false); + RETURN NEXT is(msar.is_pkey_col('no_pkey'::regclass::oid, 1), false); + RETURN NEXT is(msar.is_pkey_col('no_pkey'::regclass::oid, 2), false); + RETURN NEXT is(msar.is_pkey_col('no_pkey'::regclass::oid, 3), false); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_create_role() RETURNS SETOF TEXT AS $$ +BEGIN + PERFORM msar.create_role('testuser', 'mypass1234', true); + RETURN NEXT database_privs_are('mathesar_testing', 'testuser', ARRAY['CONNECT', 'TEMPORARY']); + PERFORM msar.create_role( + 'Ro"\bert''); DROP SCHEMA public;', 'my''pass1234"; DROP SCHEMA public;', true + ); + RETURN NEXT has_schema('public'); + RETURN NEXT has_user('Ro"\bert''); DROP SCHEMA public;'); + RETURN NEXT database_privs_are ( + 'mathesar_testing', 'Ro"\bert''); DROP SCHEMA public;', ARRAY['CONNECT', 'TEMPORARY'] + ); + PERFORM msar.create_role('testnopass', null, null); + RETURN NEXT database_privs_are('mathesar_testing', 'testnopass', ARRAY['CONNECT', 'TEMPORARY']); +END; +$$ LANGUAGE plpgsql; + + +-- msar.get_column_info (and related) -------------------------------------------------------------- + +CREATE OR REPLACE FUNCTION __setup_manytypes() RETURNS SETOF TEXT AS $$ +BEGIN +CREATE TABLE manytypes ( + id integer PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, + -- To fend off likely typos, we check many combinations of field and precision settings. 
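+ -- (Context note: an interval column's atttypmod packs the allowed fields mask + -- into its high bits and the seconds precision into its low 16 bits; + -- msar.get_interval_fields and msar.get_type_options, exercised below, decode + -- that encoding.)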
+ ivl_plain interval, + ivl_yr interval year, + ivl_mo interval month, + ivl_dy interval day, + ivl_hr interval hour, + ivl_mi interval minute, + ivl_se interval second, + ivl_ye_mo interval year to month, + ivl_dy_hr interval day to hour, + ivl_dy_mi interval day to minute, + ivl_dy_se interval day to second, + ivl_hr_mi interval hour to minute, + ivl_hr_se interval hour to second, + ivl_mi_se interval minute to second, + ivl_se_0 interval second(0), + ivl_se_3 interval second(3), + ivl_se_6 interval second(6), + ivl_dy_se0 interval day to second(0), + ivl_dy_se3 interval day to second(3), + ivl_dy_se6 interval day to second(6), + ivl_hr_se0 interval hour to second(0), + ivl_hr_se3 interval hour to second(3), + ivl_hr_se6 interval hour to second(6), + ivl_mi_se0 interval minute to second(0), + ivl_mi_se3 interval minute to second(3), + ivl_mi_se6 interval minute to second(6), + -- Below here is less thorough, more ad-hoc + ivl_plain_arr interval[], + ivl_mi_se6_arr interval minute to second(6)[2][2], + num_plain numeric, + num_8 numeric(8), + num_17_2 numeric(17, 2), + num_plain_arr numeric[], + num_17_2_arr numeric(17, 2)[], + var_plain varchar, + var_16 varchar(16), + var_255 varchar(255), + cha_1 character, + cha_20 character(20), + var_16_arr varchar(16)[], + cha_20_arr character(20)[][], + bit_8 bit(8), + vbt_8 varbit(8), + tim_2 time(2), + ttz_3 timetz(3), + tsp_4 timestamp(4), + tsz_5 timestamptz(5) +); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_get_interval_fields() RETURNS SETOF TEXT AS $$ +BEGIN + PERFORM __setup_manytypes(); + RETURN NEXT results_eq( + $h$ + SELECT msar.get_interval_fields(atttypmod) + FROM pg_attribute + WHERE attrelid='manytypes'::regclass AND atttypid='interval'::regtype + ORDER BY attnum; + $h$, + $w$ + VALUES + (NULL), + ('year'), + ('month'), + ('day'), + ('hour'), + ('minute'), + ('second'), + ('year to month'), + ('day to hour'), + ('day to minute'), + ('day to second'), + ('hour to minute'), + ('hour to second'), + ('minute to second'), + ('second'), + ('second'), + ('second'), + ('day to second'), + ('day to second'), + ('day to second'), + ('hour to second'), + ('hour to second'), + ('hour to second'), + ('minute to second'), + ('minute to second'), + ('minute to second') + $w$ + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_get_type_options() RETURNS SETOF TEXT AS $$ +BEGIN + PERFORM __setup_manytypes(); + RETURN NEXT is(msar.get_type_options(atttypid, atttypmod, attndims), NULL) + FROM pg_attribute WHERE attrelid='manytypes'::regclass AND attname='id'; + RETURN NEXT is( + msar.get_type_options(atttypid, atttypmod, attndims), + '{"fields": null, "precision": null}'::jsonb + ) + FROM pg_attribute WHERE attrelid='manytypes'::regclass AND attname='ivl_plain'; + RETURN NEXT is( + msar.get_type_options(atttypid, atttypmod, attndims), + '{"fields": "day to second", "precision": null}'::jsonb + ) + FROM pg_attribute WHERE attrelid='manytypes'::regclass AND attname='ivl_dy_se'; + RETURN NEXT is( + msar.get_type_options(atttypid, atttypmod, attndims), + '{"fields": "second", "precision": 3}'::jsonb + ) + FROM pg_attribute WHERE attrelid='manytypes'::regclass AND attname='ivl_se_3'; + RETURN NEXT is( + msar.get_type_options(atttypid, atttypmod, attndims), + '{"fields": "hour to second", "precision": 0}'::jsonb + ) + FROM pg_attribute WHERE attrelid='manytypes'::regclass AND attname='ivl_hr_se0'; + RETURN NEXT is( + msar.get_type_options(atttypid, atttypmod, attndims), + '{"fields": null, "precision": null, 
"item_type": "interval"}'::jsonb + ) + FROM pg_attribute WHERE attrelid='manytypes'::regclass AND attname='ivl_plain_arr'; + RETURN NEXT is( + msar.get_type_options(atttypid, atttypmod, attndims), + '{"fields": "minute to second", "precision": 6, "item_type": "interval"}'::jsonb + ) + FROM pg_attribute WHERE attrelid='manytypes'::regclass AND attname='ivl_mi_se6_arr'; + RETURN NEXT is( + msar.get_type_options(atttypid, atttypmod, attndims), + '{"precision": null, "scale": null}'::jsonb + ) + FROM pg_attribute WHERE attrelid='manytypes'::regclass AND attname='num_plain'; + RETURN NEXT is( + msar.get_type_options(atttypid, atttypmod, attndims), + '{"precision": 8, "scale": 0}'::jsonb + ) + FROM pg_attribute WHERE attrelid='manytypes'::regclass AND attname='num_8'; + RETURN NEXT is( + msar.get_type_options(atttypid, atttypmod, attndims), + '{"precision": 17, "scale": 2}'::jsonb + ) + FROM pg_attribute WHERE attrelid='manytypes'::regclass AND attname='num_17_2'; + RETURN NEXT is( + msar.get_type_options(atttypid, atttypmod, attndims), + '{"precision": null, "scale": null, "item_type": "numeric"}'::jsonb + ) + FROM pg_attribute WHERE attrelid='manytypes'::regclass AND attname='num_plain_arr'; + RETURN NEXT is( + msar.get_type_options(atttypid, atttypmod, attndims), + '{"precision": 17, "scale": 2, "item_type": "numeric"}'::jsonb + ) + FROM pg_attribute WHERE attrelid='manytypes'::regclass AND attname='num_17_2_arr'; + RETURN NEXT is( + msar.get_type_options(atttypid, atttypmod, attndims), + '{"length": null}'::jsonb + ) + FROM pg_attribute WHERE attrelid='manytypes'::regclass AND attname='var_plain'; + RETURN NEXT is( + msar.get_type_options(atttypid, atttypmod, attndims), + '{"length": 16}'::jsonb + ) + FROM pg_attribute WHERE attrelid='manytypes'::regclass AND attname='var_16'; + RETURN NEXT is( + msar.get_type_options(atttypid, atttypmod, attndims), + '{"length": 255}'::jsonb + ) + FROM pg_attribute WHERE attrelid='manytypes'::regclass AND attname='var_255'; + RETURN NEXT is( + msar.get_type_options(atttypid, atttypmod, attndims), + '{"length": 1}'::jsonb + ) + FROM pg_attribute WHERE attrelid='manytypes'::regclass AND attname='cha_1'; + RETURN NEXT is( + msar.get_type_options(atttypid, atttypmod, attndims), + '{"length": 20}'::jsonb + ) + FROM pg_attribute WHERE attrelid='manytypes'::regclass AND attname='cha_20'; + RETURN NEXT is( + msar.get_type_options(atttypid, atttypmod, attndims), + '{"length": 16, "item_type": "character varying"}'::jsonb + ) + FROM pg_attribute WHERE attrelid='manytypes'::regclass AND attname='var_16_arr'; + RETURN NEXT is( + msar.get_type_options(atttypid, atttypmod, attndims), + '{"length": 20, "item_type": "character"}'::jsonb + ) + FROM pg_attribute WHERE attrelid='manytypes'::regclass AND attname='cha_20_arr'; + RETURN NEXT is( + msar.get_type_options(atttypid, atttypmod, attndims), + '{"precision": 8}'::jsonb + ) + FROM pg_attribute WHERE attrelid='manytypes'::regclass AND attname='bit_8'; + RETURN NEXT is( + msar.get_type_options(atttypid, atttypmod, attndims), + '{"precision": 8}'::jsonb + ) + FROM pg_attribute WHERE attrelid='manytypes'::regclass AND attname='vbt_8'; + RETURN NEXT is( + msar.get_type_options(atttypid, atttypmod, attndims), + '{"precision": 2}'::jsonb + ) + FROM pg_attribute WHERE attrelid='manytypes'::regclass AND attname='tim_2'; + RETURN NEXT is( + msar.get_type_options(atttypid, atttypmod, attndims), + '{"precision": 3}'::jsonb + ) + FROM pg_attribute WHERE attrelid='manytypes'::regclass AND attname='ttz_3'; + RETURN NEXT is( + 
msar.get_type_options(atttypid, atttypmod, attndims), + '{"precision": 4}'::jsonb + ) + FROM pg_attribute WHERE attrelid='manytypes'::regclass AND attname='tsp_4'; + RETURN NEXT is( + msar.get_type_options(atttypid, atttypmod, attndims), + '{"precision": 5}'::jsonb + ) + FROM pg_attribute WHERE attrelid='manytypes'::regclass AND attname='tsz_5'; +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION __setup_cast_functions() RETURNS SETOF TEXT AS $$ +BEGIN + CREATE SCHEMA mathesar_types; + CREATE FUNCTION mathesar_types.cast_to_numeric(text) RETURNS numeric AS 'SELECT 5' LANGUAGE SQL; + CREATE FUNCTION mathesar_types.cast_to_text(text) RETURNS text AS 'SELECT ''5''' LANGUAGE SQL; +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_get_valid_target_type_strings() RETURNS SETOF TEXT AS $$ +BEGIN + PERFORM __setup_cast_functions(); + + RETURN NEXT ok(msar.get_valid_target_type_strings('text') @> '["numeric", "text"]'); + RETURN NEXT is(jsonb_array_length(msar.get_valid_target_type_strings('text')), 2); + + RETURN NEXT ok(msar.get_valid_target_type_strings('text'::regtype::oid) @> '["numeric", "text"]'); + RETURN NEXT is(jsonb_array_length(msar.get_valid_target_type_strings('text'::regtype::oid)), 2); + + RETURN NEXT is(msar.get_valid_target_type_strings('interval'), NULL); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_has_dependents() RETURNS SETOF TEXT AS $$ +BEGIN + PERFORM __setup_extract_fkey_cols(); + RETURN NEXT is(msar.has_dependents('"Referent"'::regclass::oid, 1::smallint), true); + RETURN NEXT is(msar.has_dependents('"Referent"'::regclass::oid, 2::smallint), false); + RETURN NEXT is(msar.has_dependents('"Referrer"'::regclass::oid, 1::smallint), false); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION __setup_get_column_info() RETURNS SETOF TEXT AS $$ +BEGIN + PERFORM __setup_cast_functions(); + CREATE TABLE column_variety ( + id integer PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, + num_plain numeric NOT NULL, + var_128 varchar(128), + txt text DEFAULT 'abc', + tst timestamp DEFAULT NOW(), + int_arr integer[4][3], + num_opt_arr numeric(15, 10)[] + ); + COMMENT ON COLUMN column_variety.txt IS 'A super comment ;'; + CREATE TABLE needs_cv ( + id integer PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, + cv_id integer REFERENCES column_variety(id) + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_get_column_info() RETURNS SETOF TEXT AS $$ +BEGIN + PERFORM __setup_get_column_info(); + RETURN NEXT is( + msar.get_column_info('column_variety'), + $j$[ + { + "id": 1, + "name": "id", + "type": "integer", + "default": { + "value": "identity", + "is_dynamic": true + }, + "nullable": false, + "description": null, + "primary_key": true, + "type_options": null, + "has_dependents": true, + "current_role_priv": ["SELECT", "INSERT", "UPDATE", "REFERENCES"], + "valid_target_types": null + }, + { + "id": 2, + "name": "num_plain", + "type": "numeric", + "default": null, + "nullable": false, + "description": null, + "primary_key": false, + "type_options": { + "scale": null, + "precision": null + }, + "has_dependents": false, + "current_role_priv": ["SELECT", "INSERT", "UPDATE", "REFERENCES"], + "valid_target_types": null + }, + { + "id": 3, + "name": "var_128", + "type": "character varying", + "default": null, + "nullable": true, + "description": null, + "primary_key": false, + "type_options": { + "length": 128 + }, + "has_dependents": false, + "current_role_priv": ["SELECT", "INSERT", "UPDATE", "REFERENCES"], + "valid_target_types": null + 
}, + { + "id": 4, + "name": "txt", + "type": "text", + "default": { + "value": "'abc'::text", + "is_dynamic": false + }, + "nullable": true, + "description": "A super comment ;", + "primary_key": false, + "type_options": null, + "has_dependents": false, + "current_role_priv": ["SELECT", "INSERT", "UPDATE", "REFERENCES"], + "valid_target_types": ["numeric", "text"] + }, + { + "id": 5, + "name": "tst", + "type": "timestamp without time zone", + "default": { + "value": "now()", + "is_dynamic": true + }, + "nullable": true, + "description": null, + "primary_key": false, + "type_options": { + "precision": null + }, + "has_dependents": false, + "current_role_priv": ["SELECT", "INSERT", "UPDATE", "REFERENCES"], + "valid_target_types": null + }, + { + "id": 6, + "name": "int_arr", + "type": "_array", + "default": null, + "nullable": true, + "description": null, + "primary_key": false, + "type_options": { + "item_type": "integer" + }, + "has_dependents": false, + "current_role_priv": ["SELECT", "INSERT", "UPDATE", "REFERENCES"], + "valid_target_types": null + }, + { + "id": 7, + "name": "num_opt_arr", + "type": "_array", + "default": null, + "nullable": true, + "description": null, + "primary_key": false, + "type_options": { + "scale": 10, + "item_type": "numeric", + "precision": 15 + }, + "has_dependents": false, + "current_role_priv": ["SELECT", "INSERT", "UPDATE", "REFERENCES"], + "valid_target_types": null + } + ]$j$::jsonb + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION __setup_get_table_info() RETURNS SETOF TEXT AS $$ +BEGIN + CREATE SCHEMA pi; + -- Two tables with one having description + CREATE TABLE pi.three(id INT GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY); + CREATE TABLE pi.one(id INT GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY); + COMMENT ON TABLE pi.one IS 'first decimal digit of pi'; + + CREATE SCHEMA alice; + -- No tables in the schema +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_get_table_info() RETURNS SETOF TEXT AS $$ +DECLARE + pi_table_info jsonb; + alice_table_info jsonb; +BEGIN + PERFORM __setup_get_table_info(); + SELECT msar.get_table_info('pi') INTO pi_table_info; + SELECT msar.get_table_info('alice') INTO alice_table_info; + + -- Test table info for schema 'pi' + -- Check if all the required keys exist in the json blob + -- Check whether the correct name is returned + -- Check whether the correct description is returned + RETURN NEXT is( + pi_table_info->0 ?& array['oid', 'name', 'schema', 'description'], true + ); + RETURN NEXT is( + pi_table_info->0->>'name', 'three' + ); + RETURN NEXT is( + pi_table_info->0->>'description', null + ); + + RETURN NEXT is( + pi_table_info->1 ?& array['oid', 'name', 'schema', 'description'], true + ); + RETURN NEXT is( + pi_table_info->1->>'name', 'one' + ); + RETURN NEXT is( + pi_table_info->1->>'description', 'first decimal digit of pi' + ); + + -- Test table info for schema 'alice' that contains no tables + RETURN NEXT is( + alice_table_info, '[]'::jsonb + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_list_schemas() RETURNS SETOF TEXT AS $$ +DECLARE + initial_schema_count int; + foo_schema jsonb; +BEGIN + -- Get the initial schema count + SELECT jsonb_array_length(msar.list_schemas()) INTO initial_schema_count; + + -- Create a schema + CREATE SCHEMA foo; + -- We should now have one additional schema + RETURN NEXT is(jsonb_array_length(msar.list_schemas()), initial_schema_count + 1); + -- Reflect the "foo" schema + SELECT jsonb_path_query(msar.list_schemas(), '$[*] ? 
(@.name == "foo")') INTO foo_schema; + -- We should have a foo schema object + RETURN NEXT is(jsonb_typeof(foo_schema), 'object'); + -- It should have no description + RETURN NEXT is(jsonb_typeof(foo_schema->'description'), 'null'); + -- It should have no tables + RETURN NEXT is((foo_schema->'table_count')::int, 0); + + -- Add a comment + COMMENT ON SCHEMA foo IS 'A test schema'; + -- Create two tables + CREATE TABLE foo.test_table_1 (id serial PRIMARY KEY); + CREATE TABLE foo.test_table_2 (id serial PRIMARY KEY); + -- Reflect again + SELECT jsonb_path_query(msar.list_schemas(), '$[*] ? (@.name == "foo")') INTO foo_schema; + -- We should see the description we set + RETURN NEXT is(foo_schema->'description'#>>'{}', 'A test schema'); + -- We should see two tables + RETURN NEXT is((foo_schema->'table_count')::int, 2); + + -- Drop the tables we created + DROP TABLE foo.test_table_1; + DROP TABLE foo.test_table_2; + -- Reflect the "foo" schema + SELECT jsonb_path_query(msar.list_schemas(), '$[*] ? (@.name == "foo")') INTO foo_schema; + -- The "foo" schema should now have no tables + RETURN NEXT is((foo_schema->'table_count')::int, 0); + + -- Drop the "foo" schema + DROP SCHEMA foo; + -- We should now have no "foo" schema + RETURN NEXT ok(NOT jsonb_path_exists(msar.list_schemas(), '$[*] ? (@.name == "foo")')); + -- We should see the initial schema count again + RETURN NEXT is(jsonb_array_length(msar.list_schemas()), initial_schema_count); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_list_schema_privileges_basic() RETURNS SETOF TEXT AS $$ +BEGIN +CREATE SCHEMA restricted; +RETURN NEXT is( + msar.list_schema_privileges('restricted'::regnamespace), + format('[{"direct": ["USAGE", "CREATE"], "role_oid": %s}]', 'mathesar'::regrole::oid)::jsonb, + 'Initially, only privileges for creator' +); +CREATE USER "Alice"; +RETURN NEXT is( + msar.list_schema_privileges('restricted'::regnamespace), + format('[{"direct": ["USAGE", "CREATE"], "role_oid": %s}]', 'mathesar'::regrole::oid)::jsonb, + 'Alice should not have any privileges' +); +GRANT USAGE ON SCHEMA restricted TO "Alice"; +RETURN NEXT is( + msar.list_schema_privileges('restricted'::regnamespace), + format( + '[{"direct": ["USAGE", "CREATE"], "role_oid": %1$s}, {"direct": ["USAGE"], "role_oid": %2$s}]', + 'mathesar'::regrole::oid, + '"Alice"'::regrole::oid + )::jsonb, + 'Alice should have her new schema privileges' +); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_list_table_privileges_basic() RETURNS SETOF TEXT AS $$ +BEGIN +CREATE TABLE restricted_table(); +RETURN NEXT is( + msar.list_table_privileges('restricted_table'::regclass), + format( + '[{"direct": ["INSERT", "SELECT", "UPDATE", "DELETE", "TRUNCATE", "REFERENCES", "TRIGGER"], "role_oid": %s}]', + 'mathesar'::regrole::oid + )::jsonb, + 'Initially, only privileges for creator' +); +CREATE USER "Alice"; +RETURN NEXT is( + msar.list_table_privileges('restricted_table'::regclass), + format( + '[{"direct": ["INSERT", "SELECT", "UPDATE", "DELETE", "TRUNCATE", "REFERENCES", "TRIGGER"], "role_oid": %s}]', + 'mathesar'::regrole::oid + )::jsonb, + 'Alice should not have any privileges on restricted_table' +); +GRANT SELECT, DELETE ON TABLE restricted_table TO "Alice"; +RETURN NEXT is( + msar.list_table_privileges('restricted_table'::regclass), + format( + '[{"direct": ["INSERT", "SELECT", "UPDATE", "DELETE", "TRUNCATE", "REFERENCES", "TRIGGER"], "role_oid": %1$s}, + {"direct": ["SELECT", "DELETE"], "role_oid": %2$s}]', + 'mathesar'::regrole::oid, + 
'"Alice"'::regrole::oid + )::jsonb, + 'Alice should have SELECT & DELETE privileges on restricted_table' +); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_list_roles() RETURNS SETOF TEXT AS $$ +DECLARE + initial_role_count int; + foo_role jsonb; + bar_role jsonb; +BEGIN + SELECT jsonb_array_length(msar.list_roles()) INTO initial_role_count; + + -- Create role and check if role is present in response & count is increased + CREATE ROLE foo; + RETURN NEXT is(jsonb_array_length(msar.list_roles()), initial_role_count + 1); + SELECT jsonb_path_query(msar.list_roles(), '$[*] ? (@.name == "foo")') INTO foo_role; + + -- Check if role has expected properties + RETURN NEXT is(jsonb_typeof(foo_role), 'object'); + RETURN NEXT is((foo_role->>'super')::boolean, false); + RETURN NEXT is((foo_role->>'inherits')::boolean, true); + RETURN NEXT is((foo_role->>'create_role')::boolean, false); + RETURN NEXT is((foo_role->>'create_db')::boolean, false); + RETURN NEXT is((foo_role->>'login')::boolean, false); + RETURN NEXT is(jsonb_typeof(foo_role->'description'), 'null'); + RETURN NEXT is(jsonb_typeof(foo_role->'members'), 'null'); + + -- Modify properties and check role again + ALTER ROLE foo WITH CREATEDB CREATEROLE LOGIN NOINHERIT; + SELECT jsonb_path_query(msar.list_roles(), '$[*] ? (@.name == "foo")') INTO foo_role; + RETURN NEXT is((foo_role->>'super')::boolean, false); + RETURN NEXT is((foo_role->>'inherits')::boolean, false); + RETURN NEXT is((foo_role->>'create_role')::boolean, true); + RETURN NEXT is((foo_role->>'create_db')::boolean, true); + RETURN NEXT is((foo_role->>'login')::boolean, true); + + -- Add comment and check if comment is present + COMMENT ON ROLE foo IS 'A test role'; + SELECT jsonb_path_query(msar.list_roles(), '$[*] ? (@.name == "foo")') INTO foo_role; + RETURN NEXT is(foo_role->'description'#>>'{}', 'A test role'); + + -- Add members and check result + CREATE ROLE bar; + GRANT foo TO bar; + RETURN NEXT is(jsonb_array_length(msar.list_roles()), initial_role_count + 2); + SELECT jsonb_path_query(msar.list_roles(), '$[*] ? (@.name == "foo")') INTO foo_role; + SELECT jsonb_path_query(msar.list_roles(), '$[*] ? (@.name == "bar")') INTO bar_role; + RETURN NEXT is(jsonb_typeof(foo_role->'members'), 'array'); + RETURN NEXT is( + foo_role->'members'->0->>'oid', bar_role->>'oid' + ); + DROP ROLE bar; + + -- Drop role and ensure role is not present in response + DROP ROLE foo; + RETURN NEXT ok(NOT jsonb_path_exists(msar.list_roles(), '$[*] ? 
(@.name == "foo")')); +END; +$$ LANGUAGE plpgsql; + + +-- msar.format_data -------------------------------------------------------------------------------- + +CREATE OR REPLACE FUNCTION test_format_data() RETURNS SETOF TEXT AS $$ +BEGIN + RETURN NEXT is(msar.format_data('3 Jan, 2021'::date), '2021-01-03 AD'); + RETURN NEXT is(msar.format_data('3 Jan, 23 BC'::date), '0023-01-03 BC'); + RETURN NEXT is(msar.format_data('1 day'::interval), 'P0Y0M1DT0H0M0S'); + RETURN NEXT is( + msar.format_data('1 year 2 months 3 days 4 hours 5 minutes 6 seconds'::interval), + 'P1Y2M3DT4H5M6S' + ); + RETURN NEXT is(msar.format_data('1 day 3 hours ago'::interval), 'P0Y0M-1DT-3H0M0S'); + RETURN NEXT is(msar.format_data('1 day -3 hours'::interval), 'P0Y0M1DT-3H0M0S'); + RETURN NEXT is( + msar.format_data('1 year -1 month 3 days 14 hours -10 minutes 30.4 seconds'::interval), + 'P0Y11M3DT13H50M30.4S' + ); + RETURN NEXT is( + msar.format_data('1 year -1 month 3 days 14 hours -10 minutes 30.4 seconds ago'::interval), + 'P0Y-11M-3DT-13H-50M-30.4S' + ); + RETURN NEXT is(msar.format_data('45 hours 70 seconds'::interval), 'P0Y0M0DT45H1M10S'); + RETURN NEXT is( + msar.format_data('5 decades 22 years 14 months 1 week 3 days'::interval), + 'P73Y2M10DT0H0M0S' + ); + RETURN NEXT is(msar.format_data('1 century'::interval), 'P100Y0M0DT0H0M0S'); + RETURN NEXT is(msar.format_data('2 millennia'::interval), 'P2000Y0M0DT0H0M0S'); + RETURN NEXT is(msar.format_data('12:30:45+05:30'::time with time zone), '12:30:45.0+05:30'); + RETURN NEXT is(msar.format_data('12:30:45'::time with time zone), '12:30:45.0Z'); + RETURN NEXT is( + msar.format_data('12:30:45.123456-08'::time with time zone), '12:30:45.123456-08:00' + ); + RETURN NEXT is(msar.format_data('12:30'::time without time zone), '12:30:00.0'); + RETURN NEXT is( + msar.format_data('30 July, 2000 19:15:03.65'::timestamp with time zone), + '2000-07-30T19:15:03.65Z AD' + ); + RETURN NEXT is( + msar.format_data('10000-01-01 00:00:00'::timestamp with time zone), + '10000-01-01T00:00:00.0Z AD' + ); + RETURN NEXT is( + msar.format_data('3 March, 25 BC, 17:30:15+01'::timestamp with time zone), + '0025-03-03T16:30:15.0Z BC' + ); + RETURN NEXT is( + msar.format_data('17654-03-02 01:00:00'::timestamp without time zone), + '17654-03-02T01:00:00.0 AD' + ); +END; +$$ LANGUAGE plpgsql; + +-- msar.list_records_from_table -------------------------------------------------------------------- + +CREATE OR REPLACE FUNCTION __setup_list_records_table() RETURNS SETOF TEXT AS $$ +BEGIN + CREATE TABLE atable ( + id integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + col1 integer, + col2 varchar, + col3 json, + col4 jsonb, + coltodrop integer + ); + ALTER TABLE atable DROP COLUMN coltodrop; + INSERT INTO atable (col1, col2, col3, col4) VALUES + (5, 'sdflkj', '"s"', '{"a": "val"}'), + (34, 'sdflfflsk', null, '[1, 2, 3, 4]'), + (2, 'abcde', '{"k": 3242348}', 'true'); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION __setup_customers_table() RETURNS SETOF TEXT AS $$ +BEGIN +CREATE TABLE "Customers" ( + id integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + "First Name" text, + "Last Name" text, + "Subscription Date" date, + "dropmeee 1" text +); +ALTER TABLE "Customers" DROP COLUMN "dropmeee 1"; +INSERT INTO "Customers" ("First Name", "Last Name", "Subscription Date") VALUES + ('Aaron', 'Adams', '2020-03-21'), -- 1 + ('Abigail', 'Acosta', '2020-04-16'), -- 2 + ('Aaron', 'Adams', '2020-04-29'), -- 3 + ('Abigail', 'Adams', '2020-05-29'), -- 4 + ('Abigail', 'Abbott', '2020-07-05'), -- 5 + ('Aaron', 
'Adkins', '2020-08-16'), -- 6 + ('Aaron', 'Acevedo', '2020-10-29'), -- 7 + ('Abigail', 'Abbott', '2020-10-30'), -- 8 + ('Abigail', 'Adams', '2021-02-14'), -- 9 + ('Abigail', 'Acevedo', '2021-03-29'), -- 10 + ('Aaron', 'Acosta', '2021-04-13'), -- 11 + ('Aaron', 'Adams', '2021-06-30'), -- 12 + ('Abigail', 'Adkins', '2021-09-12'), -- 13 + ('Aaron', 'Adams', '2021-11-11'), -- 14 + ('Abigail', 'Abbott', '2021-11-30'), -- 15 + ('Aaron', 'Acevedo', '2022-02-04'), -- 16 + ('Aaron', 'Adkins', '2022-03-10'), -- 17 + ('Abigail', 'Abbott', '2022-03-23'), -- 18 + ('Abigail', 'Adkins', '2022-03-27'), -- 19 + ('Abigail', 'Abbott', '2022-04-29'), -- 20 + ('Abigail', 'Adams', '2022-05-24'); -- 21 +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION __setup_table_with_self_referential_fk() RETURNS SETOF TEXT AS $$ +BEGIN +CREATE TABLE categories ( + id serial primary key, + name TEXT, + parent INT REFERENCES categories(id) +); +INSERT INTO categories (id, parent, name) VALUES +( 1, NULL, 'Tools'), +( 2, 1 , 'Power tools'), +( 3, 1 , 'Hand tools'), +( 4, 2 , 'Drills'), +( 5, 3 , 'Screwdrivers'), +( 6, 3 , 'Wrenches'); +-- Reset sequence: +PERFORM setval('categories_id_seq', (SELECT max(id) FROM categories)); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_list_records_from_table() RETURNS SETOF TEXT AS $$ +DECLARE + rel_id oid; +BEGIN + PERFORM __setup_list_records_table(); + rel_id := 'atable'::regclass::oid; + RETURN NEXT is( + msar.list_records_from_table( + tab_id => rel_id, + limit_ => null, + offset_ => null, + order_ => null, + filter_ => null, + group_ => null + ), + $j${ + "count": 3, + "results": [ + {"1": 1, "2": 5, "3": "sdflkj", "4": "s", "5": {"a": "val"}}, + {"1": 2, "2": 34, "3": "sdflfflsk", "4": null, "5": [1, 2, 3, 4]}, + {"1": 3, "2": 2, "3": "abcde", "4": {"k": 3242348}, "5": true} + ], + "grouping": null, + "linked_record_summaries": null, + "record_summaries": null + }$j$ || jsonb_build_object( + 'query', concat( + 'SELECT msar.format_data(id) AS "1", msar.format_data(col1) AS "2",' + ' msar.format_data(col2) AS "3", msar.format_data(col3) AS "4",' + ' msar.format_data(col4) AS "5" FROM public.atable' + ' ORDER BY "1" ASC LIMIT NULL OFFSET NULL' + ) + ) + ); + RETURN NEXT is( + msar.list_records_from_table( + tab_id => rel_id, + limit_ => 2, + offset_ => null, + order_ => '[{"attnum": 2, "direction": "desc"}]', + filter_ => null, + group_ => null + ), + $j${ + "count": 3, + "results": [ + {"1": 2, "2": 34, "3": "sdflfflsk", "4": null, "5": [1, 2, 3, 4]}, + {"1": 1, "2": 5, "3": "sdflkj", "4": "s", "5": {"a": "val"}} + ], + "grouping": null, + "linked_record_summaries": null, + "record_summaries": null + }$j$ || jsonb_build_object( + 'query', concat( + 'SELECT msar.format_data(id) AS "1", msar.format_data(col1) AS "2",' + ' msar.format_data(col2) AS "3", msar.format_data(col3) AS "4",' + ' msar.format_data(col4) AS "5" FROM public.atable' + ' ORDER BY "2" DESC, "1" ASC LIMIT ''2'' OFFSET NULL' + ) + ) + ); + RETURN NEXT is( + msar.list_records_from_table( + tab_id => rel_id, + limit_ => null, + offset_ => 1, + order_ => '[{"attnum": 1, "direction": "desc"}]', + filter_ => null, + group_ => null + ), + $j${ + "count": 3, + "results": [ + {"1": 2, "2": 34, "3": "sdflfflsk", "4": null, "5": [1, 2, 3, 4]}, + {"1": 1, "2": 5, "3": "sdflkj", "4": "s", "5": {"a": "val"}} + ], + "grouping": null, + "linked_record_summaries": null, + "record_summaries": null + }$j$ || jsonb_build_object( + 'query', concat( + 'SELECT msar.format_data(id) AS "1", msar.format_data(col1) AS 
"2",', + ' msar.format_data(col2) AS "3", msar.format_data(col3) AS "4",', + ' msar.format_data(col4) AS "5" FROM public.atable', + ' ORDER BY "1" DESC, "1" ASC LIMIT NULL OFFSET ''1''' + ) + ) + ); + CREATE ROLE intern_no_pkey; + GRANT USAGE ON SCHEMA msar, __msar TO intern_no_pkey; + GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA msar, __msar TO intern_no_pkey; + GRANT SELECT (col1, col2, col3, col4) ON TABLE atable TO intern_no_pkey; + SET ROLE intern_no_pkey; + RETURN NEXT is( + msar.list_records_from_table( + tab_id => rel_id, + limit_ => null, + offset_ => null, + order_ => null, + filter_ => null, + group_ => null + ), + $j${ + "count": 3, + "results": [ + {"2": 2, "3": "abcde", "4": {"k": 3242348}, "5": true}, + {"2": 5, "3": "sdflkj", "4": "s", "5": {"a": "val"}}, + {"2": 34, "3": "sdflfflsk", "4": null, "5": [1, 2, 3, 4]} + ], + "grouping": null, + "linked_record_summaries": null, + "record_summaries": null + }$j$ || jsonb_build_object( + 'query', concat( + 'SELECT msar.format_data(col1) AS "2", msar.format_data(col2) AS "3",', + ' msar.format_data(col3) AS "4", msar.format_data(col4) AS "5" FROM public.atable', + ' ORDER BY "2" ASC, "3" ASC, "5" ASC LIMIT NULL OFFSET NULL' + ) + ) + ); + RETURN NEXT is( + msar.list_records_from_table( + tab_id => rel_id, + limit_ => null, + offset_ => null, + order_ => '[{"attnum": 3, "direction": "desc"}]', + filter_ => null, + group_ => null + ), + $j${ + "count": 3, + "results": [ + {"2": 5, "3": "sdflkj", "4": "s", "5": {"a": "val"}}, + {"2": 34, "3": "sdflfflsk", "4": null, "5": [1, 2, 3, 4]}, + {"2": 2, "3": "abcde", "4": {"k": 3242348}, "5": true} + ], + "grouping": null, + "linked_record_summaries": null, + "record_summaries": null + }$j$ || jsonb_build_object( + 'query', concat( + 'SELECT msar.format_data(col1) AS "2", msar.format_data(col2) AS "3",', + ' msar.format_data(col3) AS "4", msar.format_data(col4) AS "5" FROM public.atable', + ' ORDER BY "3" DESC, "2" ASC, "3" ASC, "5" ASC LIMIT NULL OFFSET NULL' + ) + ) + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_list_records_with_grouping() RETURNS SETOF TEXT AS $$ +DECLARE + rel_id oid; +BEGIN + PERFORM __setup_customers_table(); + rel_id := '"Customers"'::regclass::oid; + RETURN NEXT is( + msar.list_records_from_table( + tab_id => rel_id, + limit_ => 10, + offset_ => null, + order_ => '[{"attnum": 3, "direction": "asc"}, {"attnum": 2, "direction": "asc"}]', + filter_ => null, + group_ => '{"columns": [3, 2]}' + ), + $j${ + "count": 21, + "results": [ + {"1": 5, "2": "Abigail", "3": "Abbott", "4": "2020-07-05 AD"}, + {"1": 8, "2": "Abigail", "3": "Abbott", "4": "2020-10-30 AD"}, + {"1": 15, "2": "Abigail", "3": "Abbott", "4": "2021-11-30 AD"}, + {"1": 18, "2": "Abigail", "3": "Abbott", "4": "2022-03-23 AD"}, + {"1": 20, "2": "Abigail", "3": "Abbott", "4": "2022-04-29 AD"}, + {"1": 7, "2": "Aaron", "3": "Acevedo", "4": "2020-10-29 AD"}, + {"1": 16, "2": "Aaron", "3": "Acevedo", "4": "2022-02-04 AD"}, + {"1": 10, "2": "Abigail", "3": "Acevedo", "4": "2021-03-29 AD"}, + {"1": 11, "2": "Aaron", "3": "Acosta", "4": "2021-04-13 AD"}, + {"1": 2, "2": "Abigail", "3": "Acosta", "4": "2020-04-16 AD"} + ], + "grouping": { + "columns": [3, 2], + "preproc": null, + "groups": [ + { + "id": 1, + "count": 5, + "results_eq": {"2": "Abigail", "3": "Abbott"}, + "result_indices": [0, 1, 2, 3, 4] + }, + { + "id": 2, + "count": 2, + "results_eq": {"2": "Aaron", "3": "Acevedo"}, + "result_indices": [5, 6] + }, + { + "id": 3, + "count": 1, + "results_eq": {"2": "Abigail", "3": "Acevedo"}, + 
"result_indices": [7] + }, + { + "id": 4, + "count": 1, + "results_eq": {"2": "Aaron", "3": "Acosta"}, + "result_indices": [8] + }, + { + "id": 5, + "count": 1, + "results_eq": {"2": "Abigail", "3": "Acosta"}, + "result_indices": [9] + } + ] + }, + "linked_record_summaries": null, + "record_summaries": null + }$j$ || jsonb_build_object( + 'query', concat( + 'SELECT msar.format_data(id) AS "1", msar.format_data("First Name") AS "2",' + ' msar.format_data("Last Name") AS "3", msar.format_data("Subscription Date") AS "4"' + ' FROM public."Customers" ORDER BY "3" ASC, "2" ASC, "1" ASC LIMIT ''10'' OFFSET NULL' + ) + ) + ); + RETURN NEXT is( + msar.list_records_from_table( + tab_id => rel_id, + limit_ => 3, + offset_ => null, + order_ => '[{"attnum": 3, "direction": "asc"}, {"attnum": 2, "direction": "asc"}]', + filter_ => null, + group_ => '{"columns": [3, 2]}' + ), + $j${ + "count": 21, + "results": [ + {"1": 5, "2": "Abigail", "3": "Abbott", "4": "2020-07-05 AD"}, + {"1": 8, "2": "Abigail", "3": "Abbott", "4": "2020-10-30 AD"}, + {"1": 15, "2": "Abigail", "3": "Abbott", "4": "2021-11-30 AD"} + ], + "grouping": { + "columns": [3, 2], + "preproc": null, + "groups": [ + { + "id": 1, + "count": 5, + "results_eq": {"2": "Abigail", "3": "Abbott"}, + "result_indices": [0, 1, 2] + } + ] + }, + "linked_record_summaries": null, + "record_summaries": null + }$j$ || jsonb_build_object( + 'query', concat( + 'SELECT msar.format_data(id) AS "1", msar.format_data("First Name") AS "2",' + ' msar.format_data("Last Name") AS "3", msar.format_data("Subscription Date") AS "4"' + ' FROM public."Customers" ORDER BY "3" ASC, "2" ASC, "1" ASC LIMIT ''3'' OFFSET NULL' + ) + ) + ); + RETURN NEXT is( + msar.list_records_from_table( + tab_id => rel_id, + limit_ => 3, + offset_ => null, + order_ => '[{"attnum": 4, "direction": "asc"}]', + filter_ => null, + group_ => '{"columns": [4], "preproc": ["truncate_to_month"]}' + ), + $j${ + "count": 21, + "results": [ + {"1": 1, "2": "Aaron", "3": "Adams", "4": "2020-03-21 AD"}, + {"1": 2, "2": "Abigail", "3": "Acosta", "4": "2020-04-16 AD"}, + {"1": 3, "2": "Aaron", "3": "Adams", "4": "2020-04-29 AD"} + ], + "grouping": { + "columns": [4], + "preproc": ["truncate_to_month"], + "groups": [ + {"id": 1, "count": 1, "results_eq": {"4": "2020-03 AD"}, "result_indices": [0]}, + {"id": 2, "count": 2, "results_eq": {"4": "2020-04 AD"}, "result_indices": [1, 2]} + ] + }, + "linked_record_summaries": null, + "record_summaries": null + }$j$ || jsonb_build_object( + 'query', concat( + 'SELECT msar.format_data(id) AS "1", msar.format_data("First Name") AS "2",' + ' msar.format_data("Last Name") AS "3", msar.format_data("Subscription Date") AS "4"' + ' FROM public."Customers" ORDER BY "4" ASC, "1" ASC LIMIT ''3'' OFFSET NULL' + ) + ) + ); + RETURN NEXT is( + msar.list_records_from_table( + tab_id => rel_id, + limit_ => 5, + offset_ => null, + order_ => '[{"attnum": 4, "direction": "asc"}]', + filter_ => null, + group_ => '{"columns": [4], "preproc": ["truncate_to_year"]}' + ), + $j${ + "count": 21, + "results": [ + {"1": 1, "2": "Aaron", "3": "Adams", "4": "2020-03-21 AD"}, + {"1": 2, "2": "Abigail", "3": "Acosta", "4": "2020-04-16 AD"}, + {"1": 3, "2": "Aaron", "3": "Adams", "4": "2020-04-29 AD"}, + {"1": 4, "2": "Abigail", "3": "Adams", "4": "2020-05-29 AD"}, + {"1": 5, "2": "Abigail", "3": "Abbott", "4": "2020-07-05 AD"} + ], + "grouping": { + "columns": [4], + "preproc": ["truncate_to_year"], + "groups": [ + {"id": 1, "count": 8, "results_eq": {"4": "2020 AD"}, "result_indices": [0, 1, 
2, 3, 4]} + ] + }, + "linked_record_summaries": null, + "record_summaries": null + }$j$ || jsonb_build_object( + 'query', concat( + 'SELECT msar.format_data(id) AS "1", msar.format_data("First Name") AS "2",' + ' msar.format_data("Last Name") AS "3", msar.format_data("Subscription Date") AS "4"' + ' FROM public."Customers" ORDER BY "4" ASC, "1" ASC LIMIT ''5'' OFFSET NULL' + ) + ) + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_list_records_for_table_with_self_referential_fk() RETURNS SETOF TEXT AS $$ +DECLARE + rel_id oid; +BEGIN + PERFORM __setup_table_with_self_referential_fk(); + rel_id := 'categories'::regclass::oid; + RETURN NEXT is( + msar.list_records_from_table( + tab_id => rel_id, + limit_ => 10, + offset_ => null, + order_ => null, + filter_ => null, + group_ => null + ), + $j${ + "count": 6, + "results": [ + {"1": 1, "2": "Tools", "3": null}, + {"1": 2, "2": "Power tools", "3": 1}, + {"1": 3, "2": "Hand tools", "3": 1}, + {"1": 4, "2": "Drills", "3": 2}, + {"1": 5, "2": "Screwdrivers", "3": 3}, + {"1": 6, "2": "Wrenches", "3": 3} + ], + "grouping": null, + "record_summaries": null, + "linked_record_summaries": { + "3": { + "1": "Tools", + "2": "Power tools", + "3": "Hand tools" + } + } + }$j$ || jsonb_build_object( + 'query', concat( + 'SELECT msar.format_data(id) AS "1", msar.format_data(name) AS "2",' + ' msar.format_data(parent) AS "3" FROM public.categories ORDER BY "1" ASC LIMIT ''10'' OFFSET NULL' + ) + ) + ); +END; +$$ LANGUAGE plpgsql; + + +-- msar.get_current_role --------------------------------------------------------------------------- + +CREATE OR REPLACE FUNCTION __setup_get_current_role() RETURNS SETOF TEXT AS $$ +BEGIN + CREATE USER parent1; + CREATE ROLE parent2; + CREATE ROLE child_role; + GRANT parent1 TO child_role; + GRANT parent2 TO child_role; + GRANT USAGE ON SCHEMA msar, __msar TO child_role; +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_get_current_role() RETURNS SETOF TEXT AS $$ +DECLARE + child_role_oid oid; + parent1_oid oid; + parent2_oid oid; +BEGIN + PERFORM __setup_get_current_role(); + SET ROLE child_role; + child_role_oid := 'child_role'::regrole::oid; + parent1_oid := 'parent1'::regrole::oid; + parent2_oid := 'parent2'::regrole::oid; + RETURN NEXT is( + msar.get_current_role(), + format($j${ + "current_role":{ + "oid": %1$s, + "name": "child_role", + "login": false, + "super": false, + "members": null, + "inherits": true, + "create_db": false, + "create_role": false, + "description": null + }, + "parent_roles": [ + { + "oid": %2$s, + "name": "parent1", + "login": true, + "super": false, + "members": [{"oid": %1$s, "admin": false}], + "inherits": true, + "create_db": false, + "create_role": false, + "description": null + }, + { + "oid": %3$s, + "name": "parent2", + "login": false, + "super": false, + "members": [{"oid": %1$s, "admin": false}], + "inherits": true, + "create_db": false, + "create_role": false, + "description": null + } + ] + } + $j$, child_role_oid, parent1_oid, parent2_oid + )::jsonb + ); +END; +$$ LANGUAGE plpgsql; + +-- msar.build_order_by_expr ------------------------------------------------------------------------ + +CREATE OR REPLACE FUNCTION test_build_order_by_expr() RETURNS SETOF TEXT AS $$ +DECLARE + rel_id oid; +BEGIN + PERFORM __setup_list_records_table(); + rel_id := 'atable'::regclass::oid; + RETURN NEXT is(msar.build_order_by_expr(rel_id, null), 'ORDER BY "1" ASC'); + RETURN NEXT is( + 
msar.build_order_by_expr(rel_id, '[{"attnum": 1, "direction": "desc"}]'), + 'ORDER BY "1" DESC, "1" ASC' + ); + RETURN NEXT is( + msar.build_order_by_expr( + rel_id, '[{"attnum": 3, "direction": "asc"}, {"attnum": 5, "direction": "DESC"}]' + ), + 'ORDER BY "3" ASC, "5" DESC, "1" ASC' + ); + CREATE ROLE intern_no_pkey; + GRANT USAGE ON SCHEMA msar, __msar TO intern_no_pkey; + GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA msar, __msar TO intern_no_pkey; + GRANT SELECT (col1, col2, col3, col4) ON TABLE atable TO intern_no_pkey; + SET ROLE intern_no_pkey; + RETURN NEXT is( + msar.build_order_by_expr(rel_id, null), 'ORDER BY "2" ASC, "3" ASC, "5" ASC' + ); + SET ROLE NONE; + REVOKE ALL ON TABLE atable FROM intern_no_pkey; + SET ROLE intern_no_pkey; + RETURN NEXT is(msar.build_order_by_expr(rel_id, null), null); +END; +$$ LANGUAGE plpgsql; + + +-- msar.build_expr --------------------------------------------------------------------------------- + +CREATE OR REPLACE FUNCTION test_build_expr() RETURNS SETOF TEXT AS $$ +DECLARE + rel_id oid; +BEGIN + PERFORM __setup_list_records_table(); + rel_id := 'atable'::regclass::oid; + RETURN NEXT is( + msar.build_expr( + rel_id, + jsonb_build_object( + 'type', 'equal', 'args', jsonb_build_array( + jsonb_build_object('type', 'attnum', 'value', 2), + jsonb_build_object('type', 'literal', 'value', 500)))), + '(col1) = (''500'')' + ); + RETURN NEXT is( + msar.build_expr( + rel_id, + jsonb_build_object( + 'type', 'lesser', 'args', jsonb_build_array( + jsonb_build_object('type', 'attnum', 'value', 2), + jsonb_build_object('type', 'literal', 'value', 500)))), + '(col1) < (''500'')' + ); + RETURN NEXT is( + msar.build_expr( + rel_id, + jsonb_build_object( + 'type', 'greater', 'args', jsonb_build_array( + jsonb_build_object('type', 'attnum', 'value', 2), + jsonb_build_object('type', 'literal', 'value', 500)))), + '(col1) > (''500'')' + ); + RETURN NEXT is( + msar.build_expr( + rel_id, + jsonb_build_object( + 'type', 'lesser_or_equal', 'args', jsonb_build_array( + jsonb_build_object('type', 'attnum', 'value', 2), + jsonb_build_object('type', 'literal', 'value', 500)))), + '(col1) <= (''500'')' + ); + RETURN NEXT is( + msar.build_expr( + rel_id, + jsonb_build_object( + 'type', 'greater_or_equal', 'args', jsonb_build_array( + jsonb_build_object('type', 'attnum', 'value', 2), + jsonb_build_object('type', 'literal', 'value', 500)))), + '(col1) >= (''500'')' + ); + RETURN NEXT is( + msar.build_expr( + rel_id, + jsonb_build_object( + 'type', 'null', 'args', jsonb_build_array( + jsonb_build_object('type', 'attnum', 'value', 2)))), + '(col1) IS NULL' + ); + RETURN NEXT is( + msar.build_expr( + rel_id, + jsonb_build_object( + 'type', 'not_null', 'args', jsonb_build_array( + jsonb_build_object('type', 'attnum', 'value', 2)))), + '(col1) IS NOT NULL' + ); + RETURN NEXT is( + msar.build_expr( + rel_id, + jsonb_build_object( + 'type', 'contains_case_insensitive', 'args', jsonb_build_array( + jsonb_build_object('type', 'attnum', 'value', 2), + jsonb_build_object('type', 'literal', 'value', 'ABc')))), + 'strpos(lower(col1), lower(''ABc''))::boolean' + ); + RETURN NEXT is( + msar.build_expr( + rel_id, + jsonb_build_object( + 'type', 'starts_with_case_insensitive', 'args', jsonb_build_array( + jsonb_build_object('type', 'attnum', 'value', 2), + jsonb_build_object('type', 'literal', 'value', 'a''bc')))), + 'starts_with(lower(col1), lower(''a''''bc''))' + ); + RETURN NEXT is( + -- composition for json_array_length_equals + msar.build_expr( + rel_id, + jsonb_build_object( + 'type', 'equal', 
'args', jsonb_build_array( + jsonb_build_object( + 'type', 'json_array_length', 'args', jsonb_build_array( + jsonb_build_object('type', 'attnum', 'value', 2) + ) + ), + jsonb_build_object('type', 'literal', 'value', 500)))), + '(jsonb_array_length((col1)::jsonb)) = (''500'')' + ); + RETURN NEXT is( + -- composition for json_array_length_greater_than + msar.build_expr( + rel_id, + jsonb_build_object( + 'type', 'greater', 'args', jsonb_build_array( + jsonb_build_object( + 'type', 'json_array_length', 'args', jsonb_build_array( + jsonb_build_object('type', 'attnum', 'value', 2) + ) + ), + jsonb_build_object('type', 'literal', 'value', 500)))), + '(jsonb_array_length((col1)::jsonb)) > (''500'')' + ); + RETURN NEXT is( + msar.build_expr( + -- composition for json_array_length_greater_or_equal + rel_id, + jsonb_build_object( + 'type', 'greater_or_equal', 'args', jsonb_build_array( + jsonb_build_object( + 'type', 'json_array_length', 'args', jsonb_build_array( + jsonb_build_object('type', 'attnum', 'value', 2) + ) + ), + jsonb_build_object('type', 'literal', 'value', 500)))), + '(jsonb_array_length((col1)::jsonb)) >= (''500'')' + ); + RETURN NEXT is( + msar.build_expr( + -- composition for json_array_length_less_than + rel_id, + jsonb_build_object( + 'type', 'lesser', 'args', jsonb_build_array( + jsonb_build_object( + 'type', 'json_array_length', 'args', jsonb_build_array( + jsonb_build_object('type', 'attnum', 'value', 2) + ) + ), + jsonb_build_object('type', 'literal', 'value', 500)))), + '(jsonb_array_length((col1)::jsonb)) < (''500'')' + ); + RETURN NEXT is( + msar.build_expr( + -- composition for json_array_length_less_or_equal + rel_id, + jsonb_build_object( + 'type', 'lesser_or_equal', 'args', jsonb_build_array( + jsonb_build_object( + 'type', 'json_array_length', 'args', jsonb_build_array( + jsonb_build_object('type', 'attnum', 'value', 2) + ) + ), + jsonb_build_object('type', 'literal', 'value', 500)))), + '(jsonb_array_length((col1)::jsonb)) <= (''500'')' + ); + RETURN NEXT is( + msar.build_expr( + -- composition for json_array_not_empty + rel_id, + jsonb_build_object( + 'type', 'greater', 'args', jsonb_build_array( + jsonb_build_object( + 'type', 'json_array_length', 'args', jsonb_build_array( + jsonb_build_object('type', 'attnum', 'value', 2) + ) + ), + jsonb_build_object('type', 'literal', 'value', 0)))), + '(jsonb_array_length((col1)::jsonb)) > (''0'')' + ); + RETURN NEXT is( + msar.build_expr( + rel_id, + jsonb_build_object( + 'type', 'json_array_contains', 'args', jsonb_build_array( + jsonb_build_object('type', 'attnum', 'value', 2), + jsonb_build_object('type', 'literal', 'value', '"500"')))), + '(col1)::jsonb @> (''"500"'')::jsonb' + ); + RETURN NEXT is( + msar.build_expr( + -- composition for uri_scheme_equals + rel_id, + jsonb_build_object( + 'type', 'equal', 'args', jsonb_build_array( + jsonb_build_object( + 'type', 'uri_scheme', 'args', jsonb_build_array( + jsonb_build_object('type', 'attnum', 'value', 2) + ) + ), + jsonb_build_object('type', 'literal', 'value', 'https')))), + '(mathesar_types.uri_scheme(col1)) = (''https'')' + ); + RETURN NEXT is( + -- composition for uri_authority_contains + msar.build_expr( + rel_id, + jsonb_build_object( + 'type', 'contains', 'args', jsonb_build_array( + jsonb_build_object( + 'type', 'uri_authority', 'args', jsonb_build_array( + jsonb_build_object('type', 'attnum', 'value', 2) + ) + ), + jsonb_build_object('type', 'literal', 'value', 'google')))), + 'strpos((mathesar_types.uri_authority(col1)), (''google''))::boolean' + ); + RETURN NEXT 
is( + -- composition for email_domain_equals + msar.build_expr( + rel_id, + jsonb_build_object( + 'type', 'equal', 'args', jsonb_build_array( + jsonb_build_object( + 'type', 'email_domain', 'args', jsonb_build_array( + jsonb_build_object('type', 'attnum', 'value', 2) + ) + ), + jsonb_build_object('type', 'literal', 'value', 'gmail.com')))), + '(mathesar_types.email_domain_name(col1)) = (''gmail.com'')' + ); + RETURN NEXT is( + -- composition for email_domain_contains + msar.build_expr( + rel_id, + jsonb_build_object( + 'type', 'contains', 'args', jsonb_build_array( + jsonb_build_object( + 'type', 'email_domain', 'args', jsonb_build_array( + jsonb_build_object('type', 'attnum', 'value', 2) + ) + ), + jsonb_build_object('type', 'literal', 'value', 'mail')))), + 'strpos((mathesar_types.email_domain_name(col1)), (''mail''))::boolean' + ); + RETURN NEXT is( + msar.build_expr( + rel_id, + jsonb_build_object( + 'type', 'or', 'args', jsonb_build_array( + jsonb_build_object( + 'type', 'contains', 'args', jsonb_build_array( + jsonb_build_object( + 'type', 'email_domain', 'args', jsonb_build_array( + jsonb_build_object('type', 'attnum', 'value', 2) + ) + ), + jsonb_build_object('type', 'literal', 'value', 'mail')) + ), + jsonb_build_object( + 'type', 'equal', 'args', jsonb_build_array( + jsonb_build_object('type', 'attnum', 'value', 3), + jsonb_build_object('type', 'literal', 'value', 500)) + ) + ) + ) + ), + '(strpos((mathesar_types.email_domain_name(col1)), (''mail''))::boolean) OR ((col2) = (''500''))' + ); + RETURN NEXT is( + msar.build_expr( + rel_id, + jsonb_build_object( + 'type', 'or', 'args', jsonb_build_array( + jsonb_build_object( + 'type', 'and', 'args', jsonb_build_array( + jsonb_build_object( + 'type', 'equal', 'args', jsonb_build_array( + jsonb_build_object('type', 'attnum', 'value', 3), + jsonb_build_object('type', 'literal', 'value', 500)) + ), + jsonb_build_object( + 'type', 'lesser', 'args', jsonb_build_array( + jsonb_build_object('type', 'attnum', 'value', 4), + jsonb_build_object('type', 'literal', 'value', 'abcde')) + ) + ) + ), + jsonb_build_object( + 'type', 'greater', 'args', jsonb_build_array( + jsonb_build_object('type', 'attnum', 'value', 1), + jsonb_build_object('type', 'literal', 'value', 20)) + ) + ) + ) + ), + '(((col2) = (''500'')) AND ((col3) < (''abcde''))) OR ((id) > (''20''))' + ); +END; +$$ LANGUAGE plpgsql; + + +-- msar.search_records_from_table ------------------------------------------------------------------ + +CREATE OR REPLACE FUNCTION __setup_search_records_table() RETURNS SETOF TEXT AS $$ +BEGIN + CREATE TABLE atable ( + id integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + col1 integer, + col2 varchar, + coltodrop integer + ); + ALTER TABLE atable DROP COLUMN coltodrop; + INSERT INTO atable (col1, col2) VALUES + (1, 'bcdea'), + (12, 'vwxyz'), + (1, 'edcba'), + (2, 'abcde'); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_search_records_from_table() RETURNS SETOF TEXT AS $$ +DECLARE + rel_id oid; + search_result jsonb; +BEGIN + PERFORM __setup_search_records_table(); + rel_id := 'atable'::regclass::oid; + search_result := msar.search_records_from_table( + rel_id, + jsonb_build_array( + jsonb_build_object('attnum', 2, 'literal', 3) + ), + 10 + ); + RETURN NEXT is(search_result -> 'results', jsonb_build_array()); + RETURN NEXT is((search_result -> 'count')::integer, 0); + + search_result := msar.search_records_from_table( + rel_id, + jsonb_build_array( + jsonb_build_object('attnum', 3, 'literal', 'bc') + ), + null + ); + RETURN NEXT is( + 
search_result -> 'results', + jsonb_build_array( + jsonb_build_object('1', 1, '2', 1, '3', 'bcdea'), + jsonb_build_object('1', 4, '2', 2, '3', 'abcde') + ) + ); + RETURN NEXT is((search_result -> 'count')::integer, 2); + + search_result := msar.search_records_from_table( + rel_id, + jsonb_build_array(), + 10 + ); + RETURN NEXT is( + search_result -> 'results', + jsonb_build_array( + jsonb_build_object('1', 1, '2', 1, '3', 'bcdea'), + jsonb_build_object('1', 2, '2', 12, '3', 'vwxyz'), + jsonb_build_object('1', 3, '2', 1, '3', 'edcba'), + jsonb_build_object('1', 4, '2', 2, '3', 'abcde') + ) + ); + RETURN NEXT is((search_result -> 'count')::integer, 4); + + search_result := msar.search_records_from_table( + rel_id, + null, + 10 + ); + RETURN NEXT is( + search_result -> 'results', + jsonb_build_array( + jsonb_build_object('1', 1, '2', 1, '3', 'bcdea'), + jsonb_build_object('1', 2, '2', 12, '3', 'vwxyz'), + jsonb_build_object('1', 3, '2', 1, '3', 'edcba'), + jsonb_build_object('1', 4, '2', 2, '3', 'abcde') + ) + ); + RETURN NEXT is((search_result -> 'count')::integer, 4); + + search_result := msar.search_records_from_table( + rel_id, + jsonb_build_array( + jsonb_build_object('attnum', 3, 'literal', 'bc') + ), + 10 + ); + RETURN NEXT is( + search_result -> 'results', + jsonb_build_array( + jsonb_build_object('1', 1, '2', 1, '3', 'bcdea'), + jsonb_build_object('1', 4, '2', 2, '3', 'abcde') + ) + ); + RETURN NEXT is((search_result -> 'count')::integer, 2); + + search_result := msar.search_records_from_table( + rel_id, + jsonb_build_array( + jsonb_build_object('attnum', 3, 'literal', 'b'), + jsonb_build_object('attnum', 2, 'literal', 1) + ), + 10 + ); + RETURN NEXT is( + search_result -> 'results', + jsonb_build_array( + jsonb_build_object('1', 1, '2', 1, '3', 'bcdea'), + jsonb_build_object('1', 3, '2', 1, '3', 'edcba'), + jsonb_build_object('1', 4, '2', 2, '3', 'abcde') + ) + ); + RETURN NEXT is((search_result -> 'count')::integer, 3); + + search_result := msar.search_records_from_table( + rel_id, + jsonb_build_array( + jsonb_build_object('attnum', 3, 'literal', 'a'), + jsonb_build_object('attnum', 2, 'literal', 1) + ), + 10 + ); + RETURN NEXT is( + search_result -> 'results', + jsonb_build_array( + jsonb_build_object('1', 1, '2', 1, '3', 'bcdea'), + jsonb_build_object('1', 3, '2', 1, '3', 'edcba'), + jsonb_build_object('1', 4, '2', 2, '3', 'abcde') + ) + ); + RETURN NEXT is((search_result -> 'count')::integer, 3); + + search_result := msar.search_records_from_table( + rel_id, + jsonb_build_array( + jsonb_build_object('attnum', 3, 'literal', 'a') + ), + 10 + ); + RETURN NEXT is( + search_result -> 'results', + jsonb_build_array( + jsonb_build_object('1', 4, '2', 2, '3', 'abcde'), + jsonb_build_object('1', 1, '2', 1, '3', 'bcdea'), + jsonb_build_object('1', 3, '2', 1, '3', 'edcba') + ) + ); + RETURN NEXT is((search_result -> 'count')::integer, 3); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_get_record_from_table() RETURNS SETOF TEXT AS $$ +DECLARE + rel_id oid; + record_2_results jsonb := '[ + { + "1": 2, + "2": 34, + "3": "sdflfflsk", + "4": null, + "5": [1, 2, 3, 4] + } + ]'::jsonb; +BEGIN + PERFORM __setup_list_records_table(); + rel_id := 'atable'::regclass::oid; + + -- We should be able to retrieve a single record + RETURN NEXT is(msar.get_record_from_table(rel_id, 2) -> 'results', record_2_results); + + -- We should be able to retrieve a record via stringified primary key + RETURN NEXT is(msar.get_record_from_table(rel_id, '2') -> 'results', record_2_results); + + -- We 
should get an empty array if the record does not exist + RETURN NEXT is(msar.get_record_from_table(rel_id, 200) -> 'results', '[]'::jsonb); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_delete_records_from_table_single() RETURNS SETOF TEXT AS $$ +DECLARE + rel_id oid; + delete_result integer; +BEGIN + PERFORM __setup_list_records_table(); + rel_id := 'atable'::regclass::oid; + delete_result := msar.delete_records_from_table( + rel_id, + '[2]' + ); + RETURN NEXT is(delete_result, 1); + RETURN NEXT results_eq( + 'SELECT id FROM atable ORDER BY id', + $v$VALUES ('1'::integer), ('3'::integer)$v$ + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_delete_records_from_table_null() RETURNS SETOF TEXT AS $$ +DECLARE + rel_id oid; + delete_result integer; +BEGIN + PERFORM __setup_list_records_table(); + rel_id := 'atable'::regclass::oid; + delete_result := msar.delete_records_from_table( + rel_id, + null + ); + RETURN NEXT is(delete_result, null); + RETURN NEXT results_eq( + 'SELECT id FROM atable ORDER BY id', + $v$VALUES ('1'::integer), ('2'::integer), ('3'::integer)$v$ + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_delete_records_from_table_empty() RETURNS SETOF TEXT AS $$ +DECLARE + rel_id oid; + delete_result integer; +BEGIN + PERFORM __setup_list_records_table(); + rel_id := 'atable'::regclass::oid; + delete_result := msar.delete_records_from_table( + rel_id, + '[]' + ); + RETURN NEXT is(delete_result, 0); + RETURN NEXT results_eq( + 'SELECT id FROM atable ORDER BY id', + $v$VALUES ('1'::integer), ('2'::integer), ('3'::integer)$v$ + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_delete_records_from_table_multi() RETURNS SETOF TEXT AS $$ +DECLARE + rel_id oid; + delete_result integer; +BEGIN + PERFORM __setup_list_records_table(); + rel_id := 'atable'::regclass::oid; + delete_result := msar.delete_records_from_table( + rel_id, + '[1, 2]' + ); + RETURN NEXT is(delete_result, 2); + RETURN NEXT results_eq( + 'SELECT id FROM atable ORDER BY id', + $v$VALUES ('3'::integer)$v$ + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_delete_records_from_table_multi_nonexist() RETURNS SETOF TEXT AS $$ +DECLARE + rel_id oid; + delete_result integer; +BEGIN + PERFORM __setup_list_records_table(); + rel_id := 'atable'::regclass::oid; + delete_result := msar.delete_records_from_table( + rel_id, + '[1, 2, 342]' + ); + RETURN NEXT is(delete_result, 2); + RETURN NEXT results_eq( + 'SELECT id FROM atable ORDER BY id', + $v$VALUES ('3'::integer)$v$ + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_delete_records_from_table_no_pkey() RETURNS SETOF TEXT AS $$ +DECLARE + rel_id oid; + delete_result integer; +BEGIN + PERFORM __setup_list_records_table(); + rel_id := 'atable'::regclass::oid; + CREATE ROLE intern_no_pkey; + GRANT USAGE ON SCHEMA msar, __msar TO intern_no_pkey; + GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA msar, __msar TO intern_no_pkey; + GRANT SELECT ON ALL TABLES IN SCHEMA msar, __msar TO INTERN_no_pkey; + GRANT SELECT (col1, col2, col3, col4) ON TABLE atable TO intern_no_pkey; + SET ROLE intern_no_pkey; + RETURN NEXT throws_ok( + format('SELECT msar.delete_records_from_table(%s, ''[2, 3]'')', rel_id), + '42501', + 'permission denied for table atable', + 'Throw error when trying to delete without permission' + ); + SET ROLE NONE; + RETURN NEXT results_eq( + 'SELECT id FROM atable ORDER BY id', + $v$VALUES ('1'::integer), ('2'::integer), ('3'::integer)$v$ + ); +END; +$$ LANGUAGE plpgsql; + 
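+-- Taken together, the deletion tests above pin down the contract of
+-- msar.delete_records_from_table: it takes a table OID and a jsonb array of
+-- primary key values, deletes the matching rows, and returns the number of
+-- rows actually deleted. A null key list deletes nothing and returns null, an
+-- empty list returns 0, and keys with no matching row are skipped rather than
+-- raising an error. A usage sketch (illustrative, not part of the suite):
+--
+--   SELECT msar.delete_records_from_table('atable'::regclass::oid, '[1, 2]');
+--   -- returns 2 when both rows exist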
+ +CREATE OR REPLACE FUNCTION test_delete_records_from_table_stringy_pkey() RETURNS SETOF TEXT AS $$ +DECLARE + rel_id oid; + delete_result integer; +BEGIN + PERFORM __setup_list_records_table(); + rel_id := 'atable'::regclass::oid; + delete_result := msar.delete_records_from_table( + rel_id, + '["1", "2"]' + ); + RETURN NEXT is(delete_result, 2); + RETURN NEXT results_eq( + 'SELECT id FROM atable ORDER BY id', + $v$VALUES ('3'::integer)$v$ + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION __setup_add_record_table() RETURNS SETOF TEXT AS $$ +BEGIN + PERFORM __setup_list_records_table(); + ALTER TABLE atable ADD UNIQUE (col2); + ALTER TABLE atable ALTER COLUMN col1 SET DEFAULT 200; +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_add_record_to_table_all() RETURNS SETOF TEXT AS $$ +DECLARE + rel_id oid; +BEGIN + PERFORM __setup_add_record_table(); + rel_id := 'atable'::regclass::oid; + RETURN NEXT is( + msar.add_record_to_table( + rel_id, + '{"2": 234, "3": "ab234", "4": {"key": "val"}, "5": {"key2": "val2"}}' + ), + $a${ + "results": [{"1": 4, "2": 234, "3": "ab234", "4": {"key": "val"}, "5": {"key2": "val2"}}], + "linked_record_summaries": null, + "record_summaries": null + }$a$ + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_add_record_to_table_stringified_json() RETURNS SETOF TEXT AS $$ +DECLARE + rel_id oid; +BEGIN + PERFORM __setup_add_record_table(); + rel_id := 'atable'::regclass::oid; + RETURN NEXT is( + msar.add_record_to_table( + rel_id, + '{"2": 234, "3": "ab234", "4": {"key": "val"}, "5": "{\"key2\": \"val2\"}"}' + ), + $a${ + "results": [{"1": 4, "2": 234, "3": "ab234", "4": {"key": "val"}, "5": {"key2": "val2"}}], + "linked_record_summaries": null, + "record_summaries": null + }$a$ + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_add_record_to_table_use_default() RETURNS SETOF TEXT AS $$ +DECLARE + rel_id oid; +BEGIN + PERFORM __setup_add_record_table(); + rel_id := 'atable'::regclass::oid; + RETURN NEXT is( + msar.add_record_to_table( + rel_id, + '{"3": "ab234", "4": {"key": "val"}, "5": {"key2": "val2"}}' + ), + $a${ + "results": [{"1": 4, "2": 200, "3": "ab234", "4": {"key": "val"}, "5": {"key2": "val2"}}], + "linked_record_summaries": null, + "record_summaries": null + }$a$ + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_add_record_to_table_null_default() RETURNS SETOF TEXT AS $$ +DECLARE + rel_id oid; +BEGIN + PERFORM __setup_add_record_table(); + rel_id := 'atable'::regclass::oid; + RETURN NEXT is( + msar.add_record_to_table( + rel_id, + '{"2": null, "3": "ab234", "4": {"key": "val"}, "5": {"key2": "val2"}}' + ), + $a${ + "results": [{"1": 4, "2": null, "3": "ab234", "4": {"key": "val"}, "5": {"key2": "val2"}}], + "linked_record_summaries": null, + "record_summaries": null + }$a$ + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_add_record_to_table_nonobj_json() RETURNS SETOF TEXT AS $$ +DECLARE + rel_id oid; +BEGIN + PERFORM __setup_add_record_table(); + rel_id := 'atable'::regclass::oid; + RETURN NEXT is( + msar.add_record_to_table( + rel_id, + '{"2": null, "3": "ab234", "4": 3, "5": "\"234\""}' + ), + $a${ + "results": [{"1": 4, "2": null, "3": "ab234", "4": 3, "5": "234"}], + "linked_record_summaries": null, + "record_summaries": null + }$a$ + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_patch_record_in_table_single() RETURNS SETOF TEXT AS $$ +DECLARE + rel_id oid; +BEGIN + PERFORM __setup_add_record_table(); + rel_id := 
'atable'::regclass::oid; + RETURN NEXT is( + msar.patch_record_in_table( rel_id, 2, '{"2": 10}'), + $p${ + "results": [{"1": 2, "2": 10, "3": "sdflfflsk", "4": null, "5": [1, 2, 3, 4]}], + "linked_record_summaries": null, + "record_summaries": null + }$p$ + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_patch_record_in_table_string_pk() RETURNS SETOF TEXT AS $$ +DECLARE + rel_id oid; +BEGIN + PERFORM __setup_add_record_table(); + rel_id := 'atable'::regclass::oid; + RETURN NEXT is( + msar.patch_record_in_table( rel_id, '2', '{"2": 10}'), + $p${ + "results": [{"1": 2, "2": 10, "3": "sdflfflsk", "4": null, "5": [1, 2, 3, 4]}], + "linked_record_summaries": null, + "record_summaries": null + }$p$ + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_patch_record_in_table_multi() RETURNS SETOF TEXT AS $$ +DECLARE + rel_id oid; +BEGIN + PERFORM __setup_add_record_table(); + rel_id := 'atable'::regclass::oid; + RETURN NEXT is( + msar.patch_record_in_table( rel_id, 2, '{"2": 10, "4": {"a": "json"}}'), + $p${ + "results": [{"1": 2, "2": 10, "3": "sdflfflsk", "4": {"a": "json"}, "5": [1, 2, 3, 4]}], + "linked_record_summaries": null, + "record_summaries": null + }$p$ + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_patch_record_in_table_leaves_other_rows() RETURNS SETOF TEXT AS $$ +DECLARE + rel_id oid; + patch_result jsonb; +BEGIN + PERFORM __setup_add_record_table(); + rel_id := 'atable'::regclass::oid; + PERFORM msar.patch_record_in_table( rel_id, 2, '{"2": 10}'); + RETURN NEXT results_eq( + 'SELECT id, col1 FROM atable ORDER BY id', + 'VALUES (1, 5), (2, 10), (3, 2)' + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION __setup_add_records_table_only_pk() RETURNS SETOF TEXT AS $$ +BEGIN + CREATE TABLE atable ( + id integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_add_record_to_table_only_pk() RETURNS SETOF TEXT AS $$ +DECLARE + rel_id oid; +BEGIN + PERFORM __setup_add_records_table_only_pk(); + rel_id := 'atable'::regclass::oid; + RETURN NEXT is( + msar.add_record_to_table( + rel_id, + '{}'::jsonb + ), + $a${ + "results": [{"1": 1}], + "linked_record_summaries": null, + "record_summaries": null + }$a$ + ); + RETURN NEXT is( + msar.add_record_to_table( + rel_id, + '{}'::jsonb + ), + $a${ + "results": [{"1": 2}], + "linked_record_summaries": null, + "record_summaries": null + }$a$ + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION __setup_preview_fkey_cols() RETURNS SETOF TEXT AS $$ +BEGIN +CREATE TABLE "Counselors" ( + coid numeric UNIQUE, + "Name" text, + "Email" text +); +CREATE TABLE "Teachers" ( + id integer PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, + "Counselor" numeric REFERENCES "Counselors" (coid), + "Name" text, + "Email" text +); +CREATE TABLE "Students" ( + id integer PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, + "Counselor" numeric REFERENCES "Counselors" (coid), + "Teacher" integer REFERENCES "Teachers" (id), + "Name" text, + "Grade" integer, + "Email" text +); + +INSERT INTO "Counselors" VALUES + (1.234, 'Alice Alison', 'aalison@example.edu'), + (2.345, 'Bob Bobinson', 'bbobinson@example.edu'); + +INSERT INTO "Teachers" ("Counselor", "Name", "Email") VALUES + (1.234, 'Carol Carlson', 'ccarlson@example.edu'), + (2.345, 'Dave Davidson', 'ddavison@example.edu'), + (1.234, 'Eve Evilson', 'eevilson@example.edu'); + +INSERT INTO "Students" ("Counselor", "Teacher", "Name", "Grade", "Email") VALUES + (2.345, 3, 'Fred Fredrickson', 95, 
'ffredrickson@example.edu'), + (1.234, 1, 'Gabby Gabberson', 100, 'ggabberson@example.edu'), + (1.234, 2, 'Hank Hankson', 75, 'hhankson@example.edu'), + (2.345, 1, 'Ida Idalia', 90, 'iidalia@example.edu'), + (2.345, 2, 'James Jameson', 80, 'jjameson@example.edu'), + (1.234, 3, 'Kelly Kellison', 80, 'kkellison@example.edu'); + +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_list_records_with_preview() RETURNS SETOF TEXT AS $$ +DECLARE + list_result jsonb; +BEGIN + PERFORM __setup_preview_fkey_cols(); + RETURN NEXT is( + msar.list_records_from_table( + tab_id => '"Students"'::regclass::oid, + limit_ => null, + offset_ => null, + order_ => null, + filter_ => null, + group_ => null, + return_record_summaries => true + ), + $j${ + "count": 6, + "results": [ + {"1": 1, "2": 2.345, "3": 3, "4": "Fred Fredrickson", "5": 95, "6": "ffredrickson@example.edu"}, + {"1": 2, "2": 1.234, "3": 1, "4": "Gabby Gabberson", "5": 100, "6": "ggabberson@example.edu"}, + {"1": 3, "2": 1.234, "3": 2, "4": "Hank Hankson", "5": 75, "6": "hhankson@example.edu"}, + {"1": 4, "2": 2.345, "3": 1, "4": "Ida Idalia", "5": 90, "6": "iidalia@example.edu"}, + {"1": 5, "2": 2.345, "3": 2, "4": "James Jameson", "5": 80, "6": "jjameson@example.edu"}, + {"1": 6, "2": 1.234, "3": 3, "4": "Kelly Kellison", "5": 80, "6": "kkellison@example.edu"} + ], + "grouping": null, + "linked_record_summaries": { + "2": { + "1.234": "Alice Alison", + "2.345": "Bob Bobinson" + }, + "3": { + "1": "Carol Carlson", + "2": "Dave Davidson", + "3": "Eve Evilson" + } + }, + "record_summaries": { + "1": "Fred Fredrickson", + "2": "Gabby Gabberson", + "3": "Hank Hankson", + "4": "Ida Idalia", + "5": "James Jameson", + "6": "Kelly Kellison" + } + }$j$ || jsonb_build_object( + 'query', concat( + 'SELECT msar.format_data(id) AS "1", msar.format_data("Counselor") AS "2",', + ' msar.format_data("Teacher") AS "3", msar.format_data("Name") AS "4",', + ' msar.format_data("Grade") AS "5", msar.format_data("Email") AS "6"', + ' FROM public."Students" ORDER BY "1" ASC LIMIT NULL OFFSET NULL' + ) + ) + ); + RETURN NEXT is( + msar.list_records_from_table( + tab_id => '"Students"'::regclass::oid, + limit_ => 3, + offset_ => 1, + order_ => null, + filter_ => null, + group_ => null + ), + $j${ + "count": 6, + "results": [ + {"1": 2, "2": 1.234, "3": 1, "4": "Gabby Gabberson", "5": 100, "6": "ggabberson@example.edu"}, + {"1": 3, "2": 1.234, "3": 2, "4": "Hank Hankson", "5": 75, "6": "hhankson@example.edu"}, + {"1": 4, "2": 2.345, "3": 1, "4": "Ida Idalia", "5": 90, "6": "iidalia@example.edu"} + ], + "grouping": null, + "linked_record_summaries": { + "2": { + "1.234": "Alice Alison", + "2.345": "Bob Bobinson" + }, + "3": { + "1": "Carol Carlson", + "2": "Dave Davidson" + } + }, + "record_summaries": null + }$j$ || jsonb_build_object( + 'query', concat( + 'SELECT msar.format_data(id) AS "1", msar.format_data("Counselor") AS "2",', + ' msar.format_data("Teacher") AS "3", msar.format_data("Name") AS "4",', + ' msar.format_data("Grade") AS "5", msar.format_data("Email") AS "6"', + ' FROM public."Students" ORDER BY "1" ASC LIMIT ''3'' OFFSET ''1''' + ) + ) + ); + RETURN NEXT is( + msar.list_records_from_table( + tab_id => '"Students"'::regclass::oid, + limit_ => 2, + offset_ => null, + order_ => '[{"attnum": 2, "direction": "asc"}]', + filter_ => null, + group_ => '{"columns": [2]}' + ), + $j${ + "count": 6, + "results": [ + {"1": 2, "2": 1.234, "3": 1, "4": "Gabby Gabberson", "5": 100, "6": "ggabberson@example.edu"}, + {"1": 3, "2": 1.234, "3": 2, "4": "Hank 
Hankson", "5": 75, "6": "hhankson@example.edu"} + ], + "grouping": { + "columns": [2], + "preproc": null, + "groups": [{"id": 1, "count": 3, "results_eq": {"2": 1.234}, "result_indices": [0, 1]}] + }, + "linked_record_summaries": { + "2": { + "1.234": "Alice Alison" + }, + "3": { + "1": "Carol Carlson", + "2": "Dave Davidson" + } + }, + "record_summaries": null + }$j$ || jsonb_build_object( + 'query', concat( + 'SELECT msar.format_data(id) AS "1", msar.format_data("Counselor") AS "2",', + ' msar.format_data("Teacher") AS "3", msar.format_data("Name") AS "4",', + ' msar.format_data("Grade") AS "5", msar.format_data("Email") AS "6"', + ' FROM public."Students" ORDER BY "2" ASC, "1" ASC LIMIT ''2'' OFFSET NULL' + ) + ) + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_add_record_to_table_with_preview() RETURNS SETOF TEXT AS $$ +BEGIN + PERFORM __setup_preview_fkey_cols(); + RETURN NEXT is( + msar.add_record_to_table( + '"Students"'::regclass::oid, + '{"2": 2.345, "3": 1, "4": "Larry Laurelson", "5": 70, "6": "llaurelson@example.edu"}', + true + ), + $a${ + "results": [ + {"1": 7, "2": 2.345, "3": 1, "4": "Larry Laurelson", "5": 70, "6": "llaurelson@example.edu"} + ], + "linked_record_summaries": { + "2": {"2.345": "Bob Bobinson"}, + "3": {"1": "Carol Carlson"} + }, + "record_summaries": {"7": "Larry Laurelson"} + }$a$ + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_patch_record_in_table_with_preview() RETURNS SETOF TEXT AS $$ +BEGIN + PERFORM __setup_preview_fkey_cols(); + RETURN NEXT is( + msar.patch_record_in_table( + '"Students"'::regclass::oid, + 2, + '{"2": 2.345, "3": 2, "5": 85}' + ), + $a${ + "results": [ + {"1": 2, "2": 2.345, "3": 2, "4": "Gabby Gabberson", "5": 85, "6": "ggabberson@example.edu"} + ], + "linked_record_summaries": { + "2": {"2.345": "Bob Bobinson"}, + "3": {"2": "Dave Davidson"} + }, + "record_summaries": null + }$a$ + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_search_records_in_table_with_preview() RETURNS SETOF TEXT AS $$ +BEGIN + PERFORM __setup_preview_fkey_cols(); + RETURN NEXT is( + msar.search_records_from_table( + '"Students"'::regclass::oid, + '[{"attnum": 4, "literal": "k"}]', + 2 + ) -> 'linked_record_summaries', + $a${ + "2": {"1.234": "Alice Alison", "2.345": "Bob Bobinson"}, + "3": {"3": "Eve Evilson"} + }$a$ + ); +END; +$$ LANGUAGE plpgsql; + + +-- msar.replace_database_privileges_for_roles ------------------------------------------------------ + +CREATE OR REPLACE FUNCTION +test_replace_database_privileges_for_roles_basic() RETURNS SETOF TEXT AS $$/* +Happy path, smoke test. 
+*/ +DECLARE + alice_id oid; + bob_id oid; +BEGIN + CREATE ROLE "Alice"; + CREATE ROLE bob; + alice_id := '"Alice"'::regrole::oid; + bob_id := 'bob'::regrole::oid; + + RETURN NEXT set_eq( + format( + $t1$SELECT jsonb_array_elements_text(direct) FROM jsonb_to_recordset( + msar.replace_database_privileges_for_roles(jsonb_build_array(jsonb_build_object( + 'role_oid', %1$s, 'direct', jsonb_build_array('CONNECT', 'CREATE'))))) + AS x(direct jsonb, role_oid regrole) + WHERE role_oid=%1$s $t1$, + alice_id + ), + ARRAY['CONNECT', 'CREATE'], + 'Response should contain updated role info in correct form' + ); + RETURN NEXT set_eq( + concat( + 'SELECT privilege_type FROM pg_database, LATERAL aclexplode(pg_database.datacl) acl', + format(' WHERE acl.grantee=%s;', alice_id) + ), + ARRAY['CONNECT', 'CREATE'], + 'Privileges should be updated for actual database properly' + ); + RETURN NEXT set_eq( + format( + $t2$SELECT jsonb_array_elements_text(direct) FROM jsonb_to_recordset( + msar.replace_database_privileges_for_roles(jsonb_build_array(jsonb_build_object( + 'role_oid', %1$s, 'direct', jsonb_build_array('CONNECT'))))) + AS x(direct jsonb, role_oid regrole) + WHERE role_oid=%1$s $t2$, + bob_id + ), + ARRAY['CONNECT'], + 'Response should contain updated role info in correct form' + ); + RETURN NEXT set_eq( + concat( + 'SELECT privilege_type FROM pg_database, LATERAL aclexplode(pg_database.datacl) acl', + format(' WHERE acl.grantee=%s;', alice_id) + ), + ARRAY['CONNECT', 'CREATE'], + 'Alice''s privileges should be left alone properly' + ); + RETURN NEXT set_eq( + concat( + 'SELECT privilege_type FROM pg_database, LATERAL aclexplode(pg_database.datacl) acl', + format(' WHERE acl.grantee=%s;', bob_id) + ), + ARRAY['CONNECT'], + 'Privileges should be updated for actual database properly' + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION __setup_alice_and_bob_preloaded() RETURNS SETOF TEXT AS $$ +BEGIN + CREATE ROLE "Alice"; + GRANT CONNECT, CREATE ON DATABASE mathesar_testing TO "Alice"; + CREATE ROLE bob; + GRANT CONNECT ON DATABASE mathesar_testing TO bob; +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION +test_replace_database_privileges_for_roles_multi_ops() RETURNS SETOF TEXT AS $$/* +Test that we can add/revoke multiple privileges to/from multiple roles simultaneously. +*/ +DECLARE + alice_id oid; + bob_id oid; +BEGIN + PERFORM __setup_alice_and_bob_preloaded(); + alice_id := '"Alice"'::regrole::oid; + bob_id := 'bob'::regrole::oid; + RETURN NEXT set_eq( + -- Revoke CREATE from Alice, Grant CREATE to Bob. + format( + $t1$SELECT jsonb_array_elements_text(direct) FROM jsonb_to_recordset( + msar.replace_database_privileges_for_roles(jsonb_build_array( + jsonb_build_object('role_oid', %1$s, 'direct', jsonb_build_array('CONNECT')), + jsonb_build_object('role_oid', %2$s, 'direct', jsonb_build_array('CONNECT', 'CREATE'))))) + AS x(direct jsonb, role_oid regrole) + WHERE role_oid=%1$s $t1$, + alice_id, + bob_id + ), + ARRAY['CONNECT'], -- This only checks form of Alice's info in response. 
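+    -- Bob's resulting grants are verified against pg_database.datacl further down.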
+ 'Response should contain updated role info in correct form' + ); + RETURN NEXT set_eq( + concat( + 'SELECT privilege_type FROM pg_database, LATERAL aclexplode(pg_database.datacl) acl', + format(' WHERE acl.grantee=%s;', alice_id) + ), + ARRAY['CONNECT'], + 'Alice''s privileges should be updated for actual database properly' + ); + RETURN NEXT set_eq( + concat( + 'SELECT privilege_type FROM pg_database, LATERAL aclexplode(pg_database.datacl) acl', + format(' WHERE acl.grantee=%s;', bob_id) + ), + ARRAY['CONNECT', 'CREATE'], + 'Bob''s privileges should be updated for actual database properly' + ); +END; +$$ LANGUAGE plpgsql; + + +-- msar.replace_schema_privileges_for_roles -------------------------------------------------------- + +CREATE OR REPLACE FUNCTION +test_replace_schema_privileges_for_roles_basic() RETURNS SETOF TEXT AS $$/* +Happy path, smoke test. +*/ +DECLARE + schema_id oid; + alice_id oid; + bob_id oid; +BEGIN + CREATE SCHEMA restricted_test; + schema_id := 'restricted_test'::regnamespace::oid; + CREATE ROLE "Alice"; + CREATE ROLE bob; + alice_id := '"Alice"'::regrole::oid; + bob_id := 'bob'::regrole::oid; + + RETURN NEXT set_eq( + format( + $t1$SELECT jsonb_array_elements_text(direct) FROM jsonb_to_recordset( + msar.replace_schema_privileges_for_roles(%2$s, jsonb_build_array(jsonb_build_object( + 'role_oid', %1$s, 'direct', jsonb_build_array('USAGE', 'CREATE'))))) + AS x(direct jsonb, role_oid regrole) + WHERE role_oid=%1$s $t1$, + alice_id, + schema_id + ), + ARRAY['USAGE', 'CREATE'], + 'Response should contain updated role info in correct form' + ); + RETURN NEXT set_eq( + concat( + 'SELECT privilege_type FROM pg_namespace, LATERAL aclexplode(pg_namespace.nspacl) acl', + format(' WHERE acl.grantee=%s;', alice_id) + ), + ARRAY['USAGE', 'CREATE'], + 'Privileges should be updated for actual schema properly' + ); + RETURN NEXT set_eq( + format( + $t2$SELECT jsonb_array_elements_text(direct) FROM jsonb_to_recordset( + msar.replace_schema_privileges_for_roles(%2$s, jsonb_build_array(jsonb_build_object( + 'role_oid', %1$s, 'direct', jsonb_build_array('USAGE'))))) + AS x(direct jsonb, role_oid regrole) + WHERE role_oid=%1$s $t2$, + bob_id, + schema_id + ), + ARRAY['USAGE'], + 'Response should contain updated role info in correct form' + ); + RETURN NEXT set_eq( + concat( + 'SELECT privilege_type FROM pg_namespace, LATERAL aclexplode(pg_namespace.nspacl) acl', + format(' WHERE acl.grantee=%s;', alice_id) + ), + ARRAY['USAGE', 'CREATE'], + 'Alice''s privileges should be left alone properly' + ); + RETURN NEXT set_eq( + concat( + 'SELECT privilege_type FROM pg_namespace, LATERAL aclexplode(pg_namespace.nspacl) acl', + format(' WHERE acl.grantee=%s;', bob_id) + ), + ARRAY['USAGE'], + 'Privileges should be updated for actual schema properly' + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION +test_replace_schema_privileges_for_roles_multi_ops() RETURNS SETOF TEXT AS $$/* +Test that we can add/revoke multiple privileges to/from multiple roles simultaneously. +*/ +DECLARE + schema_id oid; + alice_id oid; + bob_id oid; +BEGIN + CREATE SCHEMA "test Multiops"; + schema_id := '"test Multiops"'::regnamespace::oid; + + CREATE ROLE "Alice"; + CREATE ROLE bob; + alice_id := '"Alice"'::regrole::oid; + bob_id := 'bob'::regrole::oid; + + GRANT USAGE, CREATE ON SCHEMA "test Multiops" TO "Alice"; + GRANT USAGE ON SCHEMA "test Multiops" TO bob; + + RETURN NEXT set_eq( + -- Revoke CREATE from Alice, Grant CREATE to Bob. 
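+    -- (Privileges absent from a role's "direct" list are revoked; new ones are granted.)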
+ format( + $t1$SELECT jsonb_array_elements_text(direct) FROM jsonb_to_recordset( + msar.replace_schema_privileges_for_roles(%3$s, jsonb_build_array( + jsonb_build_object('role_oid', %1$s, 'direct', jsonb_build_array('USAGE')), + jsonb_build_object('role_oid', %2$s, 'direct', jsonb_build_array('USAGE', 'CREATE'))))) + AS x(direct jsonb, role_oid regrole) + WHERE role_oid=%1$s $t1$, + alice_id, + bob_id, + schema_id + ), + ARRAY['USAGE'], -- This only checks form of Alice's info in response. + 'Response should contain updated role info in correct form' + ); + RETURN NEXT set_eq( + concat( + 'SELECT privilege_type FROM pg_namespace, LATERAL aclexplode(pg_namespace.nspacl) acl', + format(' WHERE acl.grantee=%s;', alice_id) + ), + ARRAY['USAGE'], + 'Alice''s privileges should be updated for actual schema properly' + ); + RETURN NEXT set_eq( + concat( + 'SELECT privilege_type FROM pg_namespace, LATERAL aclexplode(pg_namespace.nspacl) acl', + format(' WHERE acl.grantee=%s;', bob_id) + ), + ARRAY['USAGE', 'CREATE'], + 'Bob''s privileges should be updated for actual schema properly' + ); +END; +$$ LANGUAGE plpgsql; + + +-- msar.replace_table_privileges_for_roles -------------------------------------------------------- + +CREATE OR REPLACE FUNCTION +test_replace_table_privileges_for_roles_basic() RETURNS SETOF TEXT AS $$/* +Happy path, smoke test. +*/ +DECLARE + table_id oid; + alice_id oid; + bob_id oid; +BEGIN + CREATE TABLE restricted_table(); + table_id := 'restricted_table'::regclass::oid; + CREATE ROLE "Alice"; + CREATE ROLE bob; + alice_id := '"Alice"'::regrole::oid; + bob_id := 'bob'::regrole::oid; + + RETURN NEXT set_eq( + format( + $t1$SELECT jsonb_array_elements_text(direct) FROM jsonb_to_recordset( + msar.replace_table_privileges_for_roles(%2$s, jsonb_build_array(jsonb_build_object( + 'role_oid', %1$s, 'direct', jsonb_build_array('SELECT', 'UPDATE'))))) + AS x(direct jsonb, role_oid regrole) + WHERE role_oid=%1$s $t1$, + alice_id, + table_id + ), + ARRAY['SELECT', 'UPDATE'], + 'Response should contain updated role info in correct form' + ); + RETURN NEXT set_eq( + concat( + 'SELECT privilege_type FROM pg_class, LATERAL aclexplode(pg_class.relacl) acl', + format(' WHERE acl.grantee=%s;', alice_id) + ), + ARRAY['SELECT', 'UPDATE'], + 'Privileges should be updated for actual table properly' + ); + RETURN NEXT set_eq( + format( + $t2$SELECT jsonb_array_elements_text(direct) FROM jsonb_to_recordset( + msar.replace_table_privileges_for_roles(%2$s, jsonb_build_array(jsonb_build_object( + 'role_oid', %1$s, 'direct', jsonb_build_array('INSERT', 'SELECT', 'DELETE'))))) + AS x(direct jsonb, role_oid regrole) + WHERE role_oid=%1$s $t2$, + bob_id, + table_id + ), + ARRAY['INSERT', 'SELECT', 'DELETE'], + 'Response should contain updated role info in correct form' + ); + RETURN NEXT set_eq( + concat( + 'SELECT privilege_type FROM pg_class, LATERAL aclexplode(pg_class.relacl) acl', + format(' WHERE acl.grantee=%s;', alice_id) + ), + ARRAY['SELECT', 'UPDATE'], + 'Alice''s privileges should be left alone properly' + ); + RETURN NEXT set_eq( + concat( + 'SELECT privilege_type FROM pg_class, LATERAL aclexplode(pg_class.relacl) acl', + format(' WHERE acl.grantee=%s;', bob_id) + ), + ARRAY['INSERT', 'SELECT', 'DELETE'], + 'Privileges should be updated for actual table properly' + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION +test_replace_table_privileges_for_roles_multi_ops() RETURNS SETOF TEXT AS $$/* +Test that we can add/revoke multiple privileges to/from multiple roles simultaneously. 
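+
+For example, a role currently holding INSERT and UPDATE that is handed the
+list ['SELECT', 'DELETE'] should end up with exactly SELECT and DELETE: the
+missing privileges are revoked and the new ones granted in a single call.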
+*/ +DECLARE + table_id oid; + alice_id oid; + bob_id oid; +BEGIN + CREATE TABLE "test Multiops table"(); + table_id := '"test Multiops table"'::regclass::oid; + + CREATE ROLE "Alice"; + CREATE ROLE bob; + alice_id := '"Alice"'::regrole::oid; + bob_id := 'bob'::regrole::oid; + + GRANT SELECT, DELETE ON TABLE "test Multiops table" TO "Alice"; + GRANT INSERT, UPDATE ON TABLE "test Multiops table" TO bob; + + RETURN NEXT set_eq( + -- Grant INSERT, SELECT and UPDATE to Alice, Revoke DELETE. + -- Grant SELECT and DELETE to Bob, Revoke INSERT and UPDATE. + format( + $t1$SELECT jsonb_array_elements_text(direct) FROM jsonb_to_recordset( + msar.replace_table_privileges_for_roles(%3$s, jsonb_build_array( + jsonb_build_object('role_oid', %1$s, 'direct', jsonb_build_array('INSERT', 'SELECT', 'UPDATE')), + jsonb_build_object('role_oid', %2$s, 'direct', jsonb_build_array('SELECT', 'DELETE'))))) + AS x(direct jsonb, role_oid regrole) + WHERE role_oid=%1$s $t1$, + alice_id, + bob_id, + table_id + ), + ARRAY['INSERT', 'SELECT', 'UPDATE'], -- This only checks form of Alice's info in response. + 'Response should contain updated role info in correct form' + ); + RETURN NEXT set_eq( + concat( + 'SELECT privilege_type FROM pg_class, LATERAL aclexplode(pg_class.relacl) acl', + format(' WHERE acl.grantee=%s;', alice_id) + ), + ARRAY['INSERT', 'SELECT', 'UPDATE'], + 'Alice''s privileges should be updated for actual table properly' + ); + RETURN NEXT set_eq( + concat( + 'SELECT privilege_type FROM pg_class, LATERAL aclexplode(pg_class.relacl) acl', + format(' WHERE acl.grantee=%s;', bob_id) + ), + ARRAY['SELECT', 'DELETE'], + 'Bob''s privileges should be updated for actual table properly' + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_list_column_privileges_for_current_role() RETURNS SETOF TEXT AS $$ +DECLARE + tab_id oid; +BEGIN +CREATE TABLE mytab (col1 varchar, col2 varchar); +tab_id := 'mytab'::regclass::oid; +CREATE ROLE test_intern1; +CREATE ROLE test_intern2; +GRANT USAGE ON SCHEMA msar, __msar TO test_intern1, test_intern2; +GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA msar, __msar TO test_intern1, test_intern2; +GRANT SELECT, INSERT (col1) ON TABLE mytab TO test_intern1; +GRANT SELECT (col2) ON TABLE mytab TO test_intern1; +GRANT UPDATE (col1) ON TABLE mytab TO test_intern2; +GRANT UPDATE, REFERENCES (col2) ON TABLE mytab TO test_intern2; + +RETURN NEXT is( + msar.list_column_privileges_for_current_role(tab_id, 1::smallint), + '["SELECT", "INSERT", "UPDATE", "REFERENCES"]' +); +RETURN NEXT is( + msar.list_column_privileges_for_current_role(tab_id, 2::smallint), + '["SELECT", "INSERT", "UPDATE", "REFERENCES"]' +); + +SET ROLE test_intern1; +RETURN NEXT is( + msar.list_column_privileges_for_current_role(tab_id, 1::smallint), + '["SELECT", "INSERT"]' +); +RETURN NEXT is( + msar.list_column_privileges_for_current_role(tab_id, 2::smallint), + '["SELECT"]' +); + +SET ROLE test_intern2; +RETURN NEXT is( + msar.list_column_privileges_for_current_role(tab_id, 1::smallint), + '["UPDATE"]' +); +RETURN NEXT is( + msar.list_column_privileges_for_current_role(tab_id, 2::smallint), + '["UPDATE", "REFERENCES"]' +); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_list_schema_privileges_for_current_role() RETURNS SETOF TEXT AS $$ +DECLARE + sch_id oid; +BEGIN +CREATE SCHEMA restricted; +sch_id := 'restricted'::regnamespace::oid; +CREATE ROLE test_intern1; +CREATE ROLE test_intern2; +GRANT USAGE ON SCHEMA msar, __msar TO test_intern1, test_intern2; +GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA 
msar, __msar TO test_intern1, test_intern2; +GRANT USAGE ON SCHEMA restricted TO test_intern1; +GRANT USAGE, CREATE ON SCHEMA restricted TO test_intern2; + +RETURN NEXT is(msar.list_schema_privileges_for_current_role(sch_id), '["USAGE", "CREATE"]'); + +SET ROLE test_intern1; +RETURN NEXT is(msar.list_schema_privileges_for_current_role(sch_id), '["USAGE"]'); + +SET ROLE test_intern2; +RETURN NEXT is(msar.list_schema_privileges_for_current_role(sch_id), '["USAGE", "CREATE"]'); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_list_table_privileges_for_current_role() RETURNS SETOF TEXT AS $$ +DECLARE + tab_id oid; +BEGIN +CREATE TABLE mytab (col1 varchar); +tab_id := 'mytab'::regclass::oid; +CREATE ROLE test_intern1; +CREATE ROLE test_intern2; +GRANT USAGE ON SCHEMA msar, __msar TO test_intern1, test_intern2; +GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA msar, __msar TO test_intern1, test_intern2; + +GRANT SELECT, INSERT, UPDATE ON TABLE mytab TO test_intern1; +GRANT DELETE, TRUNCATE, REFERENCES, TRIGGER ON TABLE mytab TO test_intern2; + +RETURN NEXT is( + msar.list_table_privileges_for_current_role(tab_id), + '["SELECT", "INSERT", "UPDATE", "DELETE", "TRUNCATE", "REFERENCES", "TRIGGER"]' +); + +SET ROLE test_intern1; +RETURN NEXT is( + msar.list_table_privileges_for_current_role(tab_id), + '["SELECT", "INSERT", "UPDATE"]' +); + +SET ROLE test_intern2; +RETURN NEXT is( + msar.list_table_privileges_for_current_role(tab_id), + '["DELETE", "TRUNCATE", "REFERENCES", "TRIGGER"]' +); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_list_database_privileges_for_current_role() RETURNS SETOF TEXT AS $$ +DECLARE + dat_id oid := oid FROM pg_database WHERE datname=current_database(); +BEGIN +CREATE ROLE test_intern1; +CREATE ROLE test_intern2; +GRANT USAGE ON SCHEMA msar, __msar TO test_intern1, test_intern2; +GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA msar, __msar TO test_intern1, test_intern2; + +REVOKE ALL ON DATABASE mathesar_testing FROM PUBLIC; +GRANT CONNECT, CREATE ON DATABASE mathesar_testing TO test_intern1; +GRANT CONNECT, TEMPORARY ON DATABASE mathesar_testing TO test_intern2; + +RETURN NEXT is( + msar.list_database_privileges_for_current_role(dat_id), + '["CONNECT", "CREATE", "TEMPORARY"]' +); + +SET ROLE test_intern1; +RETURN NEXT is(msar.list_database_privileges_for_current_role(dat_id), '["CONNECT", "CREATE"]'); + +SET ROLE test_intern2; +RETURN NEXT is(msar.list_database_privileges_for_current_role(dat_id), '["CONNECT", "TEMPORARY"]'); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION __setup_move_columns() RETURNS SETOF TEXT AS $$ +BEGIN +-- Authors ----------------------------------------------------------------------------------------- +CREATE TABLE "Authors" ( + id integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + "First Name" text, + "Last Name" text, + "Website" text +); +INSERT INTO "Authors" OVERRIDING SYSTEM VALUE VALUES + (1, 'Edwin A.', 'Abbott', NULL), + (2, 'M.A.S.', 'Abdel Haleem', NULL), + (3, 'Joe', 'Abercrombie', 'https://joeabercrombie.com/'), + (4, 'Daniel', 'Abraham', 'https://www.danielabraham.com/'), + (5, NULL, 'Abu''l-Fazl', NULL); +PERFORM setval(pg_get_serial_sequence('"Authors"', 'id'), (SELECT max(id) FROM "Authors")); +-- colors ------------------------------------------------------------------------------------------ +CREATE TABLE colors (id integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY, name text); +INSERT INTO colors (name) VALUES ('red'), ('blue'); +-- fav_combos 
-------------------------------------------------------------------------------------- +CREATE TABLE fav_combos (number integer, color integer); +ALTER TABLE fav_combos ADD UNIQUE (number, color); +INSERT INTO fav_combos VALUES (5, 1), (5, 2), (10, 1), (10, 2); +-- Books ------------------------------------------------------------------------------------------- +CREATE TABLE "Books" ( + id integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + "Title" text, + "Publication Year" date, + "ISBN" text, + "Dewey Decimal" text, + "Author" integer REFERENCES "Authors"(id), + "Publisher" integer, + "Favorite Number" integer, + "Favorite Color" integer REFERENCES colors(id) +); +ALTER TABLE "Books" DROP COLUMN "Publication Year"; +INSERT INTO "Books" OVERRIDING SYSTEM VALUE VALUES + (1059, 'The History of Akbar, Volume 7', '06-742-4416-8', NULL, 5, 116, 5, 1), + (960, 'The Dragon''s Path', '978-68173-11-59-3', '813.6', 4, 167, 5, 1), + (419, 'Half a King', '0007-55-020-0', '823.92', 3, 113, 5, 1), + (1047, 'The Heroes', '0-3-1604498-9', '823.92', 3, 167, 5, 1), + (103, 'Best Served Cold', '031604-49-5-4', '823.92', 3, 167, 5, 1), + (1302, 'The Widow''s House', '0-31-620398-X', '813.6', 4, 167, 5, NULL), + (99, 'Before They Are Hanged', '1-5910-2641-5', '823.92', 3, 195, 5, 2), + (530, 'Last Argument of Kings', '1591-02-690-3', '823.92', 3, 195, NULL, 1), + (104, 'Best Served Cold', '978-9552-8856-8-1', '823.92', 3, 167, 5, 1), + (1185, 'The Qur''an', '0-19-957071-X', '297.122521', 2, 171, 5, 1), + (1053, 'The History of Akbar, Volume 1', '0-674-42775-0', '954.02', 5, 116, 5, 1), + (959, 'The Dragon''s Path', '978-0-316080-68-2', '813.6', 4, 167, 5, 1), + (1056, 'The History of Akbar, Volume 4', '0-67497-503-0', NULL, 5, 116, 5, 1), + (69, 'A Shadow in Summer', '07-6-531340-5', '813.6', 4, 243, 5, 2), + (907, 'The Blade Itself', '978-1984-1-1636-1', '823.92', 3, 195, 5, 1), + (1086, 'The King''s Blood', '978-03-1608-077-4', '813.6', 4, 167, 5, 1), + (1060, 'The History of Akbar, Volume 8', '0-674-24417-6', NULL, 5, 116, 5, 1), + (70, 'A Shadow in Summer', '978-9-5-7802049-0', '813.6', 4, 243, 5, 2), + (1278, 'The Tyrant''s Law', '0-316-08070-5', '813.6', 4, 167, 5, 1), + (1054, 'The History of Akbar, Volume 2', '0-67-450494-1', NULL, 5, 116, 10, 1), + (1057, 'The History of Akbar, Volume 5', '0-6-7498395-5', NULL, 5, 116, 5, 1), + (351, 'Flatland: A Romance of Many Dimensions', '0-486-27263-X', '530.11', 1, 71, 5, 1), + (729, 'Red Country', '03161-87-20-8', '823.92', 3, 167, 5, 1), + (906, 'The Blade Itself', '1-591-02594-X', '823.92', 3, 195, 5, 1), + (1058, 'The History of Akbar, Volume 6', '067-4-98613-X', NULL, 5, 116, 10, 1), + (1055, 'The History of Akbar, Volume 3', '0-6-7465982-1', NULL, 5, 116, 5, 1); +PERFORM setval(pg_get_serial_sequence('"Books"', 'id'), (SELECT max(id) FROM "Books")); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION __setup_move_columns_nodata() RETURNS SETOF TEXT AS $$ +BEGIN +-- Authors ----------------------------------------------------------------------------------------- +CREATE TABLE "Authors" ( + id integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + "First Name" text, + "Last Name" text, + "Website" text +); +-- colors ------------------------------------------------------------------------------------------ +CREATE TABLE colors (id integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY, name text); +-- fav_combos -------------------------------------------------------------------------------------- +CREATE TABLE fav_combos (number integer, color integer); 
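+-- Schema-only variant of __setup_move_columns above: same tables, but no rows,
+-- and "Favorite Number" additionally carries a single-column UNIQUE constraint.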
+ALTER TABLE fav_combos ADD UNIQUE (number, color); +-- Books ------------------------------------------------------------------------------------------- +CREATE TABLE "Books" ( + id integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + "Title" text, + "Publication Year" date, + "ISBN" text, + "Dewey Decimal" text, + "Author" integer REFERENCES "Authors"(id), + "Publisher" integer, + "Favorite Number" integer UNIQUE, + "Favorite Color" integer REFERENCES colors(id) +); +ALTER TABLE "Books" DROP COLUMN "Publication Year"; +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_move_columns_to_referenced_table_nodata() RETURNS SETOF TEXT AS $$ +BEGIN + PERFORM __setup_move_columns_nodata(); + PERFORM msar.move_columns_to_referenced_table( + '"Books"'::regclass, '"Authors"'::regclass, ARRAY[8, 9]::smallint[] + ); + RETURN NEXT columns_are( + 'Authors', + ARRAY['id', 'First Name', 'Last Name', 'Website', 'Favorite Number', 'Favorite Color'] + ); + RETURN NEXT columns_are( + 'Books', + ARRAY['id', 'Title', 'ISBN', 'Dewey Decimal', 'Author', 'Publisher'] + ); + RETURN NEXT col_is_unique('Authors', 'Favorite Number'); + RETURN NEXT fk_ok('Authors', 'Favorite Color', 'colors', 'id'); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_move_columns_to_referenced_table() RETURNS SETOF TEXT AS $$ +BEGIN + PERFORM __setup_move_columns(); + PERFORM msar.move_columns_to_referenced_table( + '"Books"'::regclass, '"Authors"'::regclass, ARRAY[8, 9]::smallint[] + ); + RETURN NEXT columns_are( + 'Authors', + ARRAY['id', 'First Name', 'Last Name', 'Website', 'Favorite Number', 'Favorite Color'] + ); + RETURN NEXT columns_are( + 'Books', + ARRAY['id', 'Title', 'ISBN', 'Dewey Decimal', 'Author', 'Publisher'] + ); + RETURN NEXT fk_ok('Authors', 'Favorite Color', 'colors', 'id'); + RETURN NEXT results_eq( + $h$SELECT * FROM "Authors" ORDER BY id;$h$, + $w$VALUES + (1, 'Edwin A.', 'Abbott', NULL, 5, 1), + (2, 'M.A.S.', 'Abdel Haleem', NULL, 5, 1), + (3, 'Joe', 'Abercrombie', 'https://joeabercrombie.com/', 5, 1), + (4, 'Daniel', 'Abraham', 'https://www.danielabraham.com/', 5, 1), + (5, NULL, 'Abu''l-Fazl', NULL, 5, 1), + (6, 'Joe', 'Abercrombie', 'https://joeabercrombie.com/', 5, 2), + (7, 'Joe', 'Abercrombie', 'https://joeabercrombie.com/',NULL,1), + (8, 'Daniel', 'Abraham', 'https://www.danielabraham.com/', 5, 2), + (9, 'Daniel', 'Abraham', 'https://www.danielabraham.com/', 5, NULL), + (10, NULL, 'Abu''l-Fazl', NULL, 10, 1); + $w$ + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION __setup_move_columns_multicol_fk() RETURNS SETOF TEXT AS $$ +BEGIN +CREATE TABLE target_table( + id integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + x text, + y integer +); +CREATE TABLE source_table( + id integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + c1 integer, + c2 integer, + c3 integer, + c4 integer REFERENCES target_table(id), + UNIQUE (c1, c2) +); +CREATE TABLE t1 ( + id integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + a integer, + b integer, + c integer, + FOREIGN KEY (b, c) REFERENCES source_table (c1, c2) +); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_move_columns_not_referenced_by_multicol_fk() RETURNS SETOF TEXT AS $$ +BEGIN + PERFORM __setup_move_columns_multicol_fk(); + PERFORM msar.move_columns_to_referenced_table( + 'source_table'::regclass, 'target_table'::regclass, ARRAY[4]::smallint[] + ); + RETURN NEXT columns_are( + 'source_table', + ARRAY['id', 'c1', 'c2', 'c4'] + ); + RETURN NEXT columns_are( + 'target_table', + ARRAY['id', 'x', 'y', 'c3'] + ); 
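+  -- Moving only c3 succeeds: t1's composite FK depends on (c1, c2), which stay behind.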
+END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_move_columns_referenced_by_multicol_fk() RETURNS SETOF TEXT AS $$ +BEGIN + PERFORM __setup_move_columns_multicol_fk(); + RETURN NEXT throws_ok( + $w$SELECT msar.move_columns_to_referenced_table( + 'source_table'::regclass, 'target_table'::regclass, ARRAY[2, 3, 4]::smallint[] + );$w$, + '2BP01', + 'cannot drop column c1 of table source_table because other objects depend on it' + ); + RETURN NEXT columns_are( + 'source_table', + ARRAY['id', 'c1', 'c2', 'c3', 'c4'] + ); + RETURN NEXT columns_are( + 'target_table', + ARRAY['id', 'x', 'y'] + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION __setup_move_columns_singlecol_fk() RETURNS SETOF TEXT AS $$ +BEGIN +CREATE TABLE target_table( + id integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + x text, + y integer +); +CREATE TABLE source_table( + id integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + c1 integer, + c2 integer REFERENCES target_table(id), + UNIQUE (c1) +); +CREATE TABLE t1 ( + id integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + a integer, + b integer, + FOREIGN KEY (b) REFERENCES source_table (c1) +); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_move_columns_referenced_by_singlecol_fk() RETURNS SETOF TEXT AS $$ +BEGIN + PERFORM __setup_move_columns_singlecol_fk(); + RETURN NEXT throws_ok( + $w$SELECT msar.move_columns_to_referenced_table( + 'source_table'::regclass, 'target_table'::regclass, ARRAY[2]::smallint[] + );$w$, + '2BP01', + 'cannot drop column c1 of table source_table because other objects depend on it' + ); + RETURN NEXT columns_are( + 'source_table', + ARRAY['id', 'c1', 'c2'] + ); + RETURN NEXT columns_are( + 'target_table', + ARRAY['id', 'x', 'y'] + ); +END; +$$ LANGUAGE plpgsql; + + +CREATE OR REPLACE FUNCTION test_build_grant_revoke_membership_expr() RETURNS SETOF TEXT AS $$ +BEGIN + CREATE USER "Alice"; + CREATE USER "Bob"; + CREATE USER carol; + RETURN NEXT is( + msar.build_grant_membership_expr('"Alice"'::regrole::oid, ARRAY['"Bob"'::regrole::oid]), + E'GRANT "Alice" TO "Bob";\n' + ); + RETURN NEXT is( + msar.build_grant_membership_expr( + '"Alice"'::regrole::oid, ARRAY['"Bob"'::regrole::oid, 'carol'::regrole::oid] + ), + E'GRANT "Alice" TO "Bob";\nGRANT "Alice" TO carol;\n' + ); + + RETURN NEXT is( + msar.build_revoke_membership_expr('"Alice"'::regrole::oid, ARRAY['"Bob"'::regrole::oid]), + E'REVOKE "Alice" FROM "Bob";\n' + ); + RETURN NEXT is( + msar.build_revoke_membership_expr( + '"Alice"'::regrole::oid, ARRAY['"Bob"'::regrole::oid, 'carol'::regrole::oid] + ), + E'REVOKE "Alice" FROM "Bob";\nREVOKE "Alice" FROM carol;\n' + ); +END; +$$ LANGUAGE plpgsql; diff --git a/db/sql/test_0_msar.sql b/db/sql/test_0_msar.sql deleted file mode 100644 index e21f4712f6..0000000000 --- a/db/sql/test_0_msar.sql +++ /dev/null @@ -1,2081 +0,0 @@ -DROP EXTENSION IF EXISTS pgtap CASCADE; -CREATE EXTENSION IF NOT EXISTS pgtap; - --- msar.drop_columns ------------------------------------------------------------------------------- - -CREATE OR REPLACE FUNCTION setup_drop_columns() RETURNS SETOF TEXT AS $$ -BEGIN - CREATE TABLE atable (dodrop1 integer, dodrop2 integer, dontdrop text); -END; -$$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_drop_columns_oid() RETURNS SETOF TEXT AS $$ -DECLARE - rel_id oid; -BEGIN - rel_id := 'atable'::regclass::oid; - PERFORM msar.drop_columns(rel_id, 1, 2); - RETURN NEXT has_column( - 'atable', 'dontdrop', 'Keeps correct columns' - ); - RETURN NEXT hasnt_column( - 'atable', 'dodrop1', 'Drops 
correct columns 1' - ); - RETURN NEXT hasnt_column( - 'atable', 'dodrop2', 'Drops correct columns 2' - ); -END; -$$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_drop_columns_names() RETURNS SETOF TEXT AS $$ -BEGIN - PERFORM msar.drop_columns('public', 'atable', 'dodrop1', 'dodrop2'); - RETURN NEXT has_column( - 'atable', 'dontdrop', 'Dropper keeps correct columns' - ); - RETURN NEXT hasnt_column( - 'atable', 'dodrop1', 'Dropper drops correct columns 1' - ); - RETURN NEXT hasnt_column( - 'atable', 'dodrop2', 'Dropper drops correct columns 2' - ); -END; -$$ LANGUAGE plpgsql; - - --- msar.drop_table --------------------------------------------------------------------------------- - -CREATE OR REPLACE FUNCTION setup_drop_tables() RETURNS SETOF TEXT AS $$ -BEGIN - CREATE TABLE dropme (id SERIAL PRIMARY KEY, col1 integer); -END; -$$ LANGUAGE plpgsql; - -CREATE OR REPLACE FUNCTION test_drop_table_oid() RETURNS SETOF TEXT AS $$ -DECLARE - rel_id oid; -BEGIN - rel_id := 'dropme'::regclass::oid; - PERFORM msar.drop_table(tab_id => rel_id, cascade_ => false, if_exists => false); - RETURN NEXT hasnt_table('dropme', 'Drops table'); -END; -$$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_drop_table_oid_if_exists() RETURNS SETOF TEXT AS $$ -DECLARE - rel_id oid; -BEGIN - rel_id := 'dropme'::regclass::oid; - PERFORM msar.drop_table(tab_id => rel_id, cascade_ => false, if_exists => true); - RETURN NEXT hasnt_table('dropme', 'Drops table with IF EXISTS'); -END; -$$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_drop_table_oid_restricted_fkey() RETURNS SETOF TEXT AS $$ -DECLARE - rel_id oid; -BEGIN - rel_id := 'dropme'::regclass::oid; - CREATE TABLE - dependent (id SERIAL PRIMARY KEY, col1 integer REFERENCES dropme); - RETURN NEXT throws_ok( - format('SELECT msar.drop_table(tab_id => %s, cascade_ => false, if_exists => true);', rel_id), - '2BP01', - 'cannot drop table dropme because other objects depend on it', - 'Table dropper throws for dependent objects' - ); -END; -$$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_drop_table_oid_cascade_fkey() RETURNS SETOF TEXT AS $$ -DECLARE - rel_id oid; -BEGIN - rel_id := 'dropme'::regclass::oid; - CREATE TABLE - dependent (id SERIAL PRIMARY KEY, col1 integer REFERENCES dropme); - PERFORM msar.drop_table(tab_id => rel_id, cascade_ => true, if_exists => false); - RETURN NEXT hasnt_table('dropme', 'Drops table with dependent using CASCADE'); -END; -$$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_drop_table_name() RETURNS SETOF TEXT AS $$ -BEGIN - PERFORM msar.drop_table( - sch_name => 'public', - tab_name => 'dropme', - cascade_ => false, - if_exists => false - ); - RETURN NEXT hasnt_table('dropme', 'Drops table'); -END; -$$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_drop_table_name_missing_if_exists() RETURNS SETOF TEXT AS $$ -BEGIN - PERFORM msar.drop_table( - sch_name => 'public', - tab_name => 'dropmenew', - cascade_ => false, - if_exists => true - ); - RETURN NEXT has_table('dropme', 'Drops table with IF EXISTS'); -END; -$$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_drop_table_name_missing_no_if_exists() RETURNS SETOF TEXT AS $$ -BEGIN - RETURN NEXT throws_ok( - 'SELECT msar.drop_table(''public'', ''doesntexist'', false, false);', - '42P01', - 'table "doesntexist" does not exist', - 'Table dropper throws for missing table' - ); -END; -$$ LANGUAGE plpgsql; - - --- msar.build_type_text ---------------------------------------------------------------------------- - -CREATE OR REPLACE FUNCTION 
test_build_type_text() RETURNS SETOF TEXT AS $$/* -Note that many type building tests are in the column adding section, to make sure the strings the -function writes are as expected, and also valid type definitions. -*/ - -BEGIN - RETURN NEXT is(msar.build_type_text('{}'), 'text'); - RETURN NEXT is(msar.build_type_text(null), 'text'); - RETURN NEXT is(msar.build_type_text('{"name": "varchar"}'), 'character varying'); - CREATE DOMAIN msar.testtype AS text CHECK (value LIKE '%test'); - RETURN NEXT is( - msar.build_type_text('{"schema": "msar", "name": "testtype"}'), 'msar.testtype' - ); -END; -$$ LANGUAGE plpgsql; - - --- msar.process_col_def_jsonb ---------------------------------------------------------------------- - -CREATE OR REPLACE FUNCTION test_process_col_def_jsonb() RETURNS SETOF TEXT AS $f$ -BEGIN - RETURN NEXT is( - msar.process_col_def_jsonb(0, '[{}, {}]'::jsonb, false), - ARRAY[ - ('"Column 1"', 'text', null, null, false, null), - ('"Column 2"', 'text', null, null, false, null) - ]::__msar.col_def[], - 'Empty columns should result in defaults' - ); - RETURN NEXT is( - msar.process_col_def_jsonb(0, '[{"name": "id"}]'::jsonb, false), - null, - 'Column definition processing should ignore "id" column' - ); - RETURN NEXT is( - msar.process_col_def_jsonb(0, '[{}, {}]'::jsonb, false, true), - ARRAY[ - ('id', 'integer', true, null, true, 'Mathesar default ID column'), - ('"Column 1"', 'text', null, null, false, null), - ('"Column 2"', 'text', null, null, false, null) - ]::__msar.col_def[], - 'Column definition processing add "id" column' - ); - RETURN NEXT is( - msar.process_col_def_jsonb(0, '[{"description": "Some comment"}]'::jsonb, false), - ARRAY[ - ('"Column 1"', 'text', null, null, false, '''Some comment''') - ]::__msar.col_def[], - 'Comments should be sanitized' - ); -END; -$f$ LANGUAGE plpgsql; - - --- msar.add_columns -------------------------------------------------------------------------------- - -CREATE OR REPLACE FUNCTION setup_add_columns() RETURNS SETOF TEXT AS $$ -BEGIN - CREATE TABLE add_col_testable (id serial primary key, col1 integer, col2 varchar); -END; -$$ LANGUAGE plpgsql; - - --- TODO: Figure out a way to parameterize these -CREATE OR REPLACE FUNCTION test_add_columns_fullspec_text() RETURNS SETOF TEXT AS $f$ -DECLARE - col_create_arr jsonb := $j$[ - {"name": "tcol", "type": {"name": "text"}, "not_null": true, "default": "my super default"} - ]$j$; -BEGIN - RETURN NEXT is( - msar.add_columns('add_col_testable'::regclass::oid, col_create_arr), '{4}'::smallint[] - ); - RETURN NEXT col_not_null('add_col_testable', 'tcol'); - RETURN NEXT col_type_is('add_col_testable', 'tcol', 'text'); - RETURN NEXT col_default_is('add_col_testable', 'tcol', 'my super default'); -END; -$f$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_add_columns_minspec_text() RETURNS SETOF TEXT AS $f$ -/* -This tests the default settings. When not given, the defautl column should be nullable and have no -default value. The name should be "Column ", where is the attnum of the added column. 
-*/ -DECLARE - col_create_arr jsonb := '[{"type": {"name": "text"}}]'; -BEGIN - PERFORM msar.add_columns('add_col_testable'::regclass::oid, col_create_arr); - RETURN NEXT col_is_null('add_col_testable', 'Column 4'); - RETURN NEXT col_type_is('add_col_testable', 'Column 4', 'text'); - RETURN NEXT col_hasnt_default('add_col_testable', 'Column 4'); -END; -$f$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_add_columns_comment() RETURNS SETOF TEXT AS $f$ -DECLARE - col_name text := 'tcol'; - description text := 'Some; comment with a semicolon'; - tab_id integer := 'add_col_testable'::regclass::oid; - col_id integer; - col_create_arr jsonb; -BEGIN - col_create_arr := format('[{"name": "%s", "description": "%s"}]', col_name, description); - PERFORM msar.add_columns(tab_id, col_create_arr); - col_id := msar.get_attnum(tab_id, col_name); - RETURN NEXT is( - msar.col_description(tab_id, col_id), - description - ); -END; -$f$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_add_columns_multi_default_name() RETURNS SETOF TEXT AS $f$ -/* -This tests the default settings. When not given, the defautl column should be nullable and have no -default value. The name should be "Column ", where is the attnum of the added column. -*/ -DECLARE - col_create_arr jsonb := '[{"type": {"name": "text"}}, {"type": {"name": "numeric"}}]'; -BEGIN - RETURN NEXT is( - msar.add_columns('add_col_testable'::regclass::oid, col_create_arr), '{4, 5}'::smallint[] - ); - RETURN NEXT col_type_is('add_col_testable', 'Column 4', 'text'); - RETURN NEXT col_type_is('add_col_testable', 'Column 5', 'numeric'); -END; -$f$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_add_columns_numeric_def() RETURNS SETOF TEXT AS $f$ -DECLARE - col_create_arr jsonb := '[{"type": {"name": "numeric"}, "default": 3.14159}]'; -BEGIN - PERFORM msar.add_columns('add_col_testable'::regclass::oid, col_create_arr); - RETURN NEXT col_type_is('add_col_testable', 'Column 4', 'numeric'); - RETURN NEXT col_default_is('add_col_testable', 'Column 4', 3.14159); -END; -$f$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_add_columns_numeric_prec() RETURNS SETOF TEXT AS $f$ -DECLARE - col_create_arr jsonb := '[{"type": {"name": "numeric", "options": {"precision": 3}}}]'; -BEGIN - PERFORM msar.add_columns('add_col_testable'::regclass::oid, col_create_arr); - RETURN NEXT col_type_is('add_col_testable', 'Column 4', 'numeric(3,0)'); -END; -$f$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_add_columns_numeric_prec_scale() RETURNS SETOF TEXT AS $f$ -DECLARE - col_create_arr jsonb := $j$[ - {"type": {"name": "numeric", "options": {"precision": 3, "scale": 2}}} - ]$j$; -BEGIN - PERFORM msar.add_columns('add_col_testable'::regclass::oid, col_create_arr); - RETURN NEXT col_type_is('add_col_testable', 'Column 4', 'numeric(3,2)'); -END; -$f$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_add_columns_caps_numeric() RETURNS SETOF TEXT AS $f$ -DECLARE - col_create_arr jsonb := '[{"type": {"name": "NUMERIC"}}]'; -BEGIN - PERFORM msar.add_columns('add_col_testable'::regclass::oid, col_create_arr); - RETURN NEXT col_type_is('add_col_testable', 'Column 4', 'numeric'); -END; -$f$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_add_columns_varchar_length() RETURNS SETOF TEXT AS $f$ -DECLARE - col_create_arr jsonb := '[{"type": {"name": "varchar", "options": {"length": 128}}}]'; -BEGIN - PERFORM msar.add_columns('add_col_testable'::regclass::oid, col_create_arr); - RETURN NEXT col_type_is('add_col_testable', 'Column 4', 'character 
varying(128)'); -END; -$f$ LANGUAGE plpgsql; - -CREATE OR REPLACE FUNCTION test_add_columns_interval_precision() RETURNS SETOF TEXT AS $f$ -DECLARE - col_create_arr jsonb := '[{"type": {"name": "interval", "options": {"precision": 6}}}]'; -BEGIN - PERFORM msar.add_columns('add_col_testable'::regclass::oid, col_create_arr); - RETURN NEXT col_type_is('add_col_testable', 'Column 4', 'interval(6)'); -END; -$f$ LANGUAGE plpgsql; - - --- upstream pgTAP bug: https://github.com/theory/pgtap/issues/315 --- CREATE OR REPLACE FUNCTION test_add_columns_interval_fields() RETURNS SETOF TEXT AS $f$ --- DECLARE --- col_create_arr jsonb := '[{"type": {"name": "interval", "options": {"fields": "year"}}}]'; --- BEGIN --- PERFORM msar.add_columns('add_col_testable'::regclass::oid, col_create_arr); --- RETURN NEXT col_type_is('add_col_testable', 'Column 4', 'interval year'); --- END; --- $f$ LANGUAGE plpgsql; --- --- --- CREATE OR REPLACE FUNCTION test_add_columns_interval_fields_prec() RETURNS SETOF TEXT AS $f$ --- DECLARE --- col_create_arr jsonb := $j$ --- [{"type": {"name": "interval", "options": {"fields": "second", "precision": 3}}}] --- $j$; --- BEGIN --- PERFORM msar.add_columns('add_col_testable'::regclass::oid, col_create_arr); --- RETURN NEXT col_type_is('add_col_testable', 'Column 4', 'interval second(3)'); --- END; --- $f$ LANGUAGE plpgsql; --- --- --- CREATE OR REPLACE FUNCTION test_add_columns_timestamp_prec() RETURNS SETOF TEXT AS $f$ --- DECLARE --- col_create_arr jsonb := $j$ --- [{"type": {"name": "timestamp", "options": {"precision": 3}}}] --- $j$; --- BEGIN --- PERFORM msar.add_columns('add_col_testable'::regclass::oid, col_create_arr); --- RETURN NEXT col_type_is('add_col_testable', 'Column 4', 'timestamp(3) without time zone'); --- END; --- $f$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_add_columns_timestamp_raw_default() RETURNS SETOF TEXT AS $f$ -/* -This test will fail if the default is being sanitized, but will succeed if it's not. -*/ -DECLARE - col_create_arr jsonb := '[{"type": {"name": "timestamp"}, "default": "now()::timestamp"}]'; -BEGIN - PERFORM msar.add_columns('add_col_testable'::regclass::oid, col_create_arr, raw_default => true); - RETURN NEXT col_type_is('add_col_testable', 'Column 4', 'timestamp without time zone'); - RETURN NEXT col_default_is( - 'add_col_testable', 'Column 4', '(now())::timestamp without time zone' - ); -END; -$f$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_add_columns_sanitize_default() RETURNS SETOF TEXT AS $f$ -/* -This test will succeed if the default is being sanitized, but will fail if it's not. - -It's important to check that we're careful with SQL submitted from python. 
-*/ -DECLARE - col_create_arr jsonb := $j$ - [{"type": {"name": "text"}, "default": "null; drop table add_col_testable"}] - $j$; -BEGIN - PERFORM msar.add_columns('add_col_testable'::regclass::oid, col_create_arr, raw_default => false); - RETURN NEXT has_table('add_col_testable'); -END; -$f$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_add_columns_errors() RETURNS SETOF TEXT AS $f$ -BEGIN - RETURN NEXT throws_ok( - format( - 'SELECT msar.add_columns(tab_id => %s, col_defs => ''%s'');', - 'add_col_testable'::regclass::oid, - '[{"type": {"name": "taxt"}}]'::jsonb - ), - '42704', - 'type "taxt" does not exist' - ); - RETURN NEXT CASE WHEN pg_version_num() < 150000 - THEN throws_ok( - format( - 'SELECT msar.add_columns(tab_id => %s, col_defs => ''%s'');', - 'add_col_testable'::regclass::oid, - '[{"type": {"name": "numeric", "options": {"scale": 23, "precision": 3}}}]'::jsonb - ), - '22023', - 'NUMERIC scale 23 must be between 0 and precision 3' - ) - ELSE skip('Numeric scale can be negative or greater than precision as of v15') - END; -END; -$f$ LANGUAGE plpgsql; - - --- msar.copy_column -------------------------------------------------------------------------------- - -CREATE OR REPLACE FUNCTION setup_copy_column() RETURNS SETOF TEXT AS $$ -BEGIN - CREATE TABLE copy_coltest ( - id SERIAL PRIMARY KEY, - col1 varchar, - col2 varchar NOT NULL, - col3 numeric(5, 3) DEFAULT 5, - col4 timestamp without time zone DEFAULT NOW(), - col5 timestamp without time zone NOT NULL DEFAULT NOW(), - col6 interval second(3), - "col space" varchar - ); - ALTER TABLE copy_coltest ADD UNIQUE (col1, col2); - INSERT INTO copy_coltest VALUES - (DEFAULT, 'abc', 'def', 5.234, '1999-01-08 04:05:06', '1999-01-09 04:05:06', '4:05:06', 'ghi'), - (DEFAULT, 'jkl', 'mno', null, null, '1999-02-08 04:05:06', '3 4:05:07', 'pqr'), - (DEFAULT, null, 'stu', DEFAULT, DEFAULT, DEFAULT, null, 'vwx') - ; -END; -$$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_copy_column_copies_unique() RETURNS SETOF TEXT AS $f$ -BEGIN - PERFORM msar.copy_column( - 'copy_coltest'::regclass::oid, 2::smallint, 'col1 supercopy', true, true - ); - RETURN NEXT col_type_is('copy_coltest', 'col1 supercopy', 'character varying'); - RETURN NEXT col_is_null('copy_coltest', 'col1 supercopy'); - RETURN NEXT col_is_unique('copy_coltest', ARRAY['col1', 'col2']); - RETURN NEXT col_is_unique('copy_coltest', ARRAY['col1 supercopy', 'col2']); - RETURN NEXT results_eq( - 'SELECT "col1 supercopy" FROM copy_coltest ORDER BY id', - $v$VALUES ('abc'::varchar), ('jkl'::varchar), (null)$v$ - ); - RETURN NEXT lives_ok( - $u$UPDATE copy_coltest SET "col1 supercopy"='abc' WHERE "col1 supercopy"='jkl'$u$, - 'Copied col should not have a single column unique constraint' - ); -END; -$f$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_copy_column_copies_unique_and_nnull() RETURNS SETOF TEXT AS $f$ -BEGIN - PERFORM msar.copy_column( - 'copy_coltest'::regclass::oid, 3::smallint, null, true, true - ); - RETURN NEXT col_type_is('copy_coltest', 'col2 1', 'character varying'); - RETURN NEXT col_not_null('copy_coltest', 'col2 1'); - RETURN NEXT col_is_unique('copy_coltest', ARRAY['col1', 'col2']); - RETURN NEXT col_is_unique('copy_coltest', ARRAY['col1', 'col2 1']); - RETURN NEXT results_eq( - 'SELECT "col2 1" FROM copy_coltest', - $v$VALUES ('def'::varchar), ('mno'::varchar), ('stu'::varchar)$v$ - ); - RETURN NEXT lives_ok( - $u$UPDATE copy_coltest SET "col2 1"='def' WHERE "col2 1"='mno'$u$, - 'Copied col should not have a single column unique constraint' - ); 
-END; -$f$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_copy_column_false_copy_data_and_con() RETURNS SETOF TEXT AS $f$ -BEGIN - PERFORM msar.copy_column( - 'copy_coltest'::regclass::oid, 3::smallint, null, false, false - ); - RETURN NEXT col_type_is('copy_coltest', 'col2 1', 'character varying'); - RETURN NEXT col_is_null('copy_coltest', 'col2 1'); - RETURN NEXT col_is_unique('copy_coltest', ARRAY['col1', 'col2']); - RETURN NEXT results_eq( - 'SELECT "col2 1" FROM copy_coltest', - $v$VALUES (null::varchar), (null::varchar), (null::varchar)$v$ - ); -END; -$f$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_copy_column_num_options_static_default() RETURNS SETOF TEXT AS $f$ -BEGIN - PERFORM msar.copy_column( - 'copy_coltest'::regclass::oid, 4::smallint, null, true, false - ); - RETURN NEXT col_type_is('copy_coltest', 'col3 1', 'numeric(5,3)'); - RETURN NEXT col_is_null('copy_coltest', 'col3 1'); - RETURN NEXT col_default_is('copy_coltest', 'col3 1', '5'); - RETURN NEXT results_eq( - 'SELECT "col3 1" FROM copy_coltest', - $v$VALUES (5.234), (null), (5)$v$ - ); -END; -$f$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_copy_column_nullable_dynamic_default() RETURNS SETOF TEXT AS $f$ -BEGIN - PERFORM msar.copy_column( - 'copy_coltest'::regclass::oid, 5::smallint, null, true, false - ); - RETURN NEXT col_type_is('copy_coltest', 'col4 1', 'timestamp without time zone'); - RETURN NEXT col_is_null('copy_coltest', 'col4 1'); - RETURN NEXT col_default_is('copy_coltest', 'col4 1', 'now()'); -END; -$f$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_copy_column_non_null_dynamic_default() RETURNS SETOF TEXT AS $f$ -BEGIN - PERFORM msar.copy_column( - 'copy_coltest'::regclass::oid, 6::smallint, null, true, true - ); - RETURN NEXT col_type_is('copy_coltest', 'col5 1', 'timestamp without time zone'); - RETURN NEXT col_not_null('copy_coltest', 'col5 1'); - RETURN NEXT col_default_is('copy_coltest', 'col5 1', 'now()'); -END; -$f$ LANGUAGE plpgsql; - - --- upstream pgTAP bug: https://github.com/theory/pgtap/issues/315 --- CREATE OR REPLACE FUNCTION test_copy_column_interval_notation() RETURNS SETOF TEXT AS $f$ --- BEGIN --- PERFORM msar.copy_column( --- 'copy_coltest'::regclass::oid, 7::smallint, null, false, false --- ); --- RETURN NEXT col_type_is('copy_coltest', 'col6 1', 'interval second(3)'); --- END; --- $f$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_copy_column_space_name() RETURNS SETOF TEXT AS $f$ -BEGIN - PERFORM msar.copy_column( - 'copy_coltest'::regclass::oid, 8::smallint, null, false, false - ); - RETURN NEXT col_type_is('copy_coltest', 'col space 1', 'character varying'); -END; -$f$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_copy_column_pkey() RETURNS SETOF TEXT AS $f$ -BEGIN - PERFORM msar.copy_column( - 'copy_coltest'::regclass::oid, 1::smallint, null, true, true - ); - RETURN NEXT col_type_is('copy_coltest', 'id 1', 'integer'); - RETURN NEXT col_not_null('copy_coltest', 'id 1'); - RETURN NEXT col_default_is( - 'copy_coltest', 'id 1', $d$nextval('copy_coltest_id_seq'::regclass)$d$ - ); - RETURN NEXT col_is_pk('copy_coltest', 'id'); - RETURN NEXT col_isnt_pk('copy_coltest', 'id 1'); -END; -$f$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_copy_column_increment_name() RETURNS SETOF TEXT AS $f$ -BEGIN - PERFORM msar.copy_column( - 'copy_coltest'::regclass::oid, 2::smallint, null, true, true - ); - RETURN NEXT has_column('copy_coltest', 'col1 1'); - PERFORM msar.copy_column( - 'copy_coltest'::regclass::oid, 2::smallint, null, true, 
true - ); - RETURN NEXT has_column('copy_coltest', 'col1 2'); -END; -$f$ LANGUAGE plpgsql; - --- msar.add_constraints ---------------------------------------------------------------------------- - -CREATE OR REPLACE FUNCTION setup_add_pkey() RETURNS SETOF TEXT AS $$ -BEGIN - CREATE TABLE add_pkeytest (col1 serial, col2 serial, col3 text); - INSERT INTO add_pkeytest (col1, col2, col3) VALUES - (DEFAULT, DEFAULT, 'abc'), - (DEFAULT, DEFAULT, 'def'), - (DEFAULT, DEFAULT, 'abc'), - (DEFAULT, DEFAULT, 'def'), - (DEFAULT, DEFAULT, 'abc'), - (DEFAULT, DEFAULT, 'def'), - (DEFAULT, DEFAULT, 'abc'); -END; -$$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_add_constraint_pkey_id_fullspec() RETURNS SETOF TEXT AS $f$ -DECLARE - con_create_arr jsonb := $j$[ - {"name": "mysuperkey", "type": "p", "columns": [1], "deferrable": true} - ]$j$; - created_name text; - deferrable_ boolean; -BEGIN - PERFORM msar.add_constraints('add_pkeytest'::regclass::oid, con_create_arr); - RETURN NEXT col_is_pk('add_pkeytest', 'col1'); - created_name := conname FROM pg_constraint - WHERE conrelid='add_pkeytest'::regclass::oid AND conkey='{1}'; - RETURN NEXT is(created_name, 'mysuperkey'); - deferrable_ := condeferrable FROM pg_constraint WHERE conname='mysuperkey'; - RETURN NEXT is(deferrable_, true); -END; -$f$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_add_constraint_pkey_id_defname() RETURNS SETOF TEXT AS $f$ -DECLARE - con_create_arr jsonb := '[{"type": "p", "columns": [1]}]'; - created_name text; -BEGIN - PERFORM msar.add_constraints('add_pkeytest'::regclass::oid, con_create_arr); - RETURN NEXT col_is_pk('add_pkeytest', 'col1'); - created_name := conname FROM pg_constraint - WHERE conrelid='add_pkeytest'::regclass::oid AND conkey='{1}'; - RETURN NEXT is(created_name, 'add_pkeytest_pkey'); -END; -$f$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_add_constraint_pkey_id_multicol() RETURNS SETOF TEXT AS $f$ -DECLARE - con_create_arr jsonb := '[{"type": "p", "columns": [1, 2]}]'; - created_name text; -BEGIN - PERFORM msar.add_constraints('add_pkeytest'::regclass::oid, con_create_arr); - RETURN NEXT col_is_pk('add_pkeytest', ARRAY['col1', 'col2']); - created_name := conname FROM pg_constraint - WHERE conrelid='add_pkeytest'::regclass::oid AND conkey='{1, 2}'; - RETURN NEXT is(created_name, 'add_pkeytest_pkey'); -END; -$f$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_add_constraint_pkey_tab_name_singlecol() RETURNS SETOF TEXT AS $f$ -DECLARE - con_create_arr jsonb := '[{"type": "p", "columns": [1]}]'; -BEGIN - PERFORM msar.add_constraints('public', 'add_pkeytest', con_create_arr); - RETURN NEXT col_is_pk('add_pkeytest', 'col1'); -END; -$f$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_add_constraint_pkey_col_name_singlecol() RETURNS SETOF TEXT AS $f$ -DECLARE - con_create_arr jsonb := '[{"type": "p", "columns": ["col1"]}]'; -BEGIN - PERFORM msar.add_constraints('add_pkeytest'::regclass::oid, con_create_arr); - RETURN NEXT col_is_pk('add_pkeytest', 'col1'); -END; -$f$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_add_constraint_pkey_col_name_multicol() RETURNS SETOF TEXT AS $f$ -DECLARE - con_create_arr jsonb := '[{"type": "p", "columns": ["col1", "col2"]}]'; -BEGIN - PERFORM msar.add_constraints('add_pkeytest'::regclass::oid, con_create_arr); - RETURN NEXT col_is_pk('add_pkeytest', ARRAY['col1', 'col2']); -END; -$f$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_add_constraint_pkey_col_mix_multicol() RETURNS SETOF TEXT AS $f$ -DECLARE - con_create_arr jsonb := 
'[{"type": "p", "columns": [1, "col2"]}]'; -BEGIN - PERFORM msar.add_constraints('add_pkeytest'::regclass::oid, con_create_arr); - RETURN NEXT col_is_pk('add_pkeytest', ARRAY['col1', 'col2']); -END; -$f$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION setup_add_fkey() RETURNS SETOF TEXT AS $$ -BEGIN - CREATE TABLE add_fk_users (id serial primary key, fname TEXT, lname TEXT, phoneno TEXT); - INSERT INTO add_fk_users (fname, lname, phoneno) VALUES - ('alice', 'smith', '123 4567'), - ('bob', 'jones', '234 5678'), - ('eve', 'smith', '345 6789'); - CREATE TABLE add_fk_comments (id serial primary key, user_id integer, comment text); - INSERT INTO add_fk_comments (user_id, comment) VALUES - (1, 'aslfkjasfdlkjasdfl'), - (2, 'aslfkjasfdlkjasfl'), - (3, 'aslfkjasfdlkjsfl'), - (1, 'aslfkjasfdlkasdfl'), - (2, 'aslfkjasfkjasdfl'), - (2, 'aslfkjasflkjasdfl'), - (3, 'aslfkjasfdjasdfl'), - (1, 'aslfkjasfkjasdfl'), - (1, 'fkjasfkjasdfl'); -END; -$$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_add_constraint_fkey_id_fullspec() RETURNS SETOF TEXT AS $f$ -DECLARE - con_create_arr jsonb; -BEGIN - con_create_arr := format( - $j$[ - { - "name": "superfkey", - "type": "f", - "columns": [2], - "fkey_relation_id": %s, - "fkey_columns": [1], - "fkey_update_action": "a", - "fkey_delete_action": "a", - "fkey_match_type": "f" - } - ]$j$, 'add_fk_users'::regclass::oid - ); - PERFORM msar.add_constraints('add_fk_comments'::regclass::oid, con_create_arr); - RETURN NEXT fk_ok( - 'public', 'add_fk_comments', 'user_id', 'public', 'add_fk_users', 'id' - ); - RETURN NEXT results_eq( - $h$ - SELECT conname, confupdtype, confdeltype, confmatchtype - FROM pg_constraint WHERE conname='superfkey' - $h$, - $w$VALUES ('superfkey'::name, 'a'::"char", 'a'::"char", 'f'::"char")$w$ - ); -END; -$f$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION fkey_options_eq("char", "char", "char") RETURNS TEXT AS $f$ -DECLARE - con_create_arr jsonb; -BEGIN - con_create_arr := format( - $j$[ - { - "name": "superfkey", - "type": "f", - "columns": [2], - "fkey_relation_id": %s, - "fkey_update_action": "%s", - "fkey_delete_action": "%s", - "fkey_match_type": "%s" - } - ]$j$, - 'add_fk_users'::regclass::oid, $1, $2, $3 - ); - PERFORM msar.add_constraints('add_fk_comments'::regclass::oid, con_create_arr); - RETURN results_eq( - $h$ - SELECT conname, confupdtype, confdeltype, confmatchtype - FROM pg_constraint WHERE conname='superfkey' - $h$, - format( - $w$VALUES ('superfkey'::name, '%s'::"char", '%s'::"char", '%s'::"char")$w$, - $1, $2, $3 - ), - format('Should have confupdtype %s, confdeltype %s, and confmatchtype %s', $1, $2, $3) - ); -END; -$f$ LANGUAGE plpgsql; - - --- Options for fkey delete, update action and match type --- a = no action, r = restrict, c = cascade, n = set null, d = set default --- f = full, s = simple --- Note that partial match is not implemented. 
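For orientation, those option codes map onto standard PostgreSQL foreign key DDL. Below is a minimal sketch (an illustration only, not part of the test suite) of what a triple such as `('c', 'n', 's')` passed to `fkey_options_eq` above would correspond to, reusing the `add_fk_comments`/`add_fk_users` fixtures from `setup_add_fkey`:

```sql
-- Illustration only: the option triple ('c', 'n', 's') used with
-- fkey_options_eq corresponds to DDL along these lines.
ALTER TABLE add_fk_comments
  ADD CONSTRAINT superfkey FOREIGN KEY (user_id)
  REFERENCES add_fk_users (id)
  MATCH SIMPLE         -- confmatchtype 's'
  ON UPDATE CASCADE    -- confupdtype 'c'
  ON DELETE SET NULL;  -- confdeltype 'n'
```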
-
-
-CREATE OR REPLACE FUNCTION test_add_constraints_fkey_opts_aas() RETURNS SETOF TEXT AS $f$
-BEGIN
-  RETURN NEXT fkey_options_eq('a', 'a', 's');
-  RETURN NEXT fk_ok(
-    'public', 'add_fk_comments', 'user_id', 'public', 'add_fk_users', 'id'
-  );
-END;
-$f$ LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION test_add_constraints_fkey_opts_arf() RETURNS SETOF TEXT AS $f$
-BEGIN
-  RETURN NEXT fkey_options_eq('a', 'r', 'f');
-  RETURN NEXT fk_ok(
-    'public', 'add_fk_comments', 'user_id', 'public', 'add_fk_users', 'id'
-  );
-END;
-$f$ LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION test_add_constraints_fkey_opts_rrf() RETURNS SETOF TEXT AS $f$
-BEGIN
-  RETURN NEXT fkey_options_eq('r', 'r', 'f');
-  RETURN NEXT fk_ok(
-    'public', 'add_fk_comments', 'user_id', 'public', 'add_fk_users', 'id'
-  );
-END;
-$f$ LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION test_add_constraints_fkey_opts_ccf() RETURNS SETOF TEXT AS $f$
-BEGIN
-  RETURN NEXT fkey_options_eq('c', 'c', 'f');
-  RETURN NEXT fk_ok(
-    'public', 'add_fk_comments', 'user_id', 'public', 'add_fk_users', 'id'
-  );
-END;
-$f$ LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION test_add_constraints_fkey_opts_nnf() RETURNS SETOF TEXT AS $f$
-BEGIN
-  RETURN NEXT fkey_options_eq('n', 'n', 'f');
-  RETURN NEXT fk_ok(
-    'public', 'add_fk_comments', 'user_id', 'public', 'add_fk_users', 'id'
-  );
-END;
-$f$ LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION test_add_constraints_fkey_opts_ddf() RETURNS SETOF TEXT AS $f$
-BEGIN
-  RETURN NEXT fkey_options_eq('d', 'd', 'f');
-  RETURN NEXT fk_ok(
-    'public', 'add_fk_comments', 'user_id', 'public', 'add_fk_users', 'id'
-  );
-END;
-$f$ LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION setup_add_unique() RETURNS SETOF TEXT AS $$
-BEGIN
-  CREATE TABLE add_unique_con (id serial primary key, col1 integer, col2 integer, col3 integer);
-  INSERT INTO add_unique_con (col1, col2, col3) VALUES
-    (1, 1, 1),
-    (2, 2, 3),
-    (3, 3, 3);
-END;
-$$ LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION test_add_constraints_unique_single() RETURNS SETOF TEXT AS $f$
-DECLARE
-  con_create_arr jsonb := '[{"name": "myuniqcons", "type": "u", "columns": [2]}]';
-BEGIN
-  PERFORM msar.add_constraints('add_unique_con'::regclass::oid, con_create_arr);
-  RETURN NEXT col_is_unique('add_unique_con', ARRAY['col1']);
-END;
-$f$ LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION test_add_constraints_unique_multicol() RETURNS SETOF TEXT AS $f$
-DECLARE
-  con_create_arr jsonb := '[{"name": "myuniqcons", "type": "u", "columns": [2, 3]}]';
-BEGIN
-  PERFORM msar.add_constraints('add_unique_con'::regclass::oid, con_create_arr);
-  RETURN NEXT col_is_unique('add_unique_con', ARRAY['col1', 'col2']);
-END;
-$f$ LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION test_add_constraint_duplicate_name() RETURNS SETOF TEXT AS $f$
-DECLARE
-  con_create_arr jsonb := '[{"name": "myuniqcons", "type": "u", "columns": [2]}]';
-  con_create_arr2 jsonb := '[{"name": "myuniqcons", "type": "u", "columns": [3]}]';
-BEGIN
-  PERFORM msar.add_constraints('add_unique_con'::regclass::oid, con_create_arr);
-  RETURN NEXT throws_ok(
-    format(
-      'SELECT msar.add_constraints(%s, ''%s'');', 'add_unique_con'::regclass::oid, con_create_arr
-    ),
-    '42P07',
-    'relation "myuniqcons" already exists',
-    'Throws error for duplicate 
constraint name'
-  );
-END;
-$f$ LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION setup_copy_unique() RETURNS SETOF TEXT AS $$
-BEGIN
-  CREATE TABLE copy_unique_con
-    (id serial primary key, col1 integer, col2 integer, col3 integer, col4 integer);
-  ALTER TABLE copy_unique_con ADD CONSTRAINT olduniqcon UNIQUE (col1, col2, col3);
-  INSERT INTO copy_unique_con (col1, col2, col3, col4) VALUES
-    (1, 2, 5, 9),
-    (2, 3, 6, 0),
-    (3, 4, 8, 1);
-END;
-$$ LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION test_copy_constraint() RETURNS SETOF TEXT AS $f$
-DECLARE
-  orig_oid oid;
-BEGIN
-  orig_oid := oid
-    FROM pg_constraint
-    WHERE conrelid='copy_unique_con'::regclass::oid AND conname='olduniqcon';
-  PERFORM msar.copy_constraint(orig_oid, 4::smallint, 5::smallint);
-  RETURN NEXT col_is_unique('copy_unique_con', ARRAY['col1', 'col2', 'col4']);
-END;
-$f$ LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION test_add_constraint_errors() RETURNS SETOF TEXT AS $f$
-DECLARE
-  con_create_arr jsonb := '[{"type": "p", "columns": [7]}]'::jsonb;
-BEGIN
-  RETURN NEXT throws_ok(
-    format(
-      'SELECT msar.add_constraints(%s, ''%s'');',
-      'add_pkeytest'::regclass::oid,
-      '[{"type": "p", "columns": [7]}]'::jsonb
-    ),
-    '42601',
-    'syntax error at end of input',
-    'Throws error for nonexistent attnum'
-  );
-  RETURN NEXT throws_ok(
-    format(
-      'SELECT msar.add_constraints(%s, ''%s'');', 234, '[{"type": "p", "columns": [1]}]'::jsonb
-    ),
-    '42601',
-    'syntax error at or near "234"',
-    'Throws error for nonexistent table ID'
-  );
-  RETURN NEXT throws_ok(
-    format(
-      'SELECT msar.add_constraints(%s, ''%s'');',
-      'add_pkeytest'::regclass::oid,
-      '[{"type": "k", "columns": [1]}]'::jsonb
-    ),
-    '42601',
-    'syntax error at end of input',
-    'Throws error for nonexistent constraint type'
-  );
-  RETURN NEXT throws_ok(
-    format(
-      'SELECT msar.add_constraints(%s, ''%s'');',
-      'add_pkeytest'::regclass::oid,
-      '[{"type": "p", "columns": [1, "col1"]}]'::jsonb
-    ),
-    '42701',
-    'column "col1" appears twice in primary key constraint',
-    'Throws error for duplicate pkey col'
-  );
-END;
-$f$ LANGUAGE plpgsql;
-
-
--- msar.drop_constraint ---------------------------------------------------------------------------
-
-CREATE OR REPLACE FUNCTION setup_drop_constraint() RETURNS SETOF TEXT AS $$
-BEGIN
-  CREATE TABLE category(
-    id serial primary key,
-    item_category text,
-    CONSTRAINT uq_cat UNIQUE(item_category)
-  );
-  CREATE TABLE orders (
-    id serial primary key,
-    item_name text,
-    price integer,
-    category_id integer,
-    CONSTRAINT fk_cat FOREIGN KEY(category_id) REFERENCES category(id)
-  );
-END;
-$$ LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION test_drop_constraint() RETURNS SETOF TEXT AS $$
-BEGIN
-  PERFORM msar.drop_constraint(
-    sch_name => 'public',
-    tab_name => 'category',
-    con_name => 'uq_cat'
-  );
-  PERFORM msar.drop_constraint(
-    sch_name => 'public',
-    tab_name => 'orders',
-    con_name => 'fk_cat'
-  );
-  /* There isn't a col_isnt_unique function in pgTAP, so we improvise
-  by inserting two identical values here. */
-  INSERT INTO category(item_category) VALUES ('tech'),('tech');
-  RETURN NEXT col_isnt_fk('orders', 'category_id');
-END;
-$$ LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION test_drop_constraint_using_oid() RETURNS SETOF TEXT AS $$
-DECLARE
-  uq_cat_oid oid;
-  fk_cat_oid oid;
-BEGIN
-  uq_cat_oid := oid FROM pg_constraint WHERE conname='uq_cat';
-  fk_cat_oid := oid FROM pg_constraint WHERE conname='fk_cat';
-  PERFORM msar.drop_constraint(
-    tab_id => 'category'::regclass::oid,
-    con_id => uq_cat_oid
-  );
-  
PERFORM msar.drop_constraint(
-    tab_id => 'orders'::regclass::oid,
-    con_id => fk_cat_oid
-  );
-  /* There isn't a col_isnt_unique function in pgTAP, so we improvise
-  by inserting two identical values here. */
-  INSERT INTO category(item_category) VALUES ('tech'),('tech');
-  RETURN NEXT col_isnt_fk('orders', 'category_id');
-END;
-$$ LANGUAGE plpgsql;
-
-
--- msar.create_link -------------------------------------------------------------------------------
-
-CREATE OR REPLACE FUNCTION setup_link_tables() RETURNS SETOF TEXT AS $$
-BEGIN
-  CREATE TABLE actors (id SERIAL PRIMARY KEY, actor_name text);
-  INSERT INTO actors(actor_name) VALUES
-    ('Cillian Murphy'),
-    ('Leonardo DiCaprio'),
-    ('Margot Robbie'),
-    ('Ryan Gosling'),
-    ('Ana de Armas');
-  CREATE TABLE movies (id SERIAL PRIMARY KEY, movie_name text);
-  INSERT INTO movies(movie_name) VALUES
-    ('The Wolf of Wall Street'),
-    ('Inception'),
-    ('Oppenheimer'),
-    ('Barbie'),
-    ('Blade Runner 2049');
-END;
-$$ LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION test_create_many_to_one_link() RETURNS SETOF TEXT AS $$
-BEGIN
-  PERFORM msar.create_many_to_one_link(
-    frel_id => 'actors'::regclass::oid,
-    rel_id => 'movies'::regclass::oid,
-    col_name => 'act_id'
-  );
-  RETURN NEXT has_column('movies', 'act_id');
-  RETURN NEXT col_type_is('movies', 'act_id', 'integer');
-  RETURN NEXT col_is_fk('movies', 'act_id');
-END;
-$$ LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION test_create_one_to_one_link() RETURNS SETOF TEXT AS $$
-BEGIN
-  PERFORM msar.create_many_to_one_link(
-    frel_id => 'actors'::regclass::oid,
-    rel_id => 'movies'::regclass::oid,
-    col_name => 'act_id',
-    unique_link => true
-  );
-  RETURN NEXT has_column('movies', 'act_id');
-  RETURN NEXT col_type_is('movies', 'act_id', 'integer');
-  RETURN NEXT col_is_fk('movies', 'act_id');
-  RETURN NEXT col_is_unique('movies', 'act_id');
-END;
-$$ LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION test_create_many_to_many_link() RETURNS SETOF TEXT AS $$
-BEGIN
-  PERFORM msar.create_many_to_many_link(
-    sch_id => 'public'::regnamespace::oid,
-    tab_name => 'movies_actors',
-    from_rel_ids => '{}'::oid[] || 'movies'::regclass::oid || 'actors'::regclass::oid,
-    col_names => '{"movie_id", "actor_id"}'::text[]
-  );
-  RETURN NEXT has_table('public'::name, 'movies_actors'::name);
-  RETURN NEXT has_column('movies_actors', 'movie_id');
-  RETURN NEXT col_type_is('movies_actors', 'movie_id', 'integer');
-  RETURN NEXT col_is_fk('movies_actors', 'movie_id');
-  RETURN NEXT has_column('movies_actors', 'actor_id');
-  RETURN NEXT col_type_is('movies_actors', 'actor_id', 'integer');
-  RETURN NEXT col_is_fk('movies_actors', 'actor_id');
-END;
-$$ LANGUAGE plpgsql;
-
-
--- msar.schema_ddl --------------------------------------------------------------------------------
-
-CREATE OR REPLACE FUNCTION test_create_schema() RETURNS SETOF TEXT AS $$
-BEGIN
-  PERFORM msar.create_schema(
-    sch_name => 'create_schema'::text,
-    if_not_exists => false
-  );
-  RETURN NEXT has_schema('create_schema');
-END;
-$$ LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION setup_drop_schema() RETURNS SETOF TEXT AS $$
-BEGIN
-  CREATE SCHEMA drop_test_schema;
-END;
-$$ LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION test_drop_schema_if_exists_false() RETURNS SETOF TEXT AS $$
-BEGIN
-  PERFORM msar.drop_schema(
-    sch_name => 'drop_test_schema',
-    cascade_ => false,
-    if_exists => false
-  );
-  RETURN NEXT hasnt_schema('drop_test_schema');
-  RETURN NEXT throws_ok(
-    format(
-      'SELECT msar.drop_schema(
-        sch_name => ''%s'',
-        cascade_ => false,
-        if_exists 
=> false - );', - 'drop_non_existing_schema' - ), - '3F000', - 'schema "drop_non_existing_schema" does not exist' - ); -END; -$$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_drop_schema_if_exists_true() RETURNS SETOF TEXT AS $$ -BEGIN - PERFORM msar.drop_schema( - sch_name => 'drop_test_schema', - cascade_ => false, - if_exists => true - ); - RETURN NEXT hasnt_schema('drop_test_schema'); - RETURN NEXT lives_ok( - format( - 'SELECT msar.drop_schema( - sch_name => ''%s'', - cascade_ => false, - if_exists => true - );', - 'drop_non_existing_schema' - ) - ); -END; -$$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_drop_schema_using_oid() RETURNS SETOF TEXT AS $$ -BEGIN - PERFORM msar.drop_schema( - sch_id => 'drop_test_schema'::regnamespace::oid, - cascade_ => false, - if_exists => false - ); - RETURN NEXT hasnt_schema('drop_test_schema'); -END; -$$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION setup_schema_with_dependent_obj() RETURNS SETOF TEXT AS $$ -BEGIN - CREATE SCHEMA schema1; - CREATE TABLE schema1.actors ( - id SERIAL PRIMARY KEY, - actor_name TEXT - ); -END; -$$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_drop_schema_cascade() RETURNS SETOF TEXT AS $$ -BEGIN - PERFORM msar.drop_schema( - sch_name => 'schema1', - cascade_ => true, - if_exists => false - ); - RETURN NEXT hasnt_schema('schema1'); -END; -$$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_drop_schema_restricted() RETURNS SETOF TEXT AS $$ -BEGIN - RETURN NEXT throws_ok( - format( - 'SELECT msar.drop_schema( - sch_name => ''%s'', - cascade_ => false, - if_exists => false - );', - 'schema1' - ), - '2BP01', - 'cannot drop schema schema1 because other objects depend on it' - ); -END; -$$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION setup_alter_schema() RETURNS SETOF TEXT AS $$ -BEGIN - CREATE SCHEMA alter_me; -END; -$$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_rename_schema() RETURNS SETOF TEXT AS $$ -BEGIN - PERFORM msar.rename_schema( - old_sch_name => 'alter_me', - new_sch_name => 'altered' - ); - RETURN NEXT hasnt_schema('alter_me'); - RETURN NEXT has_schema('altered'); -END; -$$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_rename_schema_using_oid() RETURNS SETOF TEXT AS $$ -BEGIN - PERFORM msar.rename_schema( - sch_id => 'alter_me'::regnamespace::oid, - new_sch_name => 'altered' - ); - RETURN NEXT hasnt_schema('alter_me'); - RETURN NEXT has_schema('altered'); -END; -$$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_comment_on_schema() RETURNS SETOF TEXT AS $$ -BEGIN - PERFORM msar.comment_on_schema( - sch_name => 'alter_me', - comment_ => 'test comment' - ); - RETURN NEXT is(obj_description('alter_me'::regnamespace::oid), 'test comment'); -END; -$$ LANGUAGE plpgsql; - - --- msar.alter_table - -CREATE OR REPLACE FUNCTION setup_alter_table() RETURNS SETOF TEXT AS $$ -BEGIN - CREATE TABLE alter_this_table(id INT GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, col1 TEXT); -END; -$$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_rename_table() RETURNS SETOF TEXT AS $$ -BEGIN - PERFORM msar.rename_table( - sch_name =>'public', - old_tab_name => 'alter_this_table', - new_tab_name => 'renamed_table' - ); - RETURN NEXT hasnt_table('alter_this_table'); - RETURN NEXT has_table('renamed_table'); -END; -$$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_rename_table_using_oid() RETURNS SETOF TEXT AS $$ -BEGIN - PERFORM msar.rename_table( - tab_id => 'alter_this_table'::regclass::oid, - new_tab_name => 'renamed_table' - ); - RETURN NEXT 
hasnt_table('alter_this_table');
-  RETURN NEXT has_table('renamed_table');
-END;
-$$ LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION test_comment_on_table() RETURNS SETOF TEXT AS $$
-BEGIN
-  PERFORM msar.comment_on_table(
-    sch_name =>'public',
-    tab_name => 'alter_this_table',
-    comment_ => 'This is a comment!'
-  );
-  RETURN NEXT is(obj_description('alter_this_table'::regclass::oid), 'This is a comment!');
-END;
-$$ LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION test_comment_on_table_using_oid() RETURNS SETOF TEXT AS $$
-BEGIN
-  PERFORM msar.comment_on_table(
-    tab_id => 'alter_this_table'::regclass::oid,
-    comment_ => 'This is a comment!'
-  );
-  RETURN NEXT is(obj_description('alter_this_table'::regclass::oid), 'This is a comment!');
-END;
-$$ LANGUAGE plpgsql;
-
-
--- msar.add_mathesar_table
-
-CREATE OR REPLACE FUNCTION setup_create_table() RETURNS SETOF TEXT AS $f$
-BEGIN
-  CREATE SCHEMA tab_create_schema;
-END;
-$f$ LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION test_add_mathesar_table_minimal_id_col() RETURNS SETOF TEXT AS $f$
-BEGIN
-  PERFORM msar.add_mathesar_table(
-    'tab_create_schema'::regnamespace::oid, 'anewtable', null, null, null
-  );
-  RETURN NEXT col_is_pk(
-    'tab_create_schema', 'anewtable', 'id', 'id column should be pkey'
-  );
-  RETURN NEXT results_eq(
-    $q$SELECT attidentity
-    FROM pg_attribute
-    WHERE attrelid='tab_create_schema.anewtable'::regclass::oid and attname='id'$q$,
-    $v$VALUES ('d'::"char")$v$,
-    'id column should be generated by default as identity'
-  );
-END;
-$f$ LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION test_add_mathesar_table_badname() RETURNS SETOF TEXT AS $f$
-DECLARE
-  badname text := $b$M"new"'dsf' \t"$b$;
-BEGIN
-  PERFORM msar.add_mathesar_table(
-    'tab_create_schema'::regnamespace::oid, badname, null, null, null
-  );
-  RETURN NEXT has_table('tab_create_schema'::name, badname::name);
-END;
-$f$ LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION test_add_mathesar_table_columns() RETURNS SETOF TEXT AS $f$
-DECLARE
-  col_defs jsonb := $j$[
-    {"name": "mycolumn", "type": {"name": "numeric"}},
-    {},
-    {"type": {"name": "varchar", "options": {"length": 128}}}
-  ]$j$;
-BEGIN
-  PERFORM msar.add_mathesar_table(
-    'tab_create_schema'::regnamespace::oid,
-    'cols_table',
-    col_defs,
-    null, null
-  );
-  RETURN NEXT col_is_pk(
-    'tab_create_schema', 'cols_table', 'id', 'id column should be pkey'
-  );
-  RETURN NEXT col_type_is(
-    'tab_create_schema'::name, 'cols_table'::name, 'mycolumn'::name, 'numeric'
-  );
-  RETURN NEXT col_type_is(
-    'tab_create_schema'::name, 'cols_table'::name, 'Column 3'::name, 'character varying(128)'
-  );
-END;
-$f$ LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION test_add_mathesar_table_comment() RETURNS SETOF TEXT AS $f$
-DECLARE
-  comment_ text := $c$my "Super;";'; DROP SCHEMA tab_create_schema;'$c$;
-BEGIN
-  PERFORM msar.add_mathesar_table(
-    'tab_create_schema'::regnamespace::oid, 'cols_table', null, null, comment_
-  );
-  RETURN NEXT col_is_pk(
-    'tab_create_schema', 'cols_table', 'id', 'id column should be pkey'
-  );
-  RETURN NEXT is(
-    obj_description('tab_create_schema.cols_table'::regclass::oid),
-    comment_,
-    'created table should have specified description (comment)'
-  );
-END;
-$f$ LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION setup_column_alter() RETURNS SETOF TEXT AS $$
-BEGIN
-  CREATE TABLE col_alters (
-    id integer GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
-    col1 text NOT NULL,
-    col2 numeric DEFAULT 5,
-    "Col sp" text,
-    col_opts numeric(5, 3),
-    coltim timestamp DEFAULT now()
-  );
-END;
-$$ LANGUAGE 
plpgsql;
-
-
-CREATE OR REPLACE FUNCTION test_process_col_alter_jsonb() RETURNS SETOF TEXT AS $f$/*
-These calls don't actually modify the table, so we can run multiple checks in the same test.
-
-We only need to test null/empty behavior here, since the main functionality is tested by testing
-msar.alter_columns
-
-It's debatable whether this test should continue to exist, but it was useful for initial
-development, and runs quickly.
-*/
-DECLARE
-  tab_id oid;
-BEGIN
-  tab_id := 'col_alters'::regclass::oid;
-  RETURN NEXT is(msar.process_col_alter_jsonb(tab_id, '[{"attnum": 2}]'), null);
-  RETURN NEXT is(msar.process_col_alter_jsonb(tab_id, '[{"attnum": 2, "name": "blah"}]'), null);
-  RETURN NEXT is(msar.process_col_alter_jsonb(tab_id, '[]'), null);
-END;
-$f$ LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION test_alter_columns_single_name() RETURNS SETOF TEXT AS $f$
-DECLARE
-  col_alters_jsonb jsonb := '[{"attnum": 2, "name": "blah"}]';
-BEGIN
-  RETURN NEXT is(msar.alter_columns('col_alters'::regclass::oid, col_alters_jsonb), ARRAY[2]);
-  RETURN NEXT columns_are(
-    'col_alters',
-    ARRAY['id', 'blah', 'col2', 'Col sp', 'col_opts', 'coltim']
-  );
-END;
-$f$ LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION test_alter_columns_multi_names() RETURNS SETOF TEXT AS $f$
-DECLARE
-  col_alters_jsonb jsonb := $j$[
-    {"attnum": 2, "name": "new space"},
-    {"attnum": 4, "name": "nospace"}
-  ]$j$;
-BEGIN
-  RETURN NEXT is(msar.alter_columns('col_alters'::regclass::oid, col_alters_jsonb), ARRAY[2, 4]);
-  RETURN NEXT columns_are(
-    'col_alters',
-    ARRAY['id', 'new space', 'col2', 'nospace', 'col_opts', 'coltim']
-  );
-END;
-$f$ LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION test_alter_columns_type() RETURNS SETOF TEXT AS $f$
-DECLARE
-  col_alters_jsonb jsonb := $j$[
-    {"attnum": 2, "type": {"name": "varchar", "options": {"length": 48}}},
-    {"attnum": 3, "type": {"name": "integer"}},
-    {"attnum": 4, "type": {"name": "integer"}}
-  ]$j$;
-BEGIN
-  RETURN NEXT is(msar.alter_columns('col_alters'::regclass::oid, col_alters_jsonb), ARRAY[2, 3, 4]);
-  RETURN NEXT col_type_is('col_alters', 'col1', 'character varying(48)');
-  RETURN NEXT col_type_is('col_alters', 'col2', 'integer');
-  RETURN NEXT col_default_is('col_alters', 'col2', 5);
-  RETURN NEXT col_type_is('col_alters', 'Col sp', 'integer');
-END;
-$f$ LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION test_alter_columns_type_options() RETURNS SETOF TEXT AS $f$
-DECLARE
-  col_alters_jsonb jsonb := $j$[
-    {"attnum": 5, "type": {"options": {"precision": 4}}}
-  ]$j$;
-BEGIN
-  RETURN NEXT is(msar.alter_columns('col_alters'::regclass::oid, col_alters_jsonb), ARRAY[5]);
-  RETURN NEXT col_type_is('col_alters', 'col_opts', 'numeric(4,0)');
-END;
-$f$ LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION test_alter_columns_drop() RETURNS SETOF TEXT AS $f$
-DECLARE
-  col_alters_jsonb jsonb := $j$[
-    {"attnum": 2, "delete": true},
-    {"attnum": 5, "delete": true}
-  ]$j$;
-BEGIN
-  RETURN NEXT is(msar.alter_columns('col_alters'::regclass::oid, col_alters_jsonb), ARRAY[2, 5]);
-  RETURN NEXT columns_are('col_alters', ARRAY['id', 'col2', 'Col sp', 'coltim']);
-END;
-$f$ LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION test_alter_columns_nullable() RETURNS SETOF TEXT AS $f$
-DECLARE
-  col_alters_jsonb jsonb := $j$[
-    {"attnum": 2, "not_null": false},
-    {"attnum": 5, "not_null": true}
-  ]$j$;
-BEGIN
-  RETURN NEXT is(msar.alter_columns('col_alters'::regclass::oid, col_alters_jsonb), ARRAY[2, 5]);
-  RETURN NEXT col_is_null('col_alters', 'col1');
-  RETURN NEXT col_not_null('col_alters', 
'col_opts'); -END; -$f$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_alter_columns_leaves_defaults() RETURNS SETOF TEXT AS $f$ -DECLARE - col_alters_jsonb jsonb := $j$[ - {"attnum": 3, "type": {"name": "integer"}}, - {"attnum": 6, "type": {"name": "date"}} - ]$j$; -BEGIN - RETURN NEXT is(msar.alter_columns('col_alters'::regclass::oid, col_alters_jsonb), ARRAY[3, 6]); - RETURN NEXT col_default_is('col_alters', 'col2', '5'); - RETURN NEXT col_default_is('col_alters', 'coltim', '(now())::date'); -END; -$f$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_alter_columns_drops_defaults() RETURNS SETOF TEXT AS $f$ -DECLARE - col_alters_jsonb jsonb := $j$[ - {"attnum": 3, "default": null}, - {"attnum": 6, "type": {"name": "date"}, "default": null} - ]$j$; -BEGIN - RETURN NEXT is(msar.alter_columns('col_alters'::regclass::oid, col_alters_jsonb), ARRAY[3, 6]); - RETURN NEXT col_hasnt_default('col_alters', 'col2'); - RETURN NEXT col_hasnt_default('col_alters', 'coltim'); -END; -$f$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_alter_columns_sets_defaults() RETURNS SETOF TEXT AS $f$ -DECLARE - col_alters_jsonb jsonb := $j$[ - {"attnum": 2, "default": "test34"}, - {"attnum": 3, "default": 8}, - {"attnum": 5, "type": {"name": "integer"}, "default": 7}, - {"attnum": 6, "type": {"name": "text"}, "default": "test12"} - ]$j$; -BEGIN - RETURN NEXT is( - msar.alter_columns('col_alters'::regclass::oid, col_alters_jsonb), - ARRAY[2, 3, 5, 6] - ); - RETURN NEXT col_default_is('col_alters', 'col1', 'test34'); - RETURN NEXT col_default_is('col_alters', 'col2', '8'); - RETURN NEXT col_default_is('col_alters', 'col_opts', '7'); - RETURN NEXT col_default_is('col_alters', 'coltim', 'test12'); -END; -$f$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_alter_columns_combo() RETURNS SETOF TEXT AS $f$ -DECLARE - col_alters_jsonb jsonb := $j$[ - { - "attnum": 2, - "name": "nullab numeric", - "not_null": false, - "type": {"name": "numeric", "options": {"precision": 8, "scale": 4}}, - "description": "This is; a comment with a semicolon!" - }, - {"attnum": 3, "name": "newcol2"}, - {"attnum": 4, "delete": true}, - {"attnum": 5, "not_null": true}, - {"attnum": 6, "name": "timecol", "not_null": true} - ]$j$; -BEGIN - RETURN NEXT is( - msar.alter_columns('col_alters'::regclass::oid, col_alters_jsonb), ARRAY[2, 3, 4, 5, 6] - ); - RETURN NEXT columns_are( - 'col_alters', ARRAY['id', 'nullab numeric', 'newcol2', 'col_opts', 'timecol'] - ); - RETURN NEXT col_is_null('col_alters', 'nullab numeric'); - RETURN NEXT col_type_is('col_alters', 'nullab numeric', 'numeric(8,4)'); - -- This test checks that nothing funny happened when dropping column 4 - RETURN NEXT col_type_is('col_alters', 'col_opts', 'numeric(5,3)'); - RETURN NEXT col_not_null('col_alters', 'col_opts'); - RETURN NEXT col_not_null('col_alters', 'timecol'); - RETURN NEXT is(msar.col_description('col_alters'::regclass::oid, 2), 'This is; a comment with a semicolon!'); - RETURN NEXT is(msar.col_description('col_alters'::regclass::oid, 3), NULL); -END; -$f$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_comment_on_column() RETURNS SETOF TEXT AS $$ -DECLARE - change1 jsonb := $j$[ - { - "attnum": 2, - "description": "change1col2description" - }, - { - "attnum": 3, - "name": "change1col3name" - } - ]$j$; - change2 jsonb := $j$[ - { - "attnum": 2, - "description": "change2col2description" - }, - { - "attnum": 3, - "description": "change2col3description" - } - ]$j$; - -- Below change should not affect the description. 
- change3 jsonb := $j$[ - { - "attnum": 2, - "name": "change3col2name" - }, - { - "attnum": 3, - "name": "change3col3name" - } - ]$j$; - change4 jsonb := $j$[ - { - "attnum": 2, - "name": "change4col2name", - "description": null - }, - { - "attnum": 3, - "name": "change4col3name" - } - ]$j$; -BEGIN - RETURN NEXT is(msar.col_description('col_alters'::regclass::oid, 2), NULL); - PERFORM msar.alter_columns('col_alters'::regclass::oid, change1); - RETURN NEXT is(msar.col_description('col_alters'::regclass::oid, 2), 'change1col2description'); - PERFORM msar.alter_columns('col_alters'::regclass::oid, change2); - RETURN NEXT is(msar.col_description('col_alters'::regclass::oid, 2), 'change2col2description'); - PERFORM msar.alter_columns('col_alters'::regclass::oid, change3); - RETURN NEXT is(msar.col_description('col_alters'::regclass::oid, 2), 'change2col2description'); - RETURN NEXT is(msar.col_description('col_alters'::regclass::oid, 3), 'change2col3description'); - PERFORM msar.alter_columns('col_alters'::regclass::oid, change4); - RETURN NEXT is(msar.col_description('col_alters'::regclass::oid, 2), NULL); - RETURN NEXT is(msar.col_description('col_alters'::regclass::oid, 3), 'change2col3description'); -END; -$$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION setup_roster() RETURNS SETOF TEXT AS $$ -BEGIN -CREATE TABLE "Roster" ( - id integer PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, - "Student Name" text, - "Teacher" text, - "Teacher Email" text, - "Subject" varchar(20), - "Grade" integer -); -INSERT INTO "Roster" - ("Student Name", "Teacher", "Teacher Email", "Subject", "Grade") -VALUES - ('Stephanie Norris', 'James Jones', 'jamesjones@gmail.com', 'Physics', 43), - ('Stephanie Norris', 'Brooke Bowen', 'brookebowen@yahoo.com', 'P.E.', 37), - ('Stephanie Norris', 'Deanna Juarez', 'deannajuarez@hotmail.com', 'Chemistry', 55), - ('Stephanie Norris', 'Joseph Hill', 'josephhill@gmail.com', 'Biology', 41), - ('Stephanie Norris', 'Julie Garza', 'juliegarza@yahoo.com', 'Physics', 62), - ('Shannon Ramos', 'James Jones', 'jamesjones@gmail.com', 'Math', 44), - ('Shannon Ramos', 'Anna Cortez', 'annacortez@yahoo.com', 'Reading', 56), - ('Shannon Ramos', 'Jennifer Anderson', 'jenniferanderson@yahoo.com', 'Art', 31), - ('Shannon Ramos', 'Amber Hudson', 'amberhudson@hotmail.com', 'Art', 77), - ('Shannon Ramos', 'Michael Harding', 'michaelharding@yahoo.com', 'Music', 40), - ('Tyler Harris', 'James Jones', 'jamesjones@gmail.com', 'Math', 92), - ('Tyler Harris', 'James Mccarthy', 'jamesmccarthy@yahoo.com', 'History', 87), - ('Tyler Harris', 'Brett Bennett', 'brettbennett@gmail.com', 'Reading', 30), - ('Tyler Harris', 'Stephanie Ross', 'stephanieross@yahoo.com', 'Art', 66), - ('Tyler Harris', 'Barbara Riley', 'barbarariley@hotmail.com', 'Chemistry', 81), - ('Lee Henderson', 'Barbara Riley', 'barbarariley@hotmail.com', 'Chemistry', 59), - ('Lee Henderson', 'Krista Ramirez', 'kristaramirez@yahoo.com', 'History', 33), - ('Lee Henderson', 'Brett Bennett', 'brettbennett@gmail.com', 'Reading', 82), - ('Lee Henderson', 'Michael Harding', 'michaelharding@yahoo.com', 'Art', 95), - ('Lee Henderson', 'Danny Davis', 'dannydavis@yahoo.com', 'Reading', 93), - ('Amber Swanson', 'Whitney Figueroa', 'whitneyfigueroa@gmail.com', 'Math', 67), - ('Amber Swanson', 'Michael Harding', 'michaelharding@yahoo.com', 'Art', 62), - ('Amber Swanson', 'Julie Garza', 'juliegarza@yahoo.com', 'Math', 65), - ('Amber Swanson', 'Brooke Bowen', 'brookebowen@yahoo.com', 'History', 47), - ('Amber Swanson', 'Jason Aguilar', 
'jasonaguilar@gmail.com', 'Chemistry', 44), - ('Jeffrey Juarez', 'Brett Bennett', 'brettbennett@gmail.com', 'Writing', 65), - ('Jeffrey Juarez', 'Amber Hudson', 'amberhudson@hotmail.com', 'Art', 57), - ('Jeffrey Juarez', 'Jason Aguilar', 'jasonaguilar@gmail.com', 'Chemistry', 47), - ('Jeffrey Juarez', 'Deanna Juarez', 'deannajuarez@hotmail.com', 'Biology', 73), - ('Jeffrey Juarez', 'Danny Davis', 'dannydavis@yahoo.com', 'Reading', 49), - ('Jennifer Carlson', 'Barbara Riley', 'barbarariley@hotmail.com', 'Biology', 61), - ('Jennifer Carlson', 'Jennifer Anderson', 'jenniferanderson@yahoo.com', 'Art', 68), - ('Jennifer Carlson', 'Brooke Bowen', 'brookebowen@yahoo.com', 'History', 68), - ('Jennifer Carlson', 'Whitney Figueroa', 'whitneyfigueroa@gmail.com', 'Physics', 43), - ('Jennifer Carlson', 'James Mccarthy', 'jamesmccarthy@yahoo.com', 'History', 80), - ('Chelsea Smith', 'Barbara Riley', 'barbarariley@hotmail.com', 'Chemistry', 37), - ('Chelsea Smith', 'Whitney Figueroa', 'whitneyfigueroa@gmail.com', 'Physics', 95), - ('Chelsea Smith', 'Stephanie Ross', 'stephanieross@yahoo.com', 'Art', 49), - ('Chelsea Smith', 'Joseph Hill', 'josephhill@gmail.com', 'Biology', 75), - ('Chelsea Smith', 'Brooke Bowen', 'brookebowen@yahoo.com', 'P.E.', 100), - ('Dana Webb', 'Deanna Juarez', 'deannajuarez@hotmail.com', 'Biology', 87), - ('Dana Webb', 'Michael Harding', 'michaelharding@yahoo.com', 'Music', 87), - ('Dana Webb', 'Barbara Riley', 'barbarariley@hotmail.com', 'Chemistry', 78), - ('Dana Webb', 'Teresa Chambers', 'teresachambers@hotmail.com', 'Math', 34), - ('Dana Webb', 'Danny Davis', 'dannydavis@yahoo.com', 'Reading', 83), - ('Philip Taylor', 'Amber Hudson', 'amberhudson@hotmail.com', 'Music', 39), - ('Philip Taylor', 'Brett Bennett', 'brettbennett@gmail.com', 'Reading', 48), - ('Philip Taylor', 'Joseph Hill', 'josephhill@gmail.com', 'Biology', 84), - ('Philip Taylor', 'Joseph Hill', 'josephhill@gmail.com', 'Chemistry', 26), - ('Philip Taylor', 'Teresa Chambers', 'teresachambers@hotmail.com', 'Math', 92), - ('Christopher Bell', 'Danny Davis', 'dannydavis@hotmail.com', 'Writing', 96), - ('Christopher Bell', 'James Mccarthy', 'jamesmccarthy@yahoo.com', 'History', 74), - ('Christopher Bell', 'Barbara Riley', 'barbarariley@hotmail.com', 'Biology', 64), - ('Christopher Bell', 'Amber Hudson', 'amberhudson@hotmail.com', 'Music', 83), - ('Christopher Bell', 'Stephanie Ross', 'stephanieross@yahoo.com', 'Art', 90), - ('Stacy Barnett', 'Barbara Riley', 'barbarariley@hotmail.com', 'Biology', 55), - ('Stacy Barnett', 'Danny Davis', 'dannydavis@yahoo.com', 'Reading', 99), - ('Stacy Barnett', 'Stephanie Ross', 'stephanieross@yahoo.com', 'Art', 70), - ('Stacy Barnett', 'Teresa Chambers', 'teresachambers@gmail.com', 'Physics', 78), - ('Stacy Barnett', 'Jean Hayes DVM', 'jeanhayesdvm@hotmail.com', 'P.E.', 72), - ('Mary Carroll', 'Brooke Bowen', 'brookebowen@yahoo.com', 'History', 73), - ('Mary Carroll', 'Stephanie Ross', 'stephanieross@yahoo.com', 'Art', 87), - ('Mary Carroll', 'Grant Mcdonald', 'grantmcdonald@gmail.com', 'Writing', 37), - ('Mary Carroll', 'Krista Ramirez', 'kristaramirez@yahoo.com', 'P.E.', 98), - ('Mary Carroll', 'Brett Bennett', 'brettbennett@gmail.com', 'Writing', 57), - ('Susan Hoover', 'Deanna Juarez', 'deannajuarez@hotmail.com', 'Chemistry', 41), - ('Susan Hoover', 'Brett Bennett', 'brettbennett@gmail.com', 'Reading', 77), - ('Susan Hoover', 'Amber Hudson', 'amberhudson@hotmail.com', 'Music', 48), - ('Susan Hoover', 'Krista Ramirez', 'kristaramirez@yahoo.com', 'History', 41), - ('Susan Hoover', 
'Stephanie Ross', 'stephanieross@yahoo.com', 'Art', 89), - ('Jennifer Park', 'Danny Davis', 'dannydavis@yahoo.com', 'Reading', 96), - ('Jennifer Park', 'James Mccarthy', 'jamesmccarthy@yahoo.com', 'History', 25), - ('Jennifer Park', 'Deanna Juarez', 'deannajuarez@hotmail.com', 'Chemistry', 43), - ('Jennifer Park', 'Jason Aguilar', 'jasonaguilar@gmail.com', 'Biology', 50), - ('Jennifer Park', 'Barbara Riley', 'barbarariley@hotmail.com', 'Chemistry', 82), - ('Jennifer Ortiz', 'Barbara Riley', 'barbarariley@hotmail.com', 'Chemistry', 94), - ('Jennifer Ortiz', 'Jason Aguilar', 'jasonaguilar@gmail.com', 'Chemistry', 26), - ('Jennifer Ortiz', 'Teresa Chambers', 'teresachambers@hotmail.com', 'Math', 28), - ('Jennifer Ortiz', 'Barbara Riley', 'barbarariley@hotmail.com', 'Biology', 33), - ('Jennifer Ortiz', 'Anna Cortez', 'annacortez@yahoo.com', 'Writing', 98), - ('Robert Lamb', 'Krista Ramirez', 'kristaramirez@yahoo.com', 'History', 89), - ('Robert Lamb', 'Deanna Juarez', 'deannajuarez@hotmail.com', 'Chemistry', 99), - ('Robert Lamb', 'Barbara Riley', 'barbarariley@hotmail.com', 'Chemistry', 55), - ('Robert Lamb', 'Anna Cortez', 'annacortez@yahoo.com', 'Writing', 32), - ('Robert Lamb', 'Jason Aguilar', 'jasonaguilar@gmail.com', 'Biology', 83), - ('Judy Martinez', 'Danny Davis', 'dannydavis@hotmail.com', 'Writing', 99), - ('Judy Martinez', 'Grant Mcdonald', 'grantmcdonald@gmail.com', 'Writing', 59), - ('Judy Martinez', 'Grant Mcdonald', 'grantmcdonald@hotmail.com', 'Reading', 66), - ('Judy Martinez', 'Jean Hayes DVM', 'jeanhayesdvm@hotmail.com', 'P.E.', 83), - ('Judy Martinez', 'Teresa Chambers', 'teresachambers@hotmail.com', 'Math', 75), - ('Christy Meyer', 'Teresa Chambers', 'teresachambers@hotmail.com', 'Math', 60), - ('Christy Meyer', 'Barbara Riley', 'barbarariley@hotmail.com', 'Chemistry', 90), - ('Christy Meyer', 'Brett Bennett', 'brettbennett@gmail.com', 'Writing', 72), - ('Christy Meyer', 'Joseph Hill', 'josephhill@gmail.com', 'Biology', 37), - ('Christy Meyer', 'Stephanie Ross', 'stephanieross@yahoo.com', 'Art', 78), - ('Evelyn Anderson', 'Brett Bennett', 'brettbennett@gmail.com', 'Writing', 64), - ('Evelyn Anderson', 'Jean Hayes DVM', 'jeanhayesdvm@hotmail.com', 'History', 68), - ('Evelyn Anderson', 'Danny Davis', 'dannydavis@yahoo.com', 'Reading', 49), - ('Evelyn Anderson', 'Amber Hudson', 'amberhudson@hotmail.com', 'Art', 42), - ('Evelyn Anderson', 'Krista Ramirez', 'kristaramirez@yahoo.com', 'History', 95), - ('Bethany Bell', 'Michael Harding', 'michaelharding@yahoo.com', 'Art', 36), - ('Bethany Bell', 'Julie Garza', 'juliegarza@yahoo.com', 'Physics', 62), - ('Bethany Bell', 'James Mccarthy', 'jamesmccarthy@yahoo.com', 'History', 50), - ('Bethany Bell', 'Grant Mcdonald', 'grantmcdonald@gmail.com', 'Writing', 93), - ('Bethany Bell', 'Deanna Juarez', 'deannajuarez@hotmail.com', 'Chemistry', 73), - ('Leslie Hart', 'Grant Mcdonald', 'grantmcdonald@gmail.com', 'Writing', 45), - ('Leslie Hart', 'Amber Hudson', 'amberhudson@hotmail.com', 'Music', 79), - ('Leslie Hart', 'Krista Ramirez', 'kristaramirez@yahoo.com', 'P.E.', 57), - ('Leslie Hart', 'Stephanie Ross', 'stephanieross@yahoo.com', 'Music', 76), - ('Leslie Hart', 'James Jones', 'jamesjones@gmail.com', 'Math', 75), - ('Carolyn Durham', 'James Mccarthy', 'jamesmccarthy@yahoo.com', 'P.E.', 60), - ('Carolyn Durham', 'Stephanie Ross', 'stephanieross@yahoo.com', 'Music', 28), - ('Carolyn Durham', 'Barbara Riley', 'barbarariley@hotmail.com', 'Biology', 25), - ('Carolyn Durham', 'Grant Mcdonald', 'grantmcdonald@hotmail.com', 'Reading', 49), - 
('Carolyn Durham', 'Whitney Figueroa', 'whitneyfigueroa@gmail.com', 'Physics', 69), - ('Daniel Martin', 'Michael Harding', 'michaelharding@yahoo.com', 'Music', 60), - ('Daniel Martin', 'Krista Ramirez', 'kristaramirez@yahoo.com', 'P.E.', 32), - ('Daniel Martin', 'Anna Cortez', 'annacortez@yahoo.com', 'Reading', 75), - ('Daniel Martin', 'Julie Garza', 'juliegarza@yahoo.com', 'Physics', 78), - ('Daniel Martin', 'Barbara Riley', 'barbarariley@hotmail.com', 'Biology', 74), - ('Jessica Jackson', 'Danny Davis', 'dannydavis@hotmail.com', 'Writing', 34), - ('Jessica Jackson', 'Whitney Figueroa', 'whitneyfigueroa@gmail.com', 'Math', 78), - ('Jessica Jackson', 'Jason Aguilar', 'jasonaguilar@gmail.com', 'Chemistry', 67), - ('Jessica Jackson', 'Joseph Hill', 'josephhill@gmail.com', 'Biology', 68), - ('Jessica Jackson', 'James Mccarthy', 'jamesmccarthy@yahoo.com', 'History', 88), - ('Stephanie Mendez', 'Brooke Bowen', 'brookebowen@yahoo.com', 'History', 93), - ('Stephanie Mendez', 'Michael Harding', 'michaelharding@yahoo.com', 'Art', 73), - ('Stephanie Mendez', 'Jennifer Anderson', 'jenniferanderson@yahoo.com', 'Art', 27), - ('Stephanie Mendez', 'Teresa Chambers', 'teresachambers@gmail.com', 'Physics', 41), - ('Stephanie Mendez', 'Grant Mcdonald', 'grantmcdonald@hotmail.com', 'Reading', 98), - ('Kevin Griffith', 'Joseph Hill', 'josephhill@gmail.com', 'Chemistry', 54), - ('Kevin Griffith', 'Michael Harding', 'michaelharding@yahoo.com', 'Music', 57), - ('Kevin Griffith', 'Barbara Riley', 'barbarariley@hotmail.com', 'Chemistry', 92), - ('Kevin Griffith', 'Stephanie Ross', 'stephanieross@yahoo.com', 'Art', 82), - ('Kevin Griffith', 'Krista Ramirez', 'kristaramirez@yahoo.com', 'History', 48), - ('Debra Johnson', 'Barbara Riley', 'barbarariley@hotmail.com', 'Biology', 38), - ('Debra Johnson', 'Krista Ramirez', 'kristaramirez@yahoo.com', 'P.E.', 44), - ('Debra Johnson', 'Jean Hayes DVM', 'jeanhayesdvm@hotmail.com', 'History', 32), - ('Debra Johnson', 'Teresa Chambers', 'teresachambers@hotmail.com', 'Math', 32), - ('Debra Johnson', 'Michael Harding', 'michaelharding@yahoo.com', 'Art', 41), - ('Mark Frazier', 'Joseph Hill', 'josephhill@gmail.com', 'Biology', 78), - ('Mark Frazier', 'Amber Hudson', 'amberhudson@hotmail.com', 'Art', 25), - ('Mark Frazier', 'Julie Garza', 'juliegarza@yahoo.com', 'Math', 93), - ('Mark Frazier', 'Danny Davis', 'dannydavis@yahoo.com', 'Reading', 98), - ('Mark Frazier', 'Jennifer Anderson', 'jenniferanderson@yahoo.com', 'Music', 75), - ('Jessica Jones', 'Anna Cortez', 'annacortez@yahoo.com', 'Reading', 34), - ('Jessica Jones', 'Michael Harding', 'michaelharding@yahoo.com', 'Art', 46), - ('Jessica Jones', 'Grant Mcdonald', 'grantmcdonald@gmail.com', 'Writing', 95), - ('Jessica Jones', 'James Mccarthy', 'jamesmccarthy@yahoo.com', 'History', 41), - ('Jessica Jones', 'Deanna Juarez', 'deannajuarez@hotmail.com', 'Chemistry', 97), - ('Brandon Robinson', 'James Mccarthy', 'jamesmccarthy@yahoo.com', 'P.E.', 38), - ('Brandon Robinson', 'Jason Aguilar', 'jasonaguilar@gmail.com', 'Chemistry', 64), - ('Brandon Robinson', 'Grant Mcdonald', 'grantmcdonald@gmail.com', 'Writing', 53), - ('Brandon Robinson', 'Joseph Hill', 'josephhill@gmail.com', 'Chemistry', 56), - ('Brandon Robinson', 'Anna Cortez', 'annacortez@yahoo.com', 'Reading', 39), - ('Timothy Lowe', 'Krista Ramirez', 'kristaramirez@yahoo.com', 'P.E.', 43), - ('Timothy Lowe', 'Stephanie Ross', 'stephanieross@yahoo.com', 'Music', 74), - ('Timothy Lowe', 'James Mccarthy', 'jamesmccarthy@yahoo.com', 'History', 62), - ('Timothy Lowe', 'Teresa 
Chambers', 'teresachambers@hotmail.com', 'Math', 99), - ('Timothy Lowe', 'Grant Mcdonald', 'grantmcdonald@gmail.com', 'Writing', 76), - ('Samantha Rivera', 'James Jones', 'jamesjones@gmail.com', 'Math', 38), - ('Samantha Rivera', 'Joseph Hill', 'josephhill@gmail.com', 'Biology', 34), - ('Samantha Rivera', 'Stephanie Ross', 'stephanieross@yahoo.com', 'Art', 55), - ('Samantha Rivera', 'Jean Hayes DVM', 'jeanhayesdvm@hotmail.com', 'P.E.', 91), - ('Samantha Rivera', 'Danny Davis', 'dannydavis@yahoo.com', 'Reading', 35), - ('Matthew Brown', 'Jennifer Anderson', 'jenniferanderson@yahoo.com', 'Art', 37), - ('Matthew Brown', 'Whitney Figueroa', 'whitneyfigueroa@gmail.com', 'Math', 59), - ('Matthew Brown', 'James Jones', 'jamesjones@gmail.com', 'Math', 83), - ('Matthew Brown', 'Jason Aguilar', 'jasonaguilar@gmail.com', 'Chemistry', 100), - ('Matthew Brown', 'Michael Harding', 'michaelharding@yahoo.com', 'Music', 40), - ('Mary Gonzalez', 'Deanna Juarez', 'deannajuarez@hotmail.com', 'Chemistry', 30), - ('Mary Gonzalez', 'Krista Ramirez', 'kristaramirez@yahoo.com', 'P.E.', 50), - ('Mary Gonzalez', 'Jean Hayes DVM', 'jeanhayesdvm@hotmail.com', 'History', 52), - ('Mary Gonzalez', 'Brooke Bowen', 'brookebowen@yahoo.com', 'P.E.', 94), - ('Mary Gonzalez', 'James Jones', 'jamesjones@gmail.com', 'Physics', 39), - ('Mr. Patrick Weber MD', 'James Mccarthy', 'jamesmccarthy@yahoo.com', 'P.E.', 58), - ('Mr. Patrick Weber MD', 'Brooke Bowen', 'brookebowen@yahoo.com', 'History', 31), - ('Mr. Patrick Weber MD', 'Jennifer Anderson', 'jenniferanderson@yahoo.com', 'Art', 73), - ('Mr. Patrick Weber MD', 'Michael Harding', 'michaelharding@yahoo.com', 'Music', 72), - ('Mr. Patrick Weber MD', 'Julie Garza', 'juliegarza@yahoo.com', 'Math', 51), - ('Jill Walker', 'Stephanie Ross', 'stephanieross@yahoo.com', 'Music', 43), - ('Jill Walker', 'Brett Bennett', 'brettbennett@gmail.com', 'Writing', 80), - ('Jill Walker', 'Michael Harding', 'michaelharding@yahoo.com', 'Art', 25), - ('Jill Walker', 'Whitney Figueroa', 'whitneyfigueroa@gmail.com', 'Math', 39), - ('Jill Walker', 'James Mccarthy', 'jamesmccarthy@yahoo.com', 'History', 70), - ('Jacob Higgins', 'Teresa Chambers', 'teresachambers@gmail.com', 'Physics', 95), - ('Jacob Higgins', 'Barbara Riley', 'barbarariley@hotmail.com', 'Chemistry', 88), - ('Jacob Higgins', 'Brooke Bowen', 'brookebowen@yahoo.com', 'History', 47), - ('Jacob Higgins', 'Grant Mcdonald', 'grantmcdonald@hotmail.com', 'Reading', 59), - ('Jacob Higgins', 'Jason Aguilar', 'jasonaguilar@gmail.com', 'Chemistry', 53), - ('Paula Thompson', 'Jason Aguilar', 'jasonaguilar@gmail.com', 'Biology', 52), - ('Paula Thompson', 'Anna Cortez', 'annacortez@yahoo.com', 'Reading', 42), - ('Paula Thompson', 'Whitney Figueroa', 'whitneyfigueroa@gmail.com', 'Physics', 98), - ('Paula Thompson', 'Amber Hudson', 'amberhudson@hotmail.com', 'Art', 28), - ('Paula Thompson', 'Deanna Juarez', 'deannajuarez@hotmail.com', 'Chemistry', 53), - ('Tyler Phelps', 'Amber Hudson', 'amberhudson@hotmail.com', 'Music', 33), - ('Tyler Phelps', 'Brett Bennett', 'brettbennett@gmail.com', 'Writing', 91), - ('Tyler Phelps', 'Deanna Juarez', 'deannajuarez@hotmail.com', 'Chemistry', 81), - ('Tyler Phelps', 'Joseph Hill', 'josephhill@gmail.com', 'Chemistry', 30), - ('Tyler Phelps', 'James Mccarthy', 'jamesmccarthy@yahoo.com', 'History', 86), - ('John Schaefer', 'Whitney Figueroa', 'whitneyfigueroa@gmail.com', 'Physics', 44), - ('John Schaefer', 'Joseph Hill', 'josephhill@gmail.com', 'Biology', 69), - ('John Schaefer', 'Anna Cortez', 'annacortez@yahoo.com', 
'Writing', 80), - ('John Schaefer', 'Danny Davis', 'dannydavis@yahoo.com', 'Reading', 69), - ('John Schaefer', 'Joseph Hill', 'josephhill@gmail.com', 'Chemistry', 45), - ('Eric Kerr', 'Brooke Bowen', 'brookebowen@yahoo.com', 'P.E.', 45), - ('Eric Kerr', 'Teresa Chambers', 'teresachambers@hotmail.com', 'Math', 90), - ('Eric Kerr', 'Krista Ramirez', 'kristaramirez@yahoo.com', 'P.E.', 50), - ('Eric Kerr', 'Anna Cortez', 'annacortez@yahoo.com', 'Writing', 92), - ('Eric Kerr', 'Stephanie Ross', 'stephanieross@yahoo.com', 'Art', 77), - ('Mikayla Miller', 'Julie Garza', 'juliegarza@yahoo.com', 'Physics', 61), - ('Mikayla Miller', 'Brett Bennett', 'brettbennett@gmail.com', 'Writing', 30), - ('Mikayla Miller', 'Jennifer Anderson', 'jenniferanderson@yahoo.com', 'Art', 88), - ('Mikayla Miller', 'Deanna Juarez', 'deannajuarez@hotmail.com', 'Biology', 68), - ('Mikayla Miller', 'Anna Cortez', 'annacortez@yahoo.com', 'Writing', 41), - ('Alejandro Lam', 'Stephanie Ross', 'stephanieross@yahoo.com', 'Music', 48), - ('Alejandro Lam', 'Michael Harding', 'michaelharding@yahoo.com', 'Art', 40), - ('Alejandro Lam', 'Krista Ramirez', 'kristaramirez@yahoo.com', 'P.E.', 40), - ('Alejandro Lam', 'Deanna Juarez', 'deannajuarez@hotmail.com', 'Chemistry', 49), - ('Alejandro Lam', 'Barbara Riley', 'barbarariley@hotmail.com', 'Chemistry', 49), - ('Katelyn Ray', 'Danny Davis', 'dannydavis@yahoo.com', 'Reading', 60), - ('Katelyn Ray', 'Grant Mcdonald', 'grantmcdonald@hotmail.com', 'Reading', 65), - ('Katelyn Ray', 'Julie Garza', 'juliegarza@yahoo.com', 'Math', 82), - ('Katelyn Ray', 'Barbara Riley', 'barbarariley@hotmail.com', 'Chemistry', 70), - ('Katelyn Ray', 'Jason Aguilar', 'jasonaguilar@gmail.com', 'Biology', 59), - ('Carla Rivera', 'Amber Hudson', 'amberhudson@hotmail.com', 'Music', 67), - ('Carla Rivera', 'Julie Garza', 'juliegarza@yahoo.com', 'Physics', 70), - ('Carla Rivera', 'Amber Hudson', 'amberhudson@hotmail.com', 'Art', 94), - ('Carla Rivera', 'Anna Cortez', 'annacortez@yahoo.com', 'Reading', 36), - ('Carla Rivera', 'Michael Harding', 'michaelharding@yahoo.com', 'Art', 51), - ('Larry Alexander', 'Krista Ramirez', 'kristaramirez@yahoo.com', 'History', 57), - ('Larry Alexander', 'Joseph Hill', 'josephhill@gmail.com', 'Chemistry', 97), - ('Larry Alexander', 'Jennifer Anderson', 'jenniferanderson@yahoo.com', 'Art', 89), - ('Larry Alexander', 'Teresa Chambers', 'teresachambers@hotmail.com', 'Math', 66), - ('Larry Alexander', 'Brooke Bowen', 'brookebowen@yahoo.com', 'History', 92), - ('Michael Knox', 'Stephanie Ross', 'stephanieross@yahoo.com', 'Art', 72), - ('Michael Knox', 'Krista Ramirez', 'kristaramirez@yahoo.com', 'History', 65), - ('Michael Knox', 'James Mccarthy', 'jamesmccarthy@yahoo.com', 'History', 49), - ('Michael Knox', 'Barbara Riley', 'barbarariley@hotmail.com', 'Chemistry', 29), - ('Michael Knox', 'Jason Aguilar', 'jasonaguilar@gmail.com', 'Chemistry', 83), - ('Alexander Brown', 'Jennifer Anderson', 'jenniferanderson@yahoo.com', 'Music', 89), - ('Alexander Brown', 'Deanna Juarez', 'deannajuarez@hotmail.com', 'Chemistry', 94), - ('Alexander Brown', 'Anna Cortez', 'annacortez@yahoo.com', 'Writing', 93), - ('Alexander Brown', 'Whitney Figueroa', 'whitneyfigueroa@gmail.com', 'Math', 35), - ('Alexander Brown', 'Whitney Figueroa', 'whitneyfigueroa@gmail.com', 'Physics', 71), - ('Anne Sloan', 'Jennifer Anderson', 'jenniferanderson@yahoo.com', 'Art', 38), - ('Anne Sloan', 'Brooke Bowen', 'brookebowen@yahoo.com', 'P.E.', 69), - ('Anne Sloan', 'Danny Davis', 'dannydavis@yahoo.com', 'Reading', 86), - ('Anne 
Sloan', 'Anna Cortez', 'annacortez@yahoo.com', 'Writing', 39), - ('Anne Sloan', 'James Mccarthy', 'jamesmccarthy@yahoo.com', 'P.E.', 96); -END; -$$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_extract_columns_data() RETURNS SETOF TEXT AS $f$ -BEGIN - CREATE TABLE roster_snapshot AS SELECT * FROM "Roster" ORDER BY id; - PERFORM msar.extract_columns_from_table('"Roster"'::regclass::oid, ARRAY[3, 4], 'Teachers', null); - RETURN NEXT columns_are('Teachers', ARRAY['id', 'Teacher', 'Teacher Email']); - RETURN NEXT columns_are('Roster', ARRAY['id', 'Student Name', 'Subject', 'Grade', 'Teachers_id']); - RETURN NEXT fk_ok('Roster', 'Teachers_id', 'Teachers', 'id'); - RETURN NEXT set_eq( - 'SELECT "Teacher", "Teacher Email" FROM "Teachers"', - 'SELECT DISTINCT "Teacher", "Teacher Email" FROM roster_snapshot', - 'Extracted data should be unique tuples' - ); - RETURN NEXT results_eq( - 'SELECT "Student Name", "Subject", "Grade" FROM "Roster" ORDER BY id', - 'SELECT "Student Name", "Subject", "Grade" FROM roster_snapshot ORDER BY id', - 'Remainder data should be unchanged' - ); - RETURN NEXT results_eq( - $q$ - SELECT r.id, "Student Name", "Teacher", "Teacher Email", "Subject", "Grade" - FROM "Roster" r LEFT JOIN "Teachers" t ON r."Teachers_id"=t.id ORDER BY r.id - $q$, - 'SELECT * FROM roster_snapshot ORDER BY id', - 'Joining extracted data should recover original' - ); - RETURN NEXT lives_ok( - $i$ - INSERT INTO "Teachers" ("Teacher", "Teacher Email") VALUES ('Miyagi', 'miyagi@karatekid.com') - $i$, - 'The new id column should be incremented to avoid collision' - ); -END; -$f$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION setup_extract_fkey_cols() RETURNS SETOF TEXT AS $$ -BEGIN -CREATE TABLE "Referent" ( - id integer PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, - "Teacher" text, - "Teacher Email" text -); -CREATE TABLE "Referrer" ( - id integer PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, - "Student Name" text, - "Subject" varchar(20), - "Grade" integer, - "Referent_id" integer REFERENCES "Referent" (id) -); -END; -$$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_extract_columns_keeps_fkey() RETURNS SETOF TEXT AS $f$ -BEGIN - PERFORM msar.extract_columns_from_table( - '"Referrer"'::regclass::oid, ARRAY[3, 5], 'Classes', 'Class' - ); - RETURN NEXT columns_are('Referent', ARRAY['id', 'Teacher', 'Teacher Email']); - RETURN NEXT columns_are('Referrer', ARRAY['id', 'Student Name', 'Grade', 'Class']); - RETURN NEXT columns_are('Classes', ARRAY['id', 'Subject', 'Referent_id']); - RETURN NEXT fk_ok('Referrer', 'Class', 'Classes', 'id'); - RETURN NEXT fk_ok('Classes', 'Referent_id', 'Referent', 'id'); -END; -$f$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION setup_dynamic_defaults() RETURNS SETOF TEXT AS $$ -BEGIN - CREATE TABLE defaults_test ( - id integer PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, - col1 integer DEFAULT 5, - col2 integer DEFAULT 3::integer, - col3 timestamp DEFAULT NOW(), - col4 date DEFAULT '2023-01-01', - col5 date DEFAULT CURRENT_DATE - ); -END; -$$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_is_possibly_dynamic() RETURNS SETOF TEXT AS $$ -DECLARE - tab_id oid; -BEGIN - tab_id := 'defaults_test'::regclass::oid; - RETURN NEXT is(msar.is_default_possibly_dynamic(tab_id, 1), true); - RETURN NEXT is(msar.is_default_possibly_dynamic(tab_id, 2), false); - RETURN NEXT is(msar.is_default_possibly_dynamic(tab_id, 3), false); - RETURN NEXT is(msar.is_default_possibly_dynamic(tab_id, 4), true); - RETURN NEXT is(msar.is_default_possibly_dynamic(tab_id, 5), 
false); - RETURN NEXT is(msar.is_default_possibly_dynamic(tab_id, 6), true); -END; -$$ LANGUAGE plpgsql; - - -CREATE OR REPLACE FUNCTION test_create_basic_mathesar_user() RETURNS SETOF TEXT AS $$ -BEGIN - PERFORM msar.create_basic_mathesar_user('testuser', 'mypass1234'); - RETURN NEXT database_privs_are ( - 'mathesar_testing', 'testuser', ARRAY['CREATE', 'CONNECT', 'TEMPORARY'] - ); - RETURN NEXT schema_privs_are ('msar', 'testuser', ARRAY['USAGE']); - RETURN NEXT schema_privs_are ('__msar', 'testuser', ARRAY['USAGE']); - PERFORM msar.create_basic_mathesar_user( - 'Ro"\bert''); DROP SCHEMA public;', 'my''pass1234"; DROP SCHEMA public;' - ); - RETURN NEXT has_schema('public'); - RETURN NEXT has_user('Ro"\bert''); DROP SCHEMA public;'); - RETURN NEXT database_privs_are ( - 'mathesar_testing', 'Ro"\bert''); DROP SCHEMA public;', ARRAY['CREATE', 'CONNECT', 'TEMPORARY'] - ); - RETURN NEXT schema_privs_are ('msar', 'Ro"\bert''); DROP SCHEMA public;', ARRAY['USAGE']); - RETURN NEXT schema_privs_are ('__msar', 'Ro"\bert''); DROP SCHEMA public;', ARRAY['USAGE']); -END; -$$ LANGUAGE plpgsql; diff --git a/db/sql/test_startup.sql b/db/sql/test_startup.sql index 61408c7810..72e06bb899 100644 --- a/db/sql/test_startup.sql +++ b/db/sql/test_startup.sql @@ -8,5 +8,5 @@ $$ LANGUAGE plpgsql; CALL raise_notice('Creating testing DB'); CREATE DATABASE mathesar_testing; \c mathesar_testing -\ir 0_msar.sql -\ir test_0_msar.sql +\ir 00_msar.sql +\ir test_00_msar.sql diff --git a/db/tables/operations/alter.py b/db/tables/operations/alter.py index 7ecb160163..72c72ca514 100644 --- a/db/tables/operations/alter.py +++ b/db/tables/operations/alter.py @@ -1,4 +1,6 @@ """The functions in this module wrap SQL functions that use `ALTER TABLE`.""" +import json + from db import constants from db import connection as db_conn from db.columns.operations.alter import batch_update_columns @@ -50,6 +52,26 @@ def alter_table(table_name, table_oid, schema, engine, update_data): batch_update_columns(table_oid, engine, update_data['columns']) +def alter_table_on_database(table_oid, table_data_dict, conn): + """ + Alter the name, description, or columns of a table, returning the name of the altered table. + + Args: + table_oid: The OID of the table to be altered. + table_data_dict: A dict describing the alterations to make. + + table_data_dict should have the form: + { + "name": <str>, + "description": <str>, + "columns": <list> of column_data describing columns to alter. + } + """ + return db_conn.exec_msar_func( + conn, 'alter_table', table_oid, json.dumps(table_data_dict) + ).fetchone()[0] + + def update_pk_sequence_to_latest(engine, table, connection=None): """ Update the primary key sequence to the current maximum.
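A minimal usage sketch of the new `alter_table_on_database` wrapper (illustrative only: it assumes an open psycopg connection `conn`, and the OID and values here are hypothetical):

```python
from db.tables.operations.alter import alter_table_on_database

# Rename the table and update its comment; an empty "columns" list
# means no column-level alterations are requested.
new_name = alter_table_on_database(
    12345,  # hypothetical OID of an existing table
    {"name": "newname", "description": "this is a comment", "columns": []},
    conn,
)
```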
diff --git a/db/tables/operations/create.py b/db/tables/operations/create.py index 61fc6b6729..13fa5802a7 100644 --- a/db/tables/operations/create.py +++ b/db/tables/operations/create.py @@ -1,7 +1,7 @@ from sqlalchemy.ext import compiler from sqlalchemy.schema import DDLElement import json -from db.connection import execute_msar_func_with_engine +from db.connection import execute_msar_func_with_engine, exec_msar_func from db.types.base import PostgresType from db.tables.operations.select import reflect_table_from_oid from db.metadata import get_empty_metadata @@ -29,11 +29,48 @@ def create_mathesar_table(engine, table_name, schema_oid, columns=[], constraint table_name, json.dumps(columns), json.dumps(constraints), + None, + comment + ).fetchone()[0]["oid"] + + +def create_table_on_database( + table_name, + schema_oid, + conn, + column_data_list=[], + constraint_data_list=[], + owner_oid=None, + comment=None +): + """ + Creates a table with a default id column. + + Args: + table_name: Name of the table to be created. + schema_oid: The OID of the schema where the table will be created. + column_data_list: The list of column dicts for the new table, in order. (optional) + constraint_data_list: The list of constraint dicts for the new table. (optional) + owner_oid: The OID of the role who will own the new table. (optional) + comment: The comment for the new table. (optional) + + Returns: + The OID and name of the created table. + """ + return exec_msar_func( + conn, + 'add_mathesar_table', + schema_oid, + table_name, + json.dumps(column_data_list), + json.dumps(constraint_data_list), + owner_oid, comment ).fetchone()[0] # TODO stop relying on reflections, instead return oid of the created table. +# TODO remove this function def create_string_column_table(name, schema_oid, column_names, engine, comment=None): """ This method creates a Postgres table in the specified schema, with all @@ -50,6 +87,50 @@ def create_string_column_table(name, schema_oid, column_names, engine, comment=N return table + +def prepare_table_for_import( + table_name, + schema_oid, + column_names, + header, + conn, + delimiter=None, + escapechar=None, + quotechar=None, + encoding=None, + comment=None ): + """ + This method creates a Postgres table in the specified schema, with all + columns being String type. + + Returns the copy_sql, table_oid, and table_name for carrying out the import into the created table. + """ + column_data_list = [ + { + "name": column_name, + "type": {"name": PostgresType.TEXT.id} + } for column_name in column_names + ] + import_info = exec_msar_func( + conn, + 'prepare_table_for_import', + schema_oid, + table_name, + json.dumps(column_data_list), + header, + delimiter, + escapechar, + quotechar, + encoding, + comment + ).fetchone()[0] + return ( + import_info['copy_sql'], + import_info['table_oid'], + import_info['table_name'] + ) + + class CreateTableAs(DDLElement): def __init__(self, name, selectable): self.name = name diff --git a/db/tables/operations/drop.py b/db/tables/operations/drop.py index f0f40b3c88..9ffeb170d2 100644 --- a/db/tables/operations/drop.py +++ b/db/tables/operations/drop.py @@ -1,5 +1,21 @@ -from db.connection import execute_msar_func_with_engine +from db.connection import execute_msar_func_with_engine, exec_msar_func def drop_table(name, schema, engine, cascade=False, if_exists=False): execute_msar_func_with_engine(engine, 'drop_table', schema, name, cascade, if_exists) + + +def drop_table_from_database(table_oid, conn, cascade=False): + """ + Drop a table. + + Args: + table_oid: OID of the table to drop.
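+ conn: A psycopg connection to the relevant database.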
+ cascade: Whether to drop the dependent objects. + + Returns: + Returns the fully qualified name of the dropped table. + """ + return exec_msar_func( + conn, 'drop_table', table_oid, cascade + ).fetchone()[0] diff --git a/db/tables/operations/import_.py b/db/tables/operations/import_.py new file mode 100644 index 0000000000..5c6bd31b0c --- /dev/null +++ b/db/tables/operations/import_.py @@ -0,0 +1,92 @@ +import json +import tempfile + +import clevercsv as csv + +from db.connection import exec_msar_func +from db.columns.operations.alter import _transform_column_alter_dict +from db.tables.operations.create import prepare_table_for_import +from db.encoding_utils import get_sql_compatible_encoding +from mathesar.models.deprecated import DataFile +from mathesar.imports.csv import get_file_encoding, get_sv_reader, process_column_names + + +def import_csv(data_file_id, table_name, schema_oid, conn, comment=None): + data_file = DataFile.objects.get(id=data_file_id) + file_path = data_file.file.path + header = data_file.header + if table_name is None or table_name == '': + table_name = data_file.base_name + dialect = csv.dialect.SimpleDialect( + data_file.delimiter, + data_file.quotechar, + data_file.escapechar + ) + encoding = get_file_encoding(data_file.file) + conversion_encoding, sql_encoding = get_sql_compatible_encoding(encoding) + with open(file_path, 'rb') as csv_file: + csv_reader = get_sv_reader(csv_file, header, dialect) + column_names = process_column_names(csv_reader.fieldnames) + copy_sql, table_oid, db_table_name = prepare_table_for_import( + table_name, + schema_oid, + column_names, + header, + conn, + dialect.delimiter, + dialect.escapechar, + dialect.quotechar, + sql_encoding, + comment + ) + insert_csv_records( + copy_sql, + file_path, + encoding, + conversion_encoding, + conn + ) + return {"oid": table_oid, "name": db_table_name} + + +def insert_csv_records( + copy_sql, + file_path, + encoding, + conversion_encoding, + conn +): + cursor = conn.cursor() + with open(file_path, 'r', encoding=encoding) as csv_file: + if conversion_encoding == encoding: + with cursor.copy(copy_sql) as copy: + while data := csv_file.read(): + copy.write(data) + else: + # The file needs to be converted to an encoding the database supports + with tempfile.SpooledTemporaryFile(mode='wb+') as temp_file: + while True: + contents = csv_file.read().encode(conversion_encoding, "replace") + if not contents: + break + temp_file.write(contents) + temp_file.seek(0) + with cursor.copy(copy_sql) as copy: + while data := temp_file.read(): + copy.write(data) + + +def get_preview(table_oid, column_list, conn, limit=20): + """ + Preview an imported table, returning the records from the specified columns of the table. + + Args: + table_oid: Identity of the imported table in the user's database. + column_list: List of settings describing the casts to be applied to the columns. + limit: The upper limit for the number of records to return. + + Note that these casts are temporary and do not alter the data in the underlying table; + if you wish to alter these settings permanently for the columns, see tables/alter.py.
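+
+ A minimal illustrative call (hypothetical values; assumes an open psycopg connection `conn`):
+ get_preview(table_oid, [{"id": 2, "type": "numeric"}], conn, limit=10)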
+ """ + transformed_column_data = [_transform_column_alter_dict(col) for col in column_list] + return exec_msar_func(conn, 'get_preview', table_oid, json.dumps(transformed_column_data), limit).fetchone()[0] diff --git a/db/tables/operations/infer_types.py b/db/tables/operations/infer_types.py index e7d3c522a3..54cc2b5b59 100644 --- a/db/tables/operations/infer_types.py +++ b/db/tables/operations/infer_types.py @@ -5,7 +5,8 @@ from db import constants from db.columns.base import MathesarColumn from db.columns.operations.infer_types import infer_column_type -from db.schemas.operations.create import create_schema +from db.connection import exec_msar_func +from db.schemas.operations.create import create_schema_if_not_exists_via_sql_alchemy from db.tables.operations.create import CreateTableAs from db.tables.operations.select import reflect_table from db.types.operations.convert import get_db_type_enum_from_class @@ -16,6 +17,25 @@ TEMP_TABLE = f"{constants.MATHESAR_PREFIX}temp_table_%s" +def infer_table_column_data_types(conn, table_oid): + """ + Infer the best type for each column in the table. + + Currently we only suggest different types for columns which originate + as type `text`. + + Args: + tab_id: The OID of the table whose columns we're inferring types for. + + The response JSON will have attnum keys, and values will be the + result of `format_type` for the inferred type of each column. + Restricted to columns to which the user has access. + """ + return exec_msar_func( + conn, 'infer_table_column_data_types', table_oid + ).fetchone()[0] + + def update_table_column_types(schema, table_name, engine, metadata=None, columns_might_have_defaults=True): metadata = metadata if metadata else get_empty_metadata() table = reflect_table(table_name, schema, engine, metadata=metadata) @@ -43,7 +63,7 @@ def infer_table_column_types(schema, table_name, engine, metadata=None, columns_ table = reflect_table(table_name, schema, engine, metadata=metadata) temp_name = TEMP_TABLE % (int(time())) - create_schema(TEMP_SCHEMA, engine, if_not_exists=True) + create_schema_if_not_exists_via_sql_alchemy(TEMP_SCHEMA, engine) with engine.begin() as conn: while engine.dialect.has_table(conn, temp_name, schema=TEMP_SCHEMA): temp_name = TEMP_TABLE.format(int(time())) diff --git a/db/tables/operations/move_columns.py b/db/tables/operations/move_columns.py index f4758a7c85..714b7fd9be 100644 --- a/db/tables/operations/move_columns.py +++ b/db/tables/operations/move_columns.py @@ -2,6 +2,7 @@ from sqlalchemy.dialects.postgresql import insert from db import constants +from db.connection import exec_msar_func from db.columns.base import MathesarColumn from db.columns.operations.alter import batch_alter_table_drop_columns from db.columns.operations.create import bulk_create_mathesar_column @@ -10,6 +11,16 @@ from db.metadata import get_empty_metadata +def move_columns_to_referenced_table(conn, source_table_oid, target_table_oid, move_column_attnums): + exec_msar_func( + conn, + 'move_columns_to_referenced_table', + source_table_oid, + target_table_oid, + move_column_attnums + ) + + def move_columns_between_related_tables( source_table_oid, target_table_oid, diff --git a/db/tables/operations/select.py b/db/tables/operations/select.py index 5104d029e0..fea5b168b4 100644 --- a/db/tables/operations/select.py +++ b/db/tables/operations/select.py @@ -3,6 +3,7 @@ ) from sqlalchemy.dialects.postgresql import JSONB +from db.connection import exec_msar_func from db.utils import execute_statement, get_pg_catalog_table BASE = 'base' 
@@ -14,6 +15,36 @@ MULTIPLE_RESULTS = 'multiple_results' +def get_table(table, conn): + """ + Return a dictionary describing a table of a schema. + + The `table` can be given as either a "qualified name", or an OID. + The OID is the preferred identifier, since it's much more robust. + + Args: + table: The table for which we want table info. + """ + return exec_msar_func(conn, 'get_table', table).fetchone()[0] + + +def get_table_info(schema, conn): + """ + Return a list of dictionaries describing the tables of a schema. + + The `schema` can be given as either a "qualified name", or an OID. + The OID is the preferred identifier, since it's much more robust. + + Args: + schema: The schema for which we want table info. + """ + return exec_msar_func(conn, 'get_table_info', schema).fetchone()[0] + + +def list_joinable_tables(table_oid, conn, max_depth): + return exec_msar_func(conn, 'get_joinable_tables', max_depth, table_oid).fetchone()[0] + + def reflect_table(name, schema, engine, metadata, connection_to_use=None, keep_existing=False): extend_existing = not keep_existing autoload_with = engine if connection_to_use is None else connection_to_use diff --git a/db/tables/operations/split.py b/db/tables/operations/split.py index c9260eacc0..e3a710d8bb 100644 --- a/db/tables/operations/split.py +++ b/db/tables/operations/split.py @@ -1,4 +1,4 @@ -from db.connection import execute_msar_func_with_engine +from db.connection import execute_msar_func_with_engine, exec_msar_func def extract_columns_from_table( @@ -14,3 +14,24 @@ def extract_columns_from_table( ) extracted_table_oid, new_fkey_attnum = curr.fetchone()[0] return extracted_table_oid, old_table_oid, new_fkey_attnum + + +def split_table( + conn, + old_table_oid, + extracted_column_attnums, + extracted_table_name, + relationship_fk_column_name=None +): + extracted_table_oid, new_fkey_attnum = exec_msar_func( + conn, + 'extract_columns_from_table', + old_table_oid, + extracted_column_attnums, + extracted_table_name, + relationship_fk_column_name + ).fetchone()[0] + return { + 'extracted_table_oid': extracted_table_oid, + 'new_fkey_attnum': new_fkey_attnum + } diff --git a/db/tests/columns/operations/test_alter.py b/db/tests/columns/operations/test_alter.py index 7a783a6bf9..78dbeb1abe 100644 --- a/db/tests/columns/operations/test_alter.py +++ b/db/tests/columns/operations/test_alter.py @@ -1,6 +1,9 @@ +import json +from unittest.mock import patch from sqlalchemy import Column, select, Table, MetaData, VARCHAR, INTEGER from db import constants +from db.columns.operations import alter as col_alt from db.columns.operations.alter import batch_update_columns, rename_column from db.columns.operations.select import ( get_column_attnum_from_name, get_column_name_from_attnum, @@ -18,6 +21,44 @@ from db.schemas.utils import get_schema_oid_from_name +def test_alter_columns_in_table_basic(): + with patch.object(col_alt.db_conn, 'exec_msar_func') as mock_exec: + col_alt.alter_columns_in_table( + 123, + [ + { + "id": 3, "name": "colname3", "type": "numeric", + "type_options": {"precision": 8}, "nullable": True, + "default": {"value": 8, "is_dynamic": False}, + "description": "third column" + }, { + "id": 6, "name": "colname6", "type": "character varying", + "type_options": {"length": 32}, "nullable": True, + "default": {"value": "blahblah", "is_dynamic": False}, + "description": "textual column" + } + ], + 'conn' + ) + expect_json_arg = [ + { + "attnum": 3, "name": "colname3", + "type": {"name": "numeric", "options": {"precision": 8}}, + "not_null": False, 
"default": 8, "description": "third column", + }, { + "attnum": 6, "name": "colname6", + "type": { + "name": "character varying", "options": {"length": 32}, + }, + "not_null": False, "default": "blahblah", + "description": "textual column" + } + ] + assert mock_exec.call_args.args[:3] == ('conn', 'alter_columns', 123) + # Necessary since `json.dumps` mangles dict ordering, but we don't care. + assert json.loads(mock_exec.call_args.args[3]) == expect_json_arg + + def _rename_column_and_assert(table, old_col_name, new_col_name, engine): """ Renames the colum of a table and assert the change went through diff --git a/db/tests/columns/operations/test_create.py b/db/tests/columns/operations/test_create.py index b668e71bf6..a248fd5d49 100644 --- a/db/tests/columns/operations/test_create.py +++ b/db/tests/columns/operations/test_create.py @@ -21,38 +21,36 @@ def test_type_list_completeness(engine): @pytest.mark.parametrize( - "in_name,out_name", [('test1', 'test1'), ('', None), (None, None)] + "in_name,out_name", [("test1", "test1"), ("", None), (None, None)] ) -def test_create_column_name(engine_with_schema, in_name, out_name): +def test_add_columns_name(in_name, out_name): """ Here, we just check that the PostgreSQL function is called properly, when given a (maybe empty) name param """ - engine, schema = engine_with_schema - with patch.object(col_create, "execute_msar_func_with_engine") as mock_exec: - col_create.create_column(engine, 12345, {"name": in_name}) + with patch.object(col_create.db_conn, "exec_msar_func") as mock_exec: + col_create.add_columns_to_table(123, [{"name": in_name}], "conn") call_args = mock_exec.call_args_list[0][0] - assert call_args[0] == engine + assert call_args[0] == "conn" assert call_args[1] == "add_columns" - assert call_args[2] == 12345 + assert call_args[2] == 123 assert json.loads(call_args[3])[0]["name"] == out_name @pytest.mark.parametrize( "in_type,out_type", [("numeric", "numeric"), (None, "character varying")] ) -def test_create_column_type(engine_with_schema, in_type, out_type): +def test_add_columns_type(in_type, out_type): """ Here, we just check that the PostgreSQL function is called properly when given a (maybe empty) type """ - engine, schema = engine_with_schema - with patch.object(col_create, "execute_msar_func_with_engine") as mock_exec: - col_create.create_column(engine, 12345, {"type": in_type}) + with patch.object(col_create.db_conn, "exec_msar_func") as mock_exec: + col_create.add_columns_to_table(123, [{"type": in_type}], "conn") call_args = mock_exec.call_args_list[0][0] - assert call_args[0] == engine + assert call_args[0] == "conn" assert call_args[1] == "add_columns" - assert call_args[2] == 12345 + assert call_args[2] == 123 actual_col_data = json.loads(call_args[3])[0] assert actual_col_data["name"] is None assert actual_col_data["type"]["name"] == out_type @@ -62,18 +60,17 @@ def test_create_column_type(engine_with_schema, in_type, out_type): @pytest.mark.parametrize( "in_options,out_options", [({"foo": "bar"}, {"foo": "bar"}), (None, None), ({}, {})] ) -def test_create_column_type_options(engine_with_schema, in_options, out_options): +def test_add_columns_type_options(in_options, out_options): """ Here, we just check that the PostgreSQL function is called properly when given a (maybe empty) type options dict. 
""" - engine, schema = engine_with_schema - with patch.object(col_create, "execute_msar_func_with_engine") as mock_exec: - col_create.create_column(engine, 12345, {"type_options": in_options}) + with patch.object(col_create.db_conn, "exec_msar_func") as mock_exec: + col_create.add_columns_to_table(123, [{"type_options": in_options}], "conn") call_args = mock_exec.call_args_list[0][0] - assert call_args[0] == engine + assert call_args[0] == "conn" assert call_args[1] == "add_columns" - assert call_args[2] == 12345 + assert call_args[2] == 123 assert json.loads(call_args[3])[0]["type"]["name"] == "character varying" assert json.loads(call_args[3])[0]["type"]["options"] == out_options @@ -81,7 +78,7 @@ def test_create_column_type_options(engine_with_schema, in_options, out_options) def test_duplicate_column_smoke(engine_with_schema): """This is just a smoke test, since the underlying function is trivial.""" engine, schema = engine_with_schema - with patch.object(col_create, "execute_msar_func_with_engine") as mock_exec: + with patch.object(col_create.db_conn, "execute_msar_func_with_engine") as mock_exec: col_create.duplicate_column( 12345, 4, diff --git a/db/tests/columns/operations/test_drop.py b/db/tests/columns/operations/test_drop.py new file mode 100644 index 0000000000..0bedaa2cf3 --- /dev/null +++ b/db/tests/columns/operations/test_drop.py @@ -0,0 +1,18 @@ +from unittest.mock import patch +from db.columns.operations import drop as col_drop + + +def test_drop_columns(): + with patch.object(col_drop.db_conn, 'exec_msar_func') as mock_exec: + mock_exec.return_value.fetchone = lambda: (3,) + result = col_drop.drop_columns_from_table(123, [1, 3, 5], 'conn') + mock_exec.assert_called_once_with('conn', 'drop_columns', 123, 1, 3, 5) + assert result == 3 + + +def test_drop_columns_single(): + with patch.object(col_drop.db_conn, 'exec_msar_func') as mock_exec: + mock_exec.return_value.fetchone = lambda: (1,) + result = col_drop.drop_columns_from_table(123, [1], 'conn') + mock_exec.assert_called_once_with('conn', 'drop_columns', 123, 1) + assert result == 1 diff --git a/db/tests/columns/operations/test_select.py b/db/tests/columns/operations/test_select.py index 242a8e6847..44c995766f 100644 --- a/db/tests/columns/operations/test_select.py +++ b/db/tests/columns/operations/test_select.py @@ -1,9 +1,11 @@ +from unittest.mock import patch import warnings import pytest from sqlalchemy import ( String, Integer, Column, Table, MetaData, DateTime, func ) from db.columns.exceptions import DynamicDefaultWarning +from db.columns.operations import select as col_select from db.columns.operations.select import ( get_column_attnum_from_name, get_column_default, get_column_name_from_attnum, get_columns_attnum_from_names, @@ -13,6 +15,14 @@ from db.metadata import get_empty_metadata +def test_get_column_info_for_table(): + with patch.object(col_select, 'exec_msar_func') as mock_exec: + mock_exec.return_value.fetchone = lambda: ('a', 'b') + result = col_select.get_column_info_for_table('table', 'conn') + mock_exec.assert_called_once_with('conn', 'get_column_info', 'table') + assert result == 'a' + + def test_get_attnum_from_name(engine_with_schema): engine, schema = engine_with_schema table_name = "table_with_columns" diff --git a/db/tests/constraints/operations/test_create.py b/db/tests/constraints/operations/test_create.py index da59b1afe6..476ef1c74b 100644 --- a/db/tests/constraints/operations/test_create.py +++ b/db/tests/constraints/operations/test_create.py @@ -23,7 +23,7 @@ def 
test_add_constraint_db(engine_with_schema, constraint_obj): engine = engine_with_schema with patch.object(con_create, 'execute_msar_func_with_engine') as mock_exec: - con_create.add_constraint( + con_create.add_constraint_via_sql_alchemy( engine=engine, constraint_obj=constraint_obj ) diff --git a/db/tests/dependents/test_dependents.py b/db/tests/dependents/test_dependents.py index 1b8e0f5afa..fc838bec23 100644 --- a/db/tests/dependents/test_dependents.py +++ b/db/tests/dependents/test_dependents.py @@ -1,7 +1,7 @@ import pytest from sqlalchemy import MetaData, select, Index from sqlalchemy_utils import create_view -from db.constraints.operations.create import add_constraint +from db.constraints.operations.create import add_constraint_via_sql_alchemy from db.constraints.base import ForeignKeyConstraint from db.dependents.dependents_utils import get_dependents_graph from db.constraints.operations.select import get_constraint_oid_by_name_and_table_oid @@ -93,7 +93,7 @@ def test_self_reference(engine_with_schema, library_tables_oids): fk_column_attnum = create_column(engine, publishers_oid, {'name': 'Parent Publisher', 'type': PostgresType.INTEGER.id})[0] pk_column_attnum = get_column_attnum_from_name(publishers_oid, 'id', engine, metadata=get_empty_metadata()) fk_constraint = ForeignKeyConstraint('Publishers_Publisher_fkey', publishers_oid, [fk_column_attnum], publishers_oid, [pk_column_attnum], {}) - add_constraint(fk_constraint, engine) + add_constraint_via_sql_alchemy(fk_constraint, engine) publishers_oid = library_tables_oids['Publishers'] publishers_dependents_graph = get_dependents_graph(publishers_oid, engine, []) @@ -114,7 +114,7 @@ def test_circular_reference(engine_with_schema, library_tables_oids): fk_column_attnum = create_column(engine, publishers_oid, {'name': 'Top Publication', 'type': PostgresType.INTEGER.id})[0] publications_pk_column_attnum = get_column_attnum_from_name(publications_oid, 'id', engine, metadata=get_empty_metadata()) fk_constraint = ForeignKeyConstraint('Publishers_Publications_fkey', publishers_oid, [fk_column_attnum], publications_oid, [publications_pk_column_attnum], {}) - add_constraint(fk_constraint, engine) + add_constraint_via_sql_alchemy(fk_constraint, engine) publishers_dependents_graph = get_dependents_graph(publishers_oid, engine, []) publications_dependents_oids = _get_object_dependents_oids(publishers_dependents_graph, publications_oid) diff --git a/db/tests/links/operations/test_create.py b/db/tests/links/operations/test_create.py index 27b974d1e3..6172e78371 100644 --- a/db/tests/links/operations/test_create.py +++ b/db/tests/links/operations/test_create.py @@ -1,3 +1,4 @@ +import json import pytest from unittest.mock import patch import db.links.operations.create as link_create @@ -18,10 +19,10 @@ def test_create_foreign_key_link(engine_with_schema, unique_link): ) call_args = mock_exec.call_args_list[0][0] assert call_args[0] == engine - assert call_args[1] == "create_many_to_one_link" - assert call_args[2] == 12345 + assert call_args[1] == "add_foreign_key_column" + assert call_args[2] == "actor_id" assert call_args[3] == 54321 - assert call_args[4] == "actor_id" + assert call_args[4] == 12345 assert call_args[5] == unique_link or False @@ -37,8 +38,10 @@ def test_many_to_many_link(engine_with_schema): ) call_args = mock_exec.call_args_list[0][0] assert call_args[0] == engine - assert call_args[1] == "create_many_to_many_link" + assert call_args[1] == "add_mapping_table" assert call_args[2] == 2200 assert call_args[3] == "movies_actors" - 
assert call_args[4] == referents['referent_table_oids'] - assert call_args[5] == referents['column_names'] + assert json.loads(call_args[4]) == [ + {"column_name": "movie_id", "referent_table_oid": 12345}, + {"column_name": "actor_id", "referent_table_oid": 54321} + ] diff --git a/setup/__init__.py b/db/tests/roles/__init__.py similarity index 100% rename from setup/__init__.py rename to db/tests/roles/__init__.py diff --git a/db/tests/roles/operations/__init__.py b/db/tests/roles/operations/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/db/tests/roles/operations/test_select.py b/db/tests/roles/operations/test_select.py new file mode 100644 index 0000000000..5f9e42f641 --- /dev/null +++ b/db/tests/roles/operations/test_select.py @@ -0,0 +1,18 @@ +from unittest.mock import patch +from db.roles.operations import select as ma_sel + + +def test_list_roles(): + with patch.object(ma_sel, 'exec_msar_func') as mock_exec: + mock_exec.return_value.fetchone = lambda: ('a', 'b') + result = ma_sel.list_roles('conn') + mock_exec.assert_called_once_with('conn', 'list_roles') + assert result == 'a' + + +def test_list_schema_privileges(): + with patch.object(ma_sel, 'exec_msar_func') as mock_exec: + mock_exec.return_value.fetchone = lambda: ('a', 'b') + result = ma_sel.list_schema_privileges(123456, 'conn') + mock_exec.assert_called_once_with('conn', 'list_schema_privileges', 123456) + assert result == 'a' diff --git a/db/tests/roles/operations/test_update.py b/db/tests/roles/operations/test_update.py new file mode 100644 index 0000000000..c98e456684 --- /dev/null +++ b/db/tests/roles/operations/test_update.py @@ -0,0 +1,29 @@ +import json +from unittest.mock import patch +from db.roles.operations import update as rupdate + + +def test_replace_database_privileges_for_roles(): + priv_spec = [{"role_oid": 1234, "privileges": ["CONNECT", "CREATE"]}] + with patch.object(rupdate, 'exec_msar_func') as mock_exec: + mock_exec.return_value.fetchone = lambda: ('a', 'b') + result = rupdate.replace_database_privileges_for_roles('conn', priv_spec) + mock_exec.assert_called_once_with( + 'conn', 'replace_database_privileges_for_roles', json.dumps(priv_spec) + ) + assert result == 'a' + + +def test_replace_schema_privileges_for_roles(): + schema_oid = 12345 + priv_spec = [{"role_oid": 1234, "privileges": ["UPDATE", "CREATE"]}] + with patch.object(rupdate, 'exec_msar_func') as mock_exec: + mock_exec.return_value.fetchone = lambda: ('a', 'b') + result = rupdate.replace_schema_privileges_for_roles( + 'conn', schema_oid, priv_spec + ) + mock_exec.assert_called_once_with( + 'conn', 'replace_schema_privileges_for_roles', + schema_oid, json.dumps(priv_spec) + ) + assert result == 'a' diff --git a/db/tests/schemas/operations/test_alter.py b/db/tests/schemas/operations/test_alter.py deleted file mode 100644 index 3cc3e7c995..0000000000 --- a/db/tests/schemas/operations/test_alter.py +++ /dev/null @@ -1,28 +0,0 @@ -from unittest.mock import patch -import db.schemas.operations.alter as sch_alter - - -def test_rename_schema(engine_with_schema): - engine = engine_with_schema - with patch.object(sch_alter, 'execute_msar_func_with_engine') as mock_exec: - sch_alter.rename_schema('rename_me', engine, rename_to='renamed') - call_args = mock_exec.call_args_list[0][0] - assert call_args[0] == engine - assert call_args[1] == "rename_schema" - assert call_args[2] == "rename_me" - assert call_args[3] == "renamed" - - -def test_comment_on_schema(engine_with_schema): - engine = engine_with_schema - with 
patch.object(sch_alter, 'execute_msar_func_with_engine') as mock_exec: - sch_alter.comment_on_schema( - schema_name='comment_on_me', - engine=engine, - comment='This is a comment' - ) - call_args = mock_exec.call_args_list[0][0] - assert call_args[0] == engine - assert call_args[1] == "comment_on_schema" - assert call_args[2] == "comment_on_me" - assert call_args[3] == "This is a comment" diff --git a/db/tests/schemas/operations/test_create.py b/db/tests/schemas/operations/test_create.py index 8c6d796f80..9452b95e1a 100644 --- a/db/tests/schemas/operations/test_create.py +++ b/db/tests/schemas/operations/test_create.py @@ -1,22 +1,16 @@ -import pytest from unittest.mock import patch import db.schemas.operations.create as sch_create -@pytest.mark.parametrize( - "if_not_exists", [(True), (False), (None)] -) -def test_create_schema(engine_with_schema, if_not_exists): +def test_create_schema_via_sql_alchemy(engine_with_schema): engine = engine_with_schema with patch.object(sch_create, 'execute_msar_func_with_engine') as mock_exec: - sch_create.create_schema( + sch_create.create_schema_via_sql_alchemy( schema_name='new_schema', engine=engine, - comment=None, - if_not_exists=if_not_exists + description=None, ) call_args = mock_exec.call_args_list[0][0] assert call_args[0] == engine assert call_args[1] == "create_schema" assert call_args[2] == "new_schema" - assert call_args[3] == if_not_exists or False diff --git a/db/tests/schemas/operations/test_drop.py b/db/tests/schemas/operations/test_drop.py index cf513b11b7..243ec4fbd6 100644 --- a/db/tests/schemas/operations/test_drop.py +++ b/db/tests/schemas/operations/test_drop.py @@ -3,16 +3,13 @@ import db.schemas.operations.drop as sch_drop -@pytest.mark.parametrize( - "cascade, if_exists", [(True, True), (False, True), (True, False), (False, False)] -) -def test_drop_schema(engine_with_schema, cascade, if_exists): +@pytest.mark.parametrize("cascade", [True, False]) +def test_drop_schema(engine_with_schema, cascade): engine = engine_with_schema with patch.object(sch_drop, 'execute_msar_func_with_engine') as mock_exec: - sch_drop.drop_schema('drop_test_schema', engine, cascade, if_exists) + sch_drop.drop_schema_via_name(engine, 'drop_test_schema', cascade) call_args = mock_exec.call_args_list[0][0] assert call_args[0] == engine assert call_args[1] == "drop_schema" assert call_args[2] == "drop_test_schema" assert call_args[3] == cascade - assert call_args[4] == if_exists diff --git a/db/tests/schemas/operations/test_select.py b/db/tests/schemas/operations/test_select.py index 41ad8ba32b..35854d66ea 100644 --- a/db/tests/schemas/operations/test_select.py +++ b/db/tests/schemas/operations/test_select.py @@ -1,7 +1,7 @@ import warnings from sqlalchemy import select, Table, MetaData, text -from db import types +from db.constants import TYPES_SCHEMA from db.tables.operations import infer_types from db.schemas.operations import select as ssel @@ -27,7 +27,7 @@ def test_get_mathesar_schemas_with_oids_avoids_information_schema(engine_with_sc def test_get_mathesar_schemas_with_oids_avoids_types_schema(engine_with_schema): engine, schema = engine_with_schema actual_schemas = ssel.get_mathesar_schemas_with_oids(engine) - assert all([schema != types.base.SCHEMA for schema, _ in actual_schemas]) + assert all([schema != TYPES_SCHEMA for schema, _ in actual_schemas]) def test_get_mathesar_schemas_with_oids_avoids_temp_schema(engine_with_schema): diff --git a/db/tests/tables/operations/test_alter.py b/db/tests/tables/operations/test_alter.py index 
993e307364..af1f0b9b0c 100644 --- a/db/tests/tables/operations/test_alter.py +++ b/db/tests/tables/operations/test_alter.py @@ -1,3 +1,5 @@ +import json + from unittest.mock import patch import db.tables.operations.alter as tab_alter @@ -29,3 +31,21 @@ def test_comment_on_table(engine_with_schema): assert call_args[2] == schema_name assert call_args[3] == "comment_on_me" assert call_args[4] == "This is a comment" + + +def test_alter_table(): + with patch.object(tab_alter.db_conn, 'exec_msar_func') as mock_exec: + tab_alter.alter_table_on_database( + 12345, + {"name": "newname", "description": "this is a comment", "columns": {}}, + "conn" + ) + call_args = mock_exec.call_args_list[0][0] + assert call_args[0] == "conn" + assert call_args[1] == "alter_table" + assert call_args[2] == 12345 + assert call_args[3] == json.dumps({ + "name": "newname", + "description": "this is a comment", + "columns": {}, + }) diff --git a/db/tests/tables/operations/test_select.py b/db/tests/tables/operations/test_select.py index 4f442c1f00..36c0cc8ec9 100644 --- a/db/tests/tables/operations/test_select.py +++ b/db/tests/tables/operations/test_select.py @@ -1,4 +1,5 @@ import sys +from unittest.mock import patch from sqlalchemy import text from db.columns.operations.select import get_column_name_from_attnum from db.tables.operations import select as ma_sel @@ -34,6 +35,14 @@ MULTIPLE_RESULTS = ma_sel.MULTIPLE_RESULTS +def test_get_table_info(): + with patch.object(ma_sel, 'exec_msar_func') as mock_exec: + mock_exec.return_value.fetchone = lambda: ('a', 'b') + result = ma_sel.get_table_info('schema', 'conn') + mock_exec.assert_called_once_with('conn', 'get_table_info', 'schema') + assert result == 'a' + + def _transform_row_to_names(row, engine): metadata = get_empty_metadata() output_dict = { diff --git a/db/tests/types/test_install.py b/db/tests/types/test_install.py index b10f08c883..533a6138e4 100644 --- a/db/tests/types/test_install.py +++ b/db/tests/types/test_install.py @@ -1,23 +1,4 @@ -from sqlalchemy import text -from db.types import install -from db.types import base from db.types.custom import email -from db.types.install import install_mathesar_on_database - - -def test_create_type_schema(engine): - install.create_type_schema(engine) - with engine.connect() as conn: - res = conn.execute(text("SELECT * FROM information_schema.schemata")) - schemata = {row['schema_name'] for row in res.fetchall()} - assert base.SCHEMA in schemata - - -def test_create_type_schema_when_exists(engine): - # This just checks that the function doesn't error if the type schema - # already exists when it's run. - install.create_type_schema(engine) - install.create_type_schema(engine) def test_create_email_when_exists(engine): @@ -25,9 +6,3 @@ def test_create_email_when_exists(engine): # already exists when it's run. 
email.install(engine) email.install(engine) - - -def test_install_when_exists(engine): - # This just checks that the function is idempotent - install_mathesar_on_database(engine) - install_mathesar_on_database(engine) diff --git a/db/types/base.py b/db/types/base.py index 2024aaeeb6..06e4290d1a 100644 --- a/db/types/base.py +++ b/db/types/base.py @@ -2,7 +2,7 @@ from sqlalchemy import text, create_engine as sa_create_engine -from db import constants +from db.constants import TYPES_SCHEMA from db.utils import OrderByIds @@ -143,8 +143,6 @@ class PostgresType(DatabaseType, Enum): UUID = 'uuid' -SCHEMA = f"{constants.MATHESAR_PREFIX}types" - # Since we want to have our identifiers quoted appropriately for use in # PostgreSQL, we want to use the postgres dialect preparer to set this up. _preparer = sa_create_engine("postgresql://").dialect.identifier_preparer @@ -154,7 +152,7 @@ def get_ma_qualified_schema(): """ Should usually return `mathesar_types` """ - return _preparer.quote_schema(SCHEMA) + return _preparer.quote_schema(TYPES_SCHEMA) # TODO rename to get_qualified_mathesar_obj_name diff --git a/db/types/categories.py b/db/types/categories.py index 6ed67db101..5110d0a49d 100644 --- a/db/types/categories.py +++ b/db/types/categories.py @@ -10,8 +10,6 @@ *STRING_TYPES, PostgresType.CHAR, PostgresType.NAME, - MathesarCustomType.URI, - MathesarCustomType.EMAIL, }) INTEGER_TYPES = frozenset({ diff --git a/db/types/custom/uri.py b/db/types/custom/uri.py index 27803db5ec..a69283843f 100644 --- a/db/types/custom/uri.py +++ b/db/types/custom/uri.py @@ -1,11 +1,12 @@ from enum import Enum import os -from sqlalchemy import text, Table, Column, String, MetaData, TEXT -from sqlalchemy.exc import ProgrammingError + +from sqlalchemy import TEXT, text from sqlalchemy.types import UserDefinedType -from psycopg2.errors import DuplicateTable +from psycopg.errors import DuplicateTable +from psycopg import sql -from db.types.base import MathesarCustomType, PostgresType, get_qualified_name, get_ma_qualified_schema +from db.types.base import MathesarCustomType, PostgresType, get_qualified_name from db.types.custom.underlying_type import HasUnderlyingType from db.utils import ignore_duplicate_wrapper @@ -81,20 +82,17 @@ def install(engine): conn.commit() -def install_tld_lookup_table(engine): - tlds_table = Table( - TLDS_TABLE_NAME, - MetaData(bind=engine), - Column("tld", String, primary_key=True), - schema=get_ma_qualified_schema(), - ) +def install_tld_lookup_table(conn): try: - tlds_table.create() - with engine.begin() as conn, open(TLDS_PATH) as f: - conn.execute( - tlds_table.insert(), - [{"tld": tld.strip().lower()} for tld in f if tld[:2] != "# "], - ) - except ProgrammingError as e: - if e.orig == DuplicateTable: - pass + create_tlds_table_sql = sql.SQL("CREATE TABLE mathesar_types.top_level_domains (tld text PRIMARY KEY)") + copy_tld_sql = sql.SQL("COPY mathesar_types.top_level_domains(tld) FROM STDIN") + + with open(TLDS_PATH) as f: + tld_insert = [(tld.strip().lower()) for tld in f if tld[:2] != "# "] + data_buffer = "\n".join(tld_insert).encode('utf-8') + with conn.transaction(): + conn.execute(create_tlds_table_sql) + with conn.cursor().copy(copy_tld_sql) as copy: + copy.write(data_buffer) + except DuplicateTable: + pass diff --git a/db/types/install.py b/db/types/install.py index 2b081f5aa2..27c5db696e 100644 --- a/db/types/install.py +++ b/db/types/install.py @@ -1,27 +1,10 @@ -from db.types.custom import email, money, multicurrency, uri, json_array, json_object -from db.types.base import SCHEMA 
-from db.schemas.operations.create import create_schema -from db.types.operations.cast import install_all_casts +from db.constants import TYPES_SCHEMA import psycopg -def create_type_schema(engine): - create_schema(SCHEMA, engine, if_not_exists=True) - - -def install_mathesar_on_database(engine): - create_type_schema(engine) - email.install(engine) - money.install(engine) - multicurrency.install(engine) - uri.install(engine) - uri.install_tld_lookup_table(engine) - json_array.install(engine) - json_object.install(engine) - install_all_casts(engine) - - def uninstall_mathesar_from_database(engine): conn_str = str(engine.url) with psycopg.connect(conn_str) as conn: - conn.execute(f"DROP SCHEMA IF EXISTS __msar, msar, {SCHEMA} CASCADE") + # TODO: Clean up this code so that it references all the schemas in our + # `INTERNAL_SCHEMAS` constant. + conn.execute(f"DROP SCHEMA IF EXISTS __msar, msar, {TYPES_SCHEMA} CASCADE") diff --git a/db/types/operations/cast.py b/db/types/operations/cast.py index 039dcc6171..dc46191395 100644 --- a/db/types/operations/cast.py +++ b/db/types/operations/cast.py @@ -44,7 +44,6 @@ def get_column_cast_expression(column, target_type, engine, type_options=None): def install_all_casts(engine): create_boolean_casts(engine) - create_date_casts(engine) create_decimal_number_casts(engine) create_email_casts(engine) create_integer_casts(engine) @@ -64,11 +63,6 @@ def create_boolean_casts(engine): create_cast_functions(PostgresType.BOOLEAN, type_body_map, engine) -def create_date_casts(engine): - type_body_map = _get_date_type_body_map() - create_cast_functions(PostgresType.DATE, type_body_map, engine) - - def create_json_casts(engine): json_types = categories.JSON_TYPES for db_type in json_types: diff --git a/demo/README.md b/demo/README.md deleted file mode 100644 index da2197bcbc..0000000000 --- a/demo/README.md +++ /dev/null @@ -1,47 +0,0 @@ -# Demo mode - -This folder includes code related to enabling "demo mode" for Mathesar. - -When Mathesar is in demo mode it has the following special behavior: - -- A new database is created for every user session -- A banner displays atop the UI -- Analytics are enabled, sending data to the Mathesar team - -## Running - -To run Mathesar in demo mode locally, add the following to your local `.env` file: - -``` -DJANGO_SETTINGS_MODULE=demo.settings -MATHESAR_LIVE_DEMO_USERNAME=admin -MATHESAR_LIVE_DEMO_PASSWORD=password -``` - -`MATHESAR_LIVE_DEMO_USERNAME` and `MATHESAR_LIVE_DEMO_PASSWORD` are optional – if you set these, then the login credentials will be shown on the login page. If either is omitted, credentials will not be shown. - -If you've not yet created the template database, you'll have to run the Django management command for that: - -``` -docker exec mathesar_service_dev sh -c "python manage.py setup_demo_template_db" -``` - -That might require you to restart or [rebuild/recreate your docker compose environment](https://github.com/centerofci/mathesar/blob/develop/DEVELOPER_GUIDE.md#rebuilding-the-docker-images). - -## Details - -The information below is up-to-date as of the time of writing (2023-07-05). - -### How demo datasets are defined - -The gist of it can be seen in the [`load_datasets` function](https://github.com/centerofci/mathesar/blob/0d99ee984206a99c6743a319504a1d86621d71d5/demo/install/datasets.py#L18C11-L18C11). The [Arxiv dataset](https://github.com/centerofci/mathesar/blob/0d99ee984206a99c6743a319504a1d86621d71d5/demo/install/datasets.py#L73C18-L73C18) is special, however.
It is regularly updated via a cron job, thus giving a demo of how Mathesar can share databases with other systems. - -### How each user gets a session and each session gets a database - -We use [Django middleware](https://github.com/centerofci/mathesar/blob/0d99ee984206a99c6743a319504a1d86621d71d5/demo/middleware.py#L19C19-L19C19) to intercept new guest sessions and perform the provisioning. Our intent is for each user to have their own database that will not be contaminated by other demo users. - -### How guest databases are provisioned (template database) - -When a new guest (demo user) is being provisioned, [we create a dedicated database for them, using Postgres' template databases feature](https://github.com/centerofci/mathesar/blob/0d99ee984206a99c6743a319504a1d86621d71d5/demo/install/base.py#L67C37-L67C37). Using a template database allows us to only have to copy it to create a new database with our demo datasets set up. - -The template database is set up [when our demo server is being provisioned by Ansible](https://github.com/centerofci/mathesar-ansible/blob/7a5db8bd13e86bbf191b4dd95e66cd138a512d36/roles/demo/tasks/main.yml#L39). [`setup_demo_template_db`](https://github.com/centerofci/mathesar/blob/0d99ee984206a99c6743a319504a1d86621d71d5/demo/management/commands/setup_demo_template_db.py#L11) is the Django management command that Ansible calls. The most interesting part of it is [loading the demo datasets](https://github.com/centerofci/mathesar/blob/0d99ee984206a99c6743a319504a1d86621d71d5/demo/install/datasets.py#L18). diff --git a/demo/__init__.py b/demo/__init__.py deleted file mode 100644 index 50e072800f..0000000000 --- a/demo/__init__.py +++ /dev/null @@ -1 +0,0 @@ -default_app_config = 'demo.apps.DemoConfig' diff --git a/demo/apps.py b/demo/apps.py deleted file mode 100644 index f1b2468c81..0000000000 --- a/demo/apps.py +++ /dev/null @@ -1,13 +0,0 @@ -from django.apps import AppConfig - -TEMPLATE_INITIALIZED = 'TEMPLATE_INITIALIZED' - - -class DemoConfig(AppConfig): - """Initialization manager.""" - - name = "demo" - - def ready(self): - """Perform initialization tasks.""" - pass diff --git a/demo/db_namer.py b/demo/db_namer.py deleted file mode 100644 index 355a9f0346..0000000000 --- a/demo/db_namer.py +++ /dev/null @@ -1,55 +0,0 @@ -"""Utility function for generating database names from session ID cookies.""" -WORDS = [ - 'wax', 'wry', 'jet', 'end', 'web', 'elf', 'jam', 'key', 'ant', 'fin', 'top', - 'ray', 'tan', 'box', 'saw', 'tie', 'spy', 'day', 'gym', 'fox', 'van', 'ear', - 'hub', 'age', 'hot', 'pin', 'red', 'map', 'run', 'den', 'bay', 'gas', 'nut', - 'toy', 'art', 'net', 'pea', 'yam', 'pet', 'hat', 'ivy', 'egg', 'log', 'mum', - 'rat', 'owl', 'hen', 'dry', 'bar', 'cub', 'dew', 'sun', 'lip', 'pig', 'tin', - 'mud', 'ox', 'wok', 'ink', 'ton', 'big', 'way', 'tax', 'amp', 'act', 'ice', - 'far', 'low', 'air', 'cut', 'oak', 'yak', 'eel', 'gem', 'few', 'ski', 'law', - 'sea', 'hip', 'bag', 'bug', 'shy', 'dot', 'dye', 'eye', 'jaw', 'rib', 'mat', - 'fir', 'bit', 'son', 'cat', 'six', 'fly', 'car', 'tub', 'emu', 'elk', 'tv', - 'sly', 'tip', 'fan', 'fur', 'yew', 'jay', 'bus', 'zoo', 'sky', 'elm', 'cry', - 'bee', 'tar', 'awe', 'bat', 'kip', 'cod', 'oil', 'foe', 'pot', 'ash', 'gum', - 'odd', 'icy', 'koi', 'bun', 'era', 'leo', 'lan', 'soy', 'paw', 'can', 'pan', - 'jar', 'pen', 'may', 'tea', 'ten', 'asp', 'dog', 'cap', 'bed', 'fog', 'bow', - 'cup', 'pie', 'cow', 'fig', 'boa' -] - -MODULUS = len(WORDS) - - -def get_name( - session_id, - chunk=2, - max_words=5, - join_val='_',
word_list=None, - modulus=None, -): - """ - Deterministically generate a name of the form word1_word2_word3_.... - - This is designed to work with Django sessionid cookie values, but - would theoretically work with any string value. - - Given `session_id`, we convert it to hex, then get a sequence of - numbers by breaking the resulting value into hex strings of length - `chunk`, then converting each to a decimal integer modulo `modulus`. - Finally, we find a word corresponding to each integer by its - index in the given `word_list`, and join the words using the given - `join_val`. - """ - hex_chunk = 2 * chunk - bytes_length = chunk * max_words - session_id = session_id or '' - modulus = modulus or MODULUS - word_list = word_list or WORDS - salt_gen = 'A' # generates 'paw_paw_paw_...' after transformation - session_id += salt_gen * (bytes_length - len(session_id)) - session_hex = bytes(session_id, 'utf-8').hex() - indices = ( - int(session_hex[i:i + hex_chunk], 16) % modulus - for i in range(0, 2 * bytes_length, hex_chunk) - ) - return join_val.join(word_list[i] for i in indices) diff --git a/demo/install/arxiv_skeleton.py b/demo/install/arxiv_skeleton.py deleted file mode 100644 index fe1af2e577..0000000000 --- a/demo/install/arxiv_skeleton.py +++ /dev/null @@ -1,62 +0,0 @@ -""" -We load the arXiv data set by first setting up a skeleton defining the data -model whenever a user starts a new demo, and then loading data into the data -model via a cron job that runs a management command. -""" - -import json -from pathlib import Path - -from django.conf import settings -from sqlalchemy import text -from demo.install.base import ARXIV_SETUP_SQL - - -def setup_and_register_schema_for_receiving_arxiv_data( - engine, schema_name='Latest Papers from arXiv' -): - db_name, schema_name = _setup_arxiv_schema(engine, schema_name) - append_db_and_arxiv_schema_to_log(db_name, schema_name) - - -def _make_sure_parent_directories_present(path_to_file): - path_to_file = Path(path_to_file) - parent_directory_of_file = path_to_file.parent - parent_directory_of_file.mkdir(parents=True, exist_ok=True) - - -def append_db_and_arxiv_schema_to_log(db_name, schema_name): - path = get_arxiv_db_and_schema_log_path() - _make_sure_parent_directories_present(path) - if db_name == getattr(settings, 'MATHESAR_DEMO_TEMPLATE', None): - return - db_and_schema = [db_name, schema_name] - with open(path, 'a') as f: - json.dump(db_and_schema, f) - f.write('\n') - - -def get_arxiv_db_and_schema_log_path(): - return Path(settings.MATHESAR_DEMO_ARXIV_LOG_PATH).absolute() - - -def _setup_arxiv_schema(engine, schema_name): - schema_description = ( - "Regularly updated by a script that gets the 50 most recent Computer" - " Science research papers from arXiv and inserts it into this schema."
- ) - drop_schema_query = text(f'DROP SCHEMA IF EXISTS "{schema_name}" CASCADE;') - create_schema_query = text(f'CREATE SCHEMA "{schema_name}";') - set_search_path = text(f'SET search_path="{schema_name}";') - set_schema_comment_query = text( - f'COMMENT ON SCHEMA "{schema_name}"' - f'IS $escape_token${schema_description}$escape_token$;' - ) - with engine.begin() as conn, open(ARXIV_SETUP_SQL) as f: - conn.execute(drop_schema_query) - conn.execute(create_schema_query) - conn.execute(set_search_path) - conn.execute(text(f.read())) - conn.execute(set_schema_comment_query) - db_name = engine.url.database - return db_name, schema_name diff --git a/demo/install/base.py b/demo/install/base.py deleted file mode 100644 index 24869ac382..0000000000 --- a/demo/install/base.py +++ /dev/null @@ -1,72 +0,0 @@ -"""The demo.install namespace contains logic for setting up new demo instances.""" -import os - -from sqlalchemy import text -from sqlalchemy.exc import OperationalError - -from db.engine import create_future_engine - -from mathesar.models.base import Table, Schema - -FILE_DIR = os.path.abspath(os.path.dirname(__file__)) -RESOURCES = os.path.join(FILE_DIR, "resources") -LIBRARY_ONE = os.path.join(RESOURCES, "library_without_checkouts.sql") -LIBRARY_TWO = os.path.join(RESOURCES, "library_add_checkouts.sql") -DEVCON_DATASET = os.path.join(RESOURCES, "devcon_dataset.sql") -MOVIES_SQL_TABLES = os.path.join(RESOURCES, "movie_collection_tables.sql") -MOVIES_SQL_FKS = os.path.join(RESOURCES, "movie_collection_fks.sql") -MOVIES_CSV = os.path.join(RESOURCES, 'movies_csv') -ARXIV_SETUP_SQL = os.path.join(RESOURCES, 'arxiv_dataset_setup.sql') -ARXIV_PAPERS_PICKLE = os.path.join(RESOURCES, 'arxiv_papers.pickle') -LIBRARY_MANAGEMENT = 'Library Management' -MOVIE_COLLECTION = 'Movie Collection' -MATHESAR_CON = 'Mathesar Con' -ARXIV = 'Latest Papers from arXiv' - - -def get_dj_schema_by_name(engine, name): - """Find a schema with a given name in the given DB.""" - db_name = engine.url.database - schemas = Schema.objects.filter(database__name=db_name) - for s in schemas: - if s.name == name: - return s - - -def get_dj_table_by_name(schema, name): - """Find a table with a given name in the given schema.""" - tables = Table.objects.filter(schema=schema) - for t in tables: - if t.name == name: - return t - - -def get_dj_column_by_name(table, name): - """Find a column with a given name in the given table.""" - columns = table.columns.all() - for c in columns: - if c.name == name: - return c - - -def create_demo_database( - user_db, username, password, hostname, root_db, port, template_db -): - """Create database, install Mathesar on it, add demo data.""" - user_db_engine = create_future_engine( - username, password, hostname, user_db, port - ) - try: - user_db_engine.connect() - user_db_engine.dispose() - print(f"Database {user_db} already exists! 
Skipping...") - except OperationalError: - root_db_engine = create_future_engine( - username, password, hostname, root_db, port, - ) - with root_db_engine.connect() as conn: - conn.execution_options(isolation_level="AUTOCOMMIT") - conn.execute(text(f"CREATE DATABASE {user_db} TEMPLATE {template_db};")) - root_db_engine.dispose() - user_db_engine.dispose() - print(f"Created DB is {user_db}.") diff --git a/demo/install/custom_settings.py b/demo/install/custom_settings.py deleted file mode 100644 index e079d5b0e1..0000000000 --- a/demo/install/custom_settings.py +++ /dev/null @@ -1,37 +0,0 @@ -"""This module contains logic for setting up custom display options.""" -from demo.install.base import ( - LIBRARY_MANAGEMENT, MATHESAR_CON, - get_dj_column_by_name, get_dj_schema_by_name, get_dj_table_by_name, -) -from mathesar.models.base import PreviewColumnSettings - - -def customize_settings(engine): - """Set preview settings so demo data looks good.""" - _customize_library_preview_settings(engine) - _customize_devcon_preview_settings(engine) - - -def _customize_library_preview_settings(engine): - schema = get_dj_schema_by_name(engine, LIBRARY_MANAGEMENT) - authors = get_dj_table_by_name(schema, 'Authors') - _set_first_and_last_names_preview(authors) - patrons = get_dj_table_by_name(schema, 'Patrons') - _set_first_and_last_names_preview(patrons) - - -def _customize_devcon_preview_settings(engine): - schema = get_dj_schema_by_name(engine, MATHESAR_CON) - presenters = get_dj_table_by_name(schema, 'Speakers') - _set_first_and_last_names_preview(presenters) - - -def _set_first_and_last_names_preview(table): - first_name = get_dj_column_by_name(table, 'First Name') - last_name = get_dj_column_by_name(table, 'Last Name') - template = f'{{{first_name.id}}} {{{last_name.id}}}' - new_preview_settings = PreviewColumnSettings.objects.create( - customized=True, template=template - ) - table.settings.preview_settings = new_preview_settings - table.settings.save() diff --git a/demo/install/datasets.py b/demo/install/datasets.py deleted file mode 100644 index af8bcf84e2..0000000000 --- a/demo/install/datasets.py +++ /dev/null @@ -1,54 +0,0 @@ -"""This module contains functions to load datasets for the demo.""" -import logging -import pickle - -from sqlalchemy import text - -from demo.install.arxiv_skeleton import setup_and_register_schema_for_receiving_arxiv_data -from demo.install.library_dataset import load_library_dataset -from demo.install.movies_dataset import load_movies_dataset -from demo.management.commands.load_arxiv_data import update_arxiv_schema -from demo.install.base import ( - MATHESAR_CON, DEVCON_DATASET, - ARXIV, ARXIV_PAPERS_PICKLE, -) - - -def load_datasets(engine): - """Load some SQL files with demo data to DB targeted by `engine`.""" - load_library_dataset(engine) - load_movies_dataset(engine) - _load_devcon_dataset(engine) - _load_arxiv_data_skeleton(engine) - - -def _load_devcon_dataset(engine): - drop_schema_query = text(f"""DROP SCHEMA IF EXISTS "{MATHESAR_CON}" CASCADE;""") - create_schema_query = text(f"""CREATE SCHEMA "{MATHESAR_CON}";""") - set_search_path = text(f"""SET search_path="{MATHESAR_CON}";""") - with engine.begin() as conn, open(DEVCON_DATASET) as f: - conn.execute(drop_schema_query) - conn.execute(create_schema_query) - conn.execute(set_search_path) - conn.execute(text(f.read())) - - -def _load_arxiv_data_skeleton(engine): - schema_name = ARXIV - setup_and_register_schema_for_receiving_arxiv_data(engine, schema_name=schema_name) - _load_arxiv_dataset(engine, 
schema_name=schema_name) - - -def _load_arxiv_dataset(engine, schema_name): - """ - Defined separately because this schema doesn't need a bundled dataset per se; it's meant to be - updated via cron. We preload some data so that it doesn't start off empty. - - Failures are logged rather than propagated. - """ - try: - with open(ARXIV_PAPERS_PICKLE, 'rb') as f: - papers = pickle.load(f) - update_arxiv_schema(engine, schema_name, papers) - except Exception as e: - logging.error(e, exc_info=True) diff --git a/demo/install/dumpcsvs.py b/demo/install/dumpcsvs.py deleted file mode 100644 index 23feb3daee..0000000000 --- a/demo/install/dumpcsvs.py +++ /dev/null @@ -1,40 +0,0 @@ -""" -Dump data for all the tables of a provided schema to separate {table_name}.csv files -with a header row of column names. - -Usage: python dumpcsvs.py -""" -import psycopg -import csv - -DB_NAME = "mathesar" -DB_USER = "mathesar" -DB_PASSWORD = "mathesar" -DB_HOST = "mathesar_dev_db" -SCHEMA_NAME = "Movie Collection" - -conn = psycopg.connect( - dbname=DB_NAME, - user=DB_USER, - password=DB_PASSWORD, - host=DB_HOST, - port=5432 -) - -# get names of tables. -tables = conn.execute( - f"SELECT table_name FROM information_schema.tables WHERE table_schema = '{SCHEMA_NAME}'" -).fetchall() - -for table in tables: - table_name = table[0] - with open(f'{table_name}.csv', 'w', newline="") as csv_file: - csv_writer = csv.writer(csv_file) - columns = conn.execute( - f"""SELECT column_name FROM information_schema.columns WHERE - table_schema = '{SCHEMA_NAME}' AND table_name = '{table_name}';""" - ).fetchall() - columns = [column[0] for column in columns] - csv_writer.writerow(columns) - with conn.cursor().copy(f"""COPY "{SCHEMA_NAME}"."{table_name}" TO STDOUT""") as copy: - csv_writer.writerows(copy.rows()) diff --git a/demo/install/explorations.py b/demo/install/explorations.py deleted file mode 100644 index 2d5ec929c4..0000000000 --- a/demo/install/explorations.py +++ /dev/null @@ -1,403 +0,0 @@ -"""This module contains logic for creating premade explorations of demo data.""" -from demo.install.base import ( - LIBRARY_MANAGEMENT, MATHESAR_CON, ARXIV, - get_dj_column_by_name, get_dj_schema_by_name, get_dj_table_by_name, -) -from mathesar.models.query import UIQuery - - -def load_custom_explorations(engine): - """Create some premade explorations to look at demo data.""" - _create_checkout_monthly_report(engine) - _create_overdue_books_report(engine) - _create_topics_by_organization_view(engine) - _create_organizations_by_topic_view(engine) - _create_paper_authors_view(engine) - - -def _create_checkout_monthly_report(engine): - schema = get_dj_schema_by_name(engine, LIBRARY_MANAGEMENT) - checkouts = get_dj_table_by_name(schema, "Checkouts") - initial_columns = [ - { - "id": get_dj_column_by_name(checkouts, "id").id, - "alias": "Checkouts_id", - }, - { - "id": get_dj_column_by_name(checkouts, "Checkout Time").id, - "alias": "Checkouts_Checkout Time", - }, - ] - transformations = [ - { - "spec": { - "base_grouping_column": "Checkouts_Checkout Time", - "grouping_expressions": [ - { - "preproc": "truncate_to_month", - "input_alias": "Checkouts_Checkout Time", - "output_alias": "Checkouts_Checkout Time_grouped" - } - ], - "aggregation_expressions": [ - { - "function": "count", - "input_alias": "Checkouts_id", - "output_alias": "Checkouts_id_agged" - } - ] - }, - "type": "summarize" - } - ] - display_names = { - "Checkouts_id": "Checkouts_id", - "Checkouts_id_agged": "Number of Checkouts", - "Checkouts_Checkout Time": "Checkouts_Checkout Time", 
"Checkouts_Checkout Time_grouped": "Month" - } - - UIQuery.objects.create( - name="Monthly Checkouts", - description="This report gives the number of checkouts each month.", - base_table=checkouts, - initial_columns=initial_columns, - transformations=transformations, - display_names=display_names, - ) - - -def _create_overdue_books_report(engine): - schema = get_dj_schema_by_name(engine, LIBRARY_MANAGEMENT) - books = get_dj_table_by_name(schema, "Books") - checkouts = get_dj_table_by_name(schema, "Checkouts") - items = get_dj_table_by_name(schema, "Items") - patrons = get_dj_table_by_name(schema, "Patrons") - initial_columns = [ - { - "id": get_dj_column_by_name(patrons, "Email").id, - "alias": "Patrons_Email", - "jp_path": [ - [ - get_dj_column_by_name(checkouts, "Patron").id, - get_dj_column_by_name(patrons, "id").id - ], - ] - }, { - "id": get_dj_column_by_name(books, "Title").id, - "alias": "Books_Title", - "jp_path": [ - [ - get_dj_column_by_name(checkouts, "Item").id, - get_dj_column_by_name(items, "id").id, - ], [ - get_dj_column_by_name(items, "Book").id, - get_dj_column_by_name(books, "id").id, - ] - ] - }, { - "id": get_dj_column_by_name(checkouts, "Due Date").id, - "alias": "Checkouts_Due Date" - }, { - "id": get_dj_column_by_name(checkouts, "Check In Time").id, - "alias": "Checkouts_Check In Time" - }, { - "id": get_dj_column_by_name(checkouts, "id").id, - "alias": "Checkouts_id" - }, - ] - transformations = [ - { - "spec": { - "lesser": [ - {"column_name": ["Checkouts_Due Date"]}, - {"literal": ["2023-01-20"]} - ] - }, - "type": "filter" - }, { - "spec": { - "null": [ - {"column_name": ["Checkouts_Check In Time"]} - ] - }, - "type": "filter" - }, { - "spec": [ - "Checkouts_Due Date", "Checkouts_Check In Time", "Checkouts_id" - ], - "type": "hide" - }, { - "spec": { - "base_grouping_column": "Patrons_Email", - "grouping_expressions": [ - { - "input_alias": "Patrons_Email", - "output_alias": "Patrons_Email_grouped" - } - ], - "aggregation_expressions": [ - { - "function": "distinct_aggregate_to_array", - "input_alias": "Books_Title", - "output_alias": "Books_Title_agged" - } - ] - }, - "type": "summarize" - } - ] - display_names = { - "Books_Title": "Books_Title", - "Checkouts_id": "Checkouts_id", - "Patrons_Email": "Patrons_Email", - "Checkouts_Due Date": "Checkouts_Due Date", - "Checkouts_Check In Time": "Checkouts_Check In Time", - "Patrons_Email_grouped": "Patron Email", - "Books_Title_agged": "Overdue Books", - } - description = "This shows each patron's overdue books if they have any." 
- - UIQuery.objects.create( - name="Overdue Book Report", - description=description, - base_table=checkouts, - initial_columns=initial_columns, - transformations=transformations, - display_names=display_names, - ) - - -def _create_topics_by_organization_view(engine): - schema = get_dj_schema_by_name(engine, MATHESAR_CON) - - organizations = get_dj_table_by_name(schema, "Organizations") - speakers = get_dj_table_by_name(schema, "Speakers") - talks = get_dj_table_by_name(schema, "Talks") - topics = get_dj_table_by_name(schema, "Topics") - tt_map = get_dj_table_by_name(schema, "Talk Topic Map") - - initial_columns = [ - { - "id": get_dj_column_by_name(talks, "id").id, - "alias": "Talks_id" - }, { - "id": get_dj_column_by_name(topics, "Name").id, - "alias": "Topics_Name", - "jp_path": [ - [ - get_dj_column_by_name(talks, "id").id, - get_dj_column_by_name(tt_map, "Talk").id, - ], [ - get_dj_column_by_name(tt_map, "Topic").id, - get_dj_column_by_name(topics, "id").id, - ] - ] - }, { - "id": get_dj_column_by_name(organizations, "Organization").id, - "alias": "Organizations_Organization", - "jp_path": [ - [ - get_dj_column_by_name(talks, "Speaker").id, - get_dj_column_by_name(speakers, "id").id, - ], [ - get_dj_column_by_name(speakers, "Organization").id, - get_dj_column_by_name(organizations, "id").id, - ] - ] - } - ] - transformations = [ - { - "spec": ["Talks_id"], - "type": "hide" - }, { - "spec": { - "base_grouping_column": "Organizations_Organization", - "grouping_expressions": [ - { - "input_alias": "Organizations_Organization", - "output_alias": "Organizations_Organization_grouped" - } - ], - "aggregation_expressions": [ - { - "function": "distinct_aggregate_to_array", - "input_alias": "Topics_Name", - "output_alias": "Topics_Name_agged" - } - ] - }, - "type": "summarize" - } - ] - display_names = { - "Talks_id": "Talks_id", - "Topics_Name": "Topics_Name", - "Topics_Name_agged": "Topics", - "Organizations_Organization": "Organizations_Organization", - "Organizations_Organization_grouped": "Organization" - } - description = "This gives a list of topics each organization is giving a talk about." 
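- # Save the exploration; it appears in the demo as a premade query on the Talks table.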
- - UIQuery.objects.create( - name="Topics by Organization", - description=description, - base_table=talks, - initial_columns=initial_columns, - transformations=transformations, - display_names=display_names, - ) - - -def _create_organizations_by_topic_view(engine): - schema = get_dj_schema_by_name(engine, MATHESAR_CON) - - organizations = get_dj_table_by_name(schema, "Organizations") - speakers = get_dj_table_by_name(schema, "Speakers") - talks = get_dj_table_by_name(schema, "Talks") - topics = get_dj_table_by_name(schema, "Topics") - tt_map = get_dj_table_by_name(schema, "Talk Topic Map") - - initial_columns = [ - { - "id": get_dj_column_by_name(talks, "id").id, - "alias": "Talks_id" - }, { - "id": get_dj_column_by_name(topics, "Name").id, - "alias": "Topics_Name", - "jp_path": [ - [ - get_dj_column_by_name(talks, "id").id, - get_dj_column_by_name(tt_map, "Talk").id, - ], [ - get_dj_column_by_name(tt_map, "Topic").id, - get_dj_column_by_name(topics, "id").id, - ] - ] - }, { - "id": get_dj_column_by_name(organizations, "Organization").id, - "alias": "Organizations_Organization", - "jp_path": [ - [ - get_dj_column_by_name(talks, "Speaker").id, - get_dj_column_by_name(speakers, "id").id, - ], [ - get_dj_column_by_name(speakers, "Organization").id, - get_dj_column_by_name(organizations, "id").id, - ] - ] - } - ] - transformations = [ - { - "spec": ["Talks_id"], - "type": "hide" - }, { - "spec": { - "base_grouping_column": "Topics_Name", - "grouping_expressions": [ - { - "input_alias": "Topics_Name", - "output_alias": "Topics_Name_grouped" - } - ], - "aggregation_expressions": [ - { - "function": "distinct_aggregate_to_array", - "input_alias": "Organizations_Organization", - "output_alias": "Organizations_Organization_agged" - } - ] - }, - "type": "summarize" - } - ] - display_names = { - "Talks_id": "Talks_id", - "Topics_Name": "Topics_Name", - "Topics_Name_grouped": "Topic", - "Organizations_Organization": "Organizations_Organization", - "Organizations_Organization_agged": "Organizations" - } - description = "This gives a list of organizations giving talks about each topic." 
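- # Store the inverse view (organizations grouped by topic) as another saved UIQuery.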
- - UIQuery.objects.create( - name="Organizations by Topic", - description=description, - base_table=talks, - initial_columns=initial_columns, - transformations=transformations, - display_names=display_names, - ) - - -def _create_paper_authors_view(engine): - schema = get_dj_schema_by_name(engine, ARXIV) - - papers = get_dj_table_by_name(schema, "Papers") - pa_map = get_dj_table_by_name(schema, "Paper-Author Map") - authors = get_dj_table_by_name(schema, "Authors") - initial_columns = [ - { - "id": get_dj_column_by_name(papers, "id").id, - "alias": "Papers_id" - }, { - "id": get_dj_column_by_name(papers, "Title").id, - "alias": "Papers_Title" - }, { - "id": get_dj_column_by_name(authors, "Name").id, - "alias": "Authors_Name", - "jp_path": [ - [ - get_dj_column_by_name(papers, "id").id, - get_dj_column_by_name(pa_map, "paper_id").id, - ], [ - get_dj_column_by_name(pa_map, "author_id").id, - get_dj_column_by_name(authors, "id").id, - ] - ] - } - ] - transformations = [ - { - "spec": { - "base_grouping_column": "Papers_id", - "grouping_expressions": [ - { - "input_alias": "Papers_id", - "output_alias": "Papers_id_grouped" - }, { - "input_alias": "Papers_Title", - "output_alias": "Papers_Title_grouped" - } - ], - "aggregation_expressions": [ - { - "function": "distinct_aggregate_to_array", - "input_alias": "Authors_Name", - "output_alias": "Authors_Name_agged" - } - ] - }, - "type": "summarize" - } - ] - display_names = { - "Papers_id": "Papers_id", - "Authors_Name": "Authors_Name", - "Papers_Title": "Papers_Title", - "Papers_id_grouped": "Paper id", - "Authors_Name_agged": "Authors", - "Papers_Title_grouped": "Paper Title" - } - description = "This report gives the title of each paper, along with a list of its authors." - - UIQuery.objects.create( - name="Paper Authors", - description=description, - base_table=papers, - initial_columns=initial_columns, - transformations=transformations, - display_names=display_names, - ) diff --git a/demo/install/library_dataset.py b/demo/install/library_dataset.py deleted file mode 100644 index 14c1f39b32..0000000000 --- a/demo/install/library_dataset.py +++ /dev/null @@ -1,29 +0,0 @@ -"""This module contains functions to load the Library Management dataset.""" - -from sqlalchemy import text -from demo.install.base import LIBRARY_MANAGEMENT, LIBRARY_ONE, LIBRARY_TWO - - -def load_library_dataset(engine, safe_mode=False): - """ - Load the library dataset into a "Library Management" schema. - - Args: - engine: an SQLAlchemy engine defining the connection to load data into. - safe_mode: When True, we will throw an error if the "Library Management" - schema already exists instead of dropping it. - - Uses the given engine to determine the database to load into. - Destructive: unless safe_mode=True, any existing "Library Management" - schema in the given database is dropped. 
- """ - drop_schema_query = text(f"""DROP SCHEMA IF EXISTS "{LIBRARY_MANAGEMENT}" CASCADE;""") - create_schema_query = text(f"""CREATE SCHEMA "{LIBRARY_MANAGEMENT}";""") - set_search_path = text(f"""SET search_path="{LIBRARY_MANAGEMENT}";""") - with engine.begin() as conn, open(LIBRARY_ONE) as f1, open(LIBRARY_TWO) as f2: - if safe_mode is False: - conn.execute(drop_schema_query) - conn.execute(create_schema_query) - conn.execute(set_search_path) - conn.execute(text(f1.read())) - conn.execute(text(f2.read())) diff --git a/demo/install/movies_dataset.py b/demo/install/movies_dataset.py deleted file mode 100644 index b45c1dbeaa..0000000000 --- a/demo/install/movies_dataset.py +++ /dev/null @@ -1,29 +0,0 @@ -"""This module contains functions to load the Movie Collection dataset.""" -import os -from sqlalchemy import text - -from demo.install.base import MOVIE_COLLECTION, MOVIES_SQL_TABLES, MOVIES_CSV, MOVIES_SQL_FKS - - -def load_movies_dataset(engine, safe_mode=False): - """ - Load the movie demo data set. - - Args: - engine: an SQLAlchemy engine defining the connection to load data into. - safe_mode: When True, we will throw an error if the "Movie Collection" - schema already exists instead of dropping it. - """ - drop_schema_query = text(f"""DROP SCHEMA IF EXISTS "{MOVIE_COLLECTION}" CASCADE;""") - with engine.begin() as conn, open(MOVIES_SQL_TABLES) as f, open(MOVIES_SQL_FKS) as f2: - if safe_mode is False: - conn.execute(drop_schema_query) - conn.execute(text(f.read())) - for file in os.scandir(MOVIES_CSV): - table_name = file.name.split('.csv')[0] - with open(file, 'r') as csv_file: - conn.connection.cursor().copy_expert( - f"""COPY "{MOVIE_COLLECTION}"."{table_name}" FROM STDIN DELIMITER ',' CSV HEADER""", - csv_file - ) - conn.execute(text(f2.read())) diff --git a/demo/install/resources/arxiv_dataset_setup.sql b/demo/install/resources/arxiv_dataset_setup.sql deleted file mode 100644 index df1f1bae77..0000000000 --- a/demo/install/resources/arxiv_dataset_setup.sql +++ /dev/null @@ -1,289 +0,0 @@ --- Authors - -CREATE TABLE "Authors" ( - id SERIAL PRIMARY KEY, - "Name" text UNIQUE -); - --- Categories - -CREATE TABLE "Categories" ( - id text PRIMARY KEY, - "Name" text, - "Description" text -); - --- Links - -CREATE TABLE "Links" ( - id SERIAL PRIMARY KEY, - "URL" mathesar_types.uri UNIQUE, - "Purpose" text -); - -COMMENT ON TABLE "Links" IS 'Links (URLs) associated with a given paper; each paper is expected to at least have a link to its PDF version.'; - --- Papers - -CREATE TABLE "Papers" ( - id SERIAL PRIMARY KEY, - "Title" text, - "Summary" text, - "Journal reference" text, - "Primary category" text references "Categories"(id), - "Updated" timestamp, - "Published" timestamp, - "Comment" text, - "DOI" text, - "arXiv URL" mathesar_types.uri UNIQUE -); - -COMMENT ON TABLE "Papers" IS 'Academic papers sourced from the arXiv API.'; - --- Paper-Author map table - -CREATE TABLE "Paper-Author Map" ( - id SERIAL PRIMARY KEY, - paper_id int, - author_id int, - UNIQUE (paper_id, author_id), - CONSTRAINT fk_paper FOREIGN KEY(paper_id) REFERENCES "Papers"(id), - CONSTRAINT fk_author FOREIGN KEY(author_id) REFERENCES "Authors"(id) -); - -COMMENT ON TABLE "Paper-Author Map" IS 'Maps papers to authors.'; - --- Paper-Category map table - -CREATE TABLE "Paper-Category Map" ( - id SERIAL PRIMARY KEY, - paper_id int, - category_id text, - UNIQUE (paper_id, category_id), - CONSTRAINT fk_paper FOREIGN KEY(paper_id) REFERENCES "Papers"(id), - CONSTRAINT fk_category FOREIGN KEY(category_id) REFERENCES 
"Categories"(id) -); - -COMMENT ON TABLE "Paper-Category Map" IS 'Maps papers to categories.'; - --- Paper-Link map table - -CREATE TABLE "Paper-Link Map" ( - id SERIAL PRIMARY KEY, - paper_id int, - link_id int, - UNIQUE (paper_id, link_id), - CONSTRAINT fk_paper FOREIGN KEY(paper_id) REFERENCES "Papers"(id), - CONSTRAINT fk_link FOREIGN KEY(link_id) REFERENCES "Links"(id) -); - -COMMENT ON TABLE "Paper-Link Map" IS 'Maps papers to links.'; - --- Populate Categories with Computer Science information from https://arxiv.org/category_taxonomy - -INSERT INTO "Categories" ( - id, - "Name", - "Description" -) -VALUES -( - 'cs.AI', - 'Artificial Intelligence', - 'Covers all areas of AI except Vision, Robotics, Machine Learning, Multiagent Systems, and Computation and Language (Natural Language Processing), which have separate subject areas. In particular, includes Expert Systems, Theorem Proving (although this may overlap with Logic in Computer Science), Knowledge Representation, Planning, and Uncertainty in AI. Roughly includes material in ACM Subject Classes I.2.0, I.2.1, I.2.3, I.2.4, I.2.8, and I.2.11.' -), -( - 'cs.AR', - 'Hardware Architecture', - 'Covers systems organization and hardware architecture. Roughly includes material in ACM Subject Classes C.0, C.1, and C.5.' -), -( - 'cs.CC', - 'Computational Complexity', - 'Covers models of computation, complexity classes, structural complexity, complexity tradeoffs, upper and lower bounds. Roughly includes material in ACM Subject Classes F.1 (computation by abstract devices), F.2.3 (tradeoffs among complexity measures), and F.4.3 (formal languages), although some material in formal languages may be more appropriate for Logic in Computer Science. Some material in F.2.1 and F.2.2, may also be appropriate here, but is more likely to have Data Structures and Algorithms as the primary subject area.' -), -( - 'cs.CE', - 'Computational Engineering, Finance, and Science', - 'Covers applications of computer science to the mathematical modeling of complex systems in the fields of science, engineering, and finance. Papers here are interdisciplinary and applications-oriented, focusing on techniques and tools that enable challenging computational simulations to be performed, for which the use of supercomputers or distributed computing platforms is often required. Includes material in ACM Subject Classes J.2, J.3, and J.4 (economics).' -), -( - 'cs.CG', - 'Computational Geometry', - 'Roughly includes material in ACM Subject Classes I.3.5 and F.2.2.' -), -( - 'cs.CL', - 'Computation and Language', - 'Covers natural language processing. Roughly includes material in ACM Subject Class I.2.7. Note that work on artificial languages (programming languages, logics, formal systems) that does not explicitly address natural-language issues broadly construed (natural-language processing, computational linguistics, speech, text retrieval, etc.) is not appropriate for this area.' -), -( - 'cs.CR', - 'Cryptography and Security', - 'Covers all areas of cryptography and security including authentication, public key cryptosytems, proof-carrying code, etc. Roughly includes material in ACM Subject Classes D.4.6 and E.3.' -), -( - 'cs.CV', - 'Computer Vision and Pattern Recognition', - 'Covers image processing, computer vision, pattern recognition, and scene understanding. Roughly includes material in ACM Subject Classes I.2.10, I.4, and I.5.' 
-), -( - 'cs.CY', - 'Computers and Society', - 'Covers impact of computers on society, computer ethics, information technology and public policy, legal aspects of computing, computers and education. Roughly includes material in ACM Subject Classes K.0, K.2, K.3, K.4, K.5, and K.7.' -), -( - 'cs.DB', - 'Databases', - 'Covers database management, datamining, and data processing. Roughly includes material in ACM Subject Classes E.2, E.5, H.0, H.2, and J.1.' -), -( - 'cs.DC', - 'Distributed, Parallel, and Cluster Computing', - 'Covers fault-tolerance, distributed algorithms, stability, parallel computation, and cluster computing. Roughly includes material in ACM Subject Classes C.1.2, C.1.4, C.2.4, D.1.3, D.4.5, D.4.7, E.1.' -), -( - 'cs.DL', - 'Digital Libraries', - 'Covers all aspects of the digital library design and document and text creation. Note that there will be some overlap with Information Retrieval (which is a separate subject area). Roughly includes material in ACM Subject Classes H.3.5, H.3.6, H.3.7, I.7.' -), -( - 'cs.DM', - 'Discrete Mathematics', - 'Covers combinatorics, graph theory, applications of probability. Roughly includes material in ACM Subject Classes G.2 and G.3.' -), -( - 'cs.DS', - 'Data Structures and Algorithms', - 'Covers data structures and analysis of algorithms. Roughly includes material in ACM Subject Classes E.1, E.2, F.2.1, and F.2.2.' -), -( - 'cs.ET', - 'Emerging Technologies', - 'Covers approaches to information processing (computing, communication, sensing) and bio-chemical analysis based on alternatives to silicon CMOS-based technologies, such as nanoscale electronic, photonic, spin-based, superconducting, mechanical, bio-chemical and quantum technologies (this list is not exclusive). Topics of interest include (1) building blocks for emerging technologies, their scalability and adoption in larger systems, including integration with traditional technologies, (2) modeling, design and optimization of novel devices and systems, (3) models of computation, algorithm design and programming for emerging technologies.' -), -( - 'cs.FL', - 'Formal Languages and Automata Theory', - 'Covers automata theory, formal language theory, grammars, and combinatorics on words. This roughly corresponds to ACM Subject Classes F.1.1, and F.4.3. Papers dealing with computational complexity should go to cs.CC; papers dealing with logic should go to cs.LO.' -), -( - 'cs.GL', - 'General Literature', - 'Covers introductory material, survey material, predictions of future trends, biographies, and miscellaneous computer-science related material. Roughly includes all of ACM Subject Class A, except it does not include conference proceedings (which will be listed in the appropriate subject area).' -), -( - 'cs.GR', - 'Graphics', - 'Covers all aspects of computer graphics. Roughly includes material in all of ACM Subject Class I.3, except that I.3.5 is likely to have Computational Geometry as the primary subject area.' -), -( - 'cs.GT', - 'Computer Science and Game Theory', - 'Covers all theoretical and applied aspects at the intersection of computer science and game theory, including work in mechanism design, learning in games (which may overlap with Learning), foundations of agent modeling in games (which may overlap with Multiagent systems), coordination, specification and formal methods for non-cooperative computational environments. The area also deals with applications of game theory to areas such as electronic commerce.' 
-), -( - 'cs.HC', - 'Human-Computer Interaction', - 'Covers human factors, user interfaces, and collaborative computing. Roughly includes material in ACM Subject Classes H.1.2 and all of H.5, except for H.5.1, which is more likely to have Multimedia as the primary subject area.' -), -( - 'cs.IR', - 'Information Retrieval', - 'Covers indexing, dictionaries, retrieval, content and analysis. Roughly includes material in ACM Subject Classes H.3.0, H.3.1, H.3.2, H.3.3, and H.3.4.' -), -( - 'cs.IT', - 'Information Theory', - 'Covers theoretical and experimental aspects of information theory and coding. Includes material in ACM Subject Class E.4 and intersects with H.1.1.' -), -( - 'cs.LG', - 'Machine Learning', - 'Papers on all aspects of machine learning research (supervised, unsupervised, reinforcement learning, bandit problems, and so on) including also robustness, explanation, fairness, and methodology. cs.LG is also an appropriate primary category for applications of machine learning methods.' -), -( - 'cs.LO', - 'Logic in Computer Science', - 'Covers all aspects of logic in computer science, including finite model theory, logics of programs, modal logic, and program verification. Programming language semantics should have Programming Languages as the primary subject area. Roughly includes material in ACM Subject Classes D.2.4, F.3.1, F.4.0, F.4.1, and F.4.2; some material in F.4.3 (formal languages) may also be appropriate here, although Computational Complexity is typically the more appropriate subject area.' -), -( - 'cs.MA', - 'Multiagent Systems', - 'Covers multiagent systems, distributed artificial intelligence, intelligent agents, coordinated interactions, and practical applications. Roughly covers ACM Subject Class I.2.11.' -), -( - 'cs.MM', - 'Multimedia', - 'Roughly includes material in ACM Subject Class H.5.1.' -), -( - 'cs.MS', - 'Mathematical Software', - 'Roughly includes material in ACM Subject Class G.4.' -), -( - 'cs.NA', - 'Numerical Analysis', - 'cs.NA is an alias for math.NA. Roughly includes material in ACM Subject Class G.1.' -), -( - 'cs.NE', - 'Neural and Evolutionary Computing', - 'Covers neural networks, connectionism, genetic algorithms, artificial life, adaptive behavior. Roughly includes some material in ACM Subject Class C.1.3, I.2.6, I.5.' -), -( - 'cs.NI', - 'Networking and Internet Architecture', - 'Covers all aspects of computer communication networks, including network architecture and design, network protocols, and internetwork standards (like TCP/IP). Also includes topics, such as web caching, that are directly relevant to Internet architecture and performance. Roughly includes all of ACM Subject Class C.2 except C.2.4, which is more likely to have Distributed, Parallel, and Cluster Computing as the primary subject area.' -), -( - 'cs.OH', - 'Other Computer Science', - 'This is the classification to use for documents that do not fit anywhere else.' -), -( - 'cs.OS', - 'Operating Systems', - 'Roughly includes material in ACM Subject Classes D.4.1, D.4.2., D.4.3, D.4.4, D.4.5, D.4.7, and D.4.9.' -), -( - 'cs.PF', - 'Performance', - 'Covers performance measurement and evaluation, queueing, and simulation. Roughly includes material in ACM Subject Classes D.4.8 and K.6.2.' -), -( - 'cs.PL', - 'Programming Languages', - 'Covers programming language semantics, language features, programming approaches (such as object-oriented programming, functional programming, logic programming). 
Also includes material on compilers oriented towards programming languages; other material on compilers may be more appropriate in Architecture (AR). Roughly includes material in ACM Subject Classes D.1 and D.3.' -), -( - 'cs.RO', - 'Robotics', - 'Roughly includes material in ACM Subject Class I.2.9.' -), -( - 'cs.SC', - 'Symbolic Computation', - 'Roughly includes material in ACM Subject Class I.1.' -), -( - 'cs.SD', - 'Sound', - 'Covers all aspects of computing with sound, and sound as an information channel. Includes models of sound, analysis and synthesis, audio user interfaces, sonification of data, computer music, and sound signal processing. Includes ACM Subject Class H.5.5, and intersects with H.1.2, H.5.1, H.5.2, I.2.7, I.5.4, I.6.3, J.5, K.4.2.' -), -( - 'cs.SE', - 'Software Engineering', - 'Covers design tools, software metrics, testing and debugging, programming environments, etc. Roughly includes material in all of ACM Subject Classes D.2, except that D.2.4 (program verification) should probably have Logics in Computer Science as the primary subject area.' -), -( - 'cs.SI', - 'Social and Information Networks', - 'Covers the design, analysis, and modeling of social and information networks, including their applications for on-line information access, communication, and interaction, and their roles as datasets in the exploration of questions in these and other domains, including connections to the social and biological sciences. Analysis and modeling of such networks includes topics in ACM Subject classes F.2, G.2, G.3, H.2, and I.2; applications in computing include topics in H.3, H.4, and H.5; and applications at the interface of computing and other disciplines include topics in J.1--J.7. Papers on computer communication systems and network protocols (e.g. TCP/IP) are generally a closer fit to the Networking and Internet Architecture (cs.NI) category.' -), -( - 'cs.SY', - 'Systems and Control', - 'cs.SY is an alias for eess.SY. This section includes theoretical and experimental research covering all facets of automatic control systems. The section is focused on methods of control system analysis and design using tools of modeling, simulation and optimization. Specific areas of research include nonlinear, distributed, adaptive, stochastic and robust control in addition to hybrid and discrete event systems. Application areas include automotive and aerospace control systems, network control, biological systems, multiagent and cooperative control, robotics, reinforcement learning, sensor networks, control of cyber-physical and energy-related systems, and control of computing systems.' 
-); diff --git a/demo/install/resources/arxiv_papers.pickle b/demo/install/resources/arxiv_papers.pickle deleted file mode 100644 index bed7d365dc..0000000000 Binary files a/demo/install/resources/arxiv_papers.pickle and /dev/null differ diff --git a/demo/install/resources/devcon_dataset.sql b/demo/install/resources/devcon_dataset.sql deleted file mode 100644 index 1e135bee47..0000000000 --- a/demo/install/resources/devcon_dataset.sql +++ /dev/null @@ -1,898 +0,0 @@ --- --- Name: Days; Type: TABLE; Schema: Mathesar Con; Owner: - --- - -CREATE TABLE "Days" ( - id integer NOT NULL, - "Name" text, - "Date" date -); - - --- --- Name: Job Titles; Type: TABLE; Schema: Mathesar Con; Owner: - --- - -CREATE TABLE "Job Titles" ( - id integer NOT NULL, - "Titles" text -); - - --- --- Name: Organizations; Type: TABLE; Schema: Mathesar Con; Owner: - --- - -CREATE TABLE "Organizations" ( - id integer NOT NULL, - "Organization" text -); - - --- --- Name: Positions_id_seq; Type: SEQUENCE; Schema: Mathesar Con; Owner: - --- - -CREATE SEQUENCE "Positions_id_seq" - AS integer - START WITH 1 - INCREMENT BY 1 - NO MINVALUE - NO MAXVALUE - CACHE 1; - - --- --- Name: Positions_id_seq; Type: SEQUENCE OWNED BY; Schema: Mathesar Con; Owner: - --- - -ALTER SEQUENCE "Positions_id_seq" OWNED BY "Job Titles".id; - - --- --- Name: Rooms; Type: TABLE; Schema: Mathesar Con; Owner: - --- - -CREATE TABLE "Rooms" ( - id integer NOT NULL, - "Name" text NOT NULL, - "Capacity" numeric -); - - --- --- Name: Speakers; Type: TABLE; Schema: Mathesar Con; Owner: - --- - -CREATE TABLE "Speakers" ( - id integer NOT NULL, - "First Name" text, - "Last Name" text, - "Email" mathesar_types.email NOT NULL, - "Bio" text, - "Job Title" integer, - "Organization" integer -); - - --- --- Name: Table 115_id_seq; Type: SEQUENCE; Schema: Mathesar Con; Owner: - --- - -CREATE SEQUENCE "Table 115_id_seq" - AS integer - START WITH 1 - INCREMENT BY 1 - NO MINVALUE - NO MAXVALUE - CACHE 1; - - --- --- Name: Table 115_id_seq; Type: SEQUENCE OWNED BY; Schema: Mathesar Con; Owner: - --- - -ALTER SEQUENCE "Table 115_id_seq" OWNED BY "Speakers".id; - - --- --- Name: Table 117_id_seq; Type: SEQUENCE; Schema: Mathesar Con; Owner: - --- - -CREATE SEQUENCE "Table 117_id_seq" - AS integer - START WITH 1 - INCREMENT BY 1 - NO MINVALUE - NO MAXVALUE - CACHE 1; - - --- --- Name: Table 117_id_seq; Type: SEQUENCE OWNED BY; Schema: Mathesar Con; Owner: - --- - -ALTER SEQUENCE "Table 117_id_seq" OWNED BY "Rooms".id; - - --- --- Name: Topics; Type: TABLE; Schema: Mathesar Con; Owner: - --- - -CREATE TABLE "Topics" ( - id integer NOT NULL, - "Name" text -); - - --- --- Name: Table 118_id_seq; Type: SEQUENCE; Schema: Mathesar Con; Owner: - --- - -CREATE SEQUENCE "Table 118_id_seq" - AS integer - START WITH 1 - INCREMENT BY 1 - NO MINVALUE - NO MAXVALUE - CACHE 1; - - --- --- Name: Table 118_id_seq; Type: SEQUENCE OWNED BY; Schema: Mathesar Con; Owner: - --- - -ALTER SEQUENCE "Table 118_id_seq" OWNED BY "Topics".id; - - --- --- Name: Table 119_id_seq; Type: SEQUENCE; Schema: Mathesar Con; Owner: - --- - -CREATE SEQUENCE "Table 119_id_seq" - AS integer - START WITH 1 - INCREMENT BY 1 - NO MINVALUE - NO MAXVALUE - CACHE 1; - - --- --- Name: Table 119_id_seq; Type: SEQUENCE OWNED BY; Schema: Mathesar Con; Owner: - --- - -ALTER SEQUENCE "Table 119_id_seq" OWNED BY "Days".id; - - --- --- Name: Talks; Type: TABLE; Schema: Mathesar Con; Owner: - --- - -CREATE TABLE "Talks" ( - id integer NOT NULL, - "Title" text, - "Day" integer, - "Room" integer, - "Speaker" integer, - "Abstract" 
text, - "Time Slot" integer, - "Track" integer -); - - --- --- Name: Table 120_id_seq; Type: SEQUENCE; Schema: Mathesar Con; Owner: - --- - -CREATE SEQUENCE "Table 120_id_seq" - AS integer - START WITH 1 - INCREMENT BY 1 - NO MINVALUE - NO MAXVALUE - CACHE 1; - - --- --- Name: Table 120_id_seq; Type: SEQUENCE OWNED BY; Schema: Mathesar Con; Owner: - --- - -ALTER SEQUENCE "Table 120_id_seq" OWNED BY "Talks".id; - - --- --- Name: Table 67_id_seq; Type: SEQUENCE; Schema: Mathesar Con; Owner: - --- - -CREATE SEQUENCE "Table 67_id_seq" - AS integer - START WITH 1 - INCREMENT BY 1 - NO MINVALUE - NO MAXVALUE - CACHE 1; - - --- --- Name: Table 67_id_seq; Type: SEQUENCE OWNED BY; Schema: Mathesar Con; Owner: - --- - -ALTER SEQUENCE "Table 67_id_seq" OWNED BY "Organizations".id; - - --- --- Name: Talk Topic Map; Type: TABLE; Schema: Mathesar Con; Owner: - --- - -CREATE TABLE "Talk Topic Map" ( - id integer NOT NULL, - "Talk" integer, - "Topic" integer -); - - --- --- Name: Talk Topic Map_id_seq; Type: SEQUENCE; Schema: Mathesar Con; Owner: - --- - -CREATE SEQUENCE "Talk Topic Map_id_seq" - AS integer - START WITH 1 - INCREMENT BY 1 - NO MINVALUE - NO MAXVALUE - CACHE 1; - - --- --- Name: Talk Topic Map_id_seq; Type: SEQUENCE OWNED BY; Schema: Mathesar Con; Owner: - --- - -ALTER SEQUENCE "Talk Topic Map_id_seq" OWNED BY "Talk Topic Map".id; - - --- --- Name: Time Slots; Type: TABLE; Schema: Mathesar Con; Owner: - --- - -CREATE TABLE "Time Slots" ( - id integer NOT NULL, - "Slot" text NOT NULL -); - - --- --- Name: Time Slots_id_seq; Type: SEQUENCE; Schema: Mathesar Con; Owner: - --- - -CREATE SEQUENCE "Time Slots_id_seq" - AS integer - START WITH 1 - INCREMENT BY 1 - NO MINVALUE - NO MAXVALUE - CACHE 1; - - --- --- Name: Time Slots_id_seq; Type: SEQUENCE OWNED BY; Schema: Mathesar Con; Owner: - --- - -ALTER SEQUENCE "Time Slots_id_seq" OWNED BY "Time Slots".id; - - --- --- Name: Tracks; Type: TABLE; Schema: Mathesar Con; Owner: - --- - -CREATE TABLE "Tracks" ( - id integer NOT NULL, - "Name" text -); - - --- --- Name: Tracks_id_seq; Type: SEQUENCE; Schema: Mathesar Con; Owner: - --- - -CREATE SEQUENCE "Tracks_id_seq" - AS integer - START WITH 1 - INCREMENT BY 1 - NO MINVALUE - NO MAXVALUE - CACHE 1; - - --- --- Name: Tracks_id_seq; Type: SEQUENCE OWNED BY; Schema: Mathesar Con; Owner: - --- - -ALTER SEQUENCE "Tracks_id_seq" OWNED BY "Tracks".id; - - --- --- Name: Days id; Type: DEFAULT; Schema: Mathesar Con; Owner: - --- - -ALTER TABLE ONLY "Days" ALTER COLUMN id SET DEFAULT nextval('"Table 119_id_seq"'::regclass); - - --- --- Name: Job Titles id; Type: DEFAULT; Schema: Mathesar Con; Owner: - --- - -ALTER TABLE ONLY "Job Titles" ALTER COLUMN id SET DEFAULT nextval('"Positions_id_seq"'::regclass); - - --- --- Name: Organizations id; Type: DEFAULT; Schema: Mathesar Con; Owner: - --- - -ALTER TABLE ONLY "Organizations" ALTER COLUMN id SET DEFAULT nextval('"Table 67_id_seq"'::regclass); - - --- --- Name: Rooms id; Type: DEFAULT; Schema: Mathesar Con; Owner: - --- - -ALTER TABLE ONLY "Rooms" ALTER COLUMN id SET DEFAULT nextval('"Table 117_id_seq"'::regclass); - - --- --- Name: Speakers id; Type: DEFAULT; Schema: Mathesar Con; Owner: - --- - -ALTER TABLE ONLY "Speakers" ALTER COLUMN id SET DEFAULT nextval('"Table 115_id_seq"'::regclass); - - --- --- Name: Talk Topic Map id; Type: DEFAULT; Schema: Mathesar Con; Owner: - --- - -ALTER TABLE ONLY "Talk Topic Map" ALTER COLUMN id SET DEFAULT nextval('"Talk Topic Map_id_seq"'::regclass); - - --- --- Name: Talks id; Type: DEFAULT; Schema: Mathesar Con; Owner: - 
--- - -ALTER TABLE ONLY "Talks" ALTER COLUMN id SET DEFAULT nextval('"Table 120_id_seq"'::regclass); - - --- --- Name: Time Slots id; Type: DEFAULT; Schema: Mathesar Con; Owner: - --- - -ALTER TABLE ONLY "Time Slots" ALTER COLUMN id SET DEFAULT nextval('"Time Slots_id_seq"'::regclass); - - --- --- Name: Topics id; Type: DEFAULT; Schema: Mathesar Con; Owner: - --- - -ALTER TABLE ONLY "Topics" ALTER COLUMN id SET DEFAULT nextval('"Table 118_id_seq"'::regclass); - - --- --- Name: Tracks id; Type: DEFAULT; Schema: Mathesar Con; Owner: - --- - -ALTER TABLE ONLY "Tracks" ALTER COLUMN id SET DEFAULT nextval('"Tracks_id_seq"'::regclass); - - --- --- Data for Name: Days; Type: TABLE DATA; Schema: Mathesar Con; Owner: - --- - -INSERT INTO "Days" (id, "Name", "Date") VALUES (1, 'Friday', '2023-01-20'); -INSERT INTO "Days" (id, "Name", "Date") VALUES (2, 'Saturday', '2023-01-21'); -INSERT INTO "Days" (id, "Name", "Date") VALUES (3, 'Sunday', '2023-01-22'); - - --- --- Data for Name: Job Titles; Type: TABLE DATA; Schema: Mathesar Con; Owner: - --- - -INSERT INTO "Job Titles" (id, "Titles") VALUES (1, 'Director of Technology'); -INSERT INTO "Job Titles" (id, "Titles") VALUES (3, 'Product Designer'); -INSERT INTO "Job Titles" (id, "Titles") VALUES (4, 'Product Manager'); -INSERT INTO "Job Titles" (id, "Titles") VALUES (5, 'User Advocate'); -INSERT INTO "Job Titles" (id, "Titles") VALUES (6, 'Store Manager'); -INSERT INTO "Job Titles" (id, "Titles") VALUES (7, 'Business Analyst'); -INSERT INTO "Job Titles" (id, "Titles") VALUES (8, 'Sysadmin'); -INSERT INTO "Job Titles" (id, "Titles") VALUES (9, 'Data Journalist'); -INSERT INTO "Job Titles" (id, "Titles") VALUES (10, 'Hospital Administrator'); -INSERT INTO "Job Titles" (id, "Titles") VALUES (11, 'Educator'); -INSERT INTO "Job Titles" (id, "Titles") VALUES (2, 'Software Engineer'); -INSERT INTO "Job Titles" (id, "Titles") VALUES (12, 'Data Coordinator'); -INSERT INTO "Job Titles" (id, "Titles") VALUES (13, 'Founder'); - - --- --- Data for Name: Organizations; Type: TABLE DATA; Schema: Mathesar Con; Owner: - --- - -INSERT INTO "Organizations" (id, "Organization") VALUES (1, 'Center of Complex Interventions (CCI)'); -INSERT INTO "Organizations" (id, "Organization") VALUES (3, 'PostgreSQL Global Development Group'); -INSERT INTO "Organizations" (id, "Organization") VALUES (2, 'Django Software Foundation'); -INSERT INTO "Organizations" (id, "Organization") VALUES (4, 'Svelte Foundation'); -INSERT INTO "Organizations" (id, "Organization") VALUES (5, 'Becker Group'); -INSERT INTO "Organizations" (id, "Organization") VALUES (6, 'Learning Equality'); -INSERT INTO "Organizations" (id, "Organization") VALUES (7, 'Harper Automotive'); -INSERT INTO "Organizations" (id, "Organization") VALUES (9, 'The Data Digest'); -INSERT INTO "Organizations" (id, "Organization") VALUES (10, 'Hope Medical Center'); -INSERT INTO "Organizations" (id, "Organization") VALUES (8, 'State of Palmchester'); -INSERT INTO "Organizations" (id, "Organization") VALUES (11, 'Fresh Basket'); -INSERT INTO "Organizations" (id, "Organization") VALUES (12, 'Data Dynamics'); - - --- --- Data for Name: Rooms; Type: TABLE DATA; Schema: Mathesar Con; Owner: - --- - -INSERT INTO "Rooms" (id, "Name", "Capacity") VALUES (1, 'Main Hall', 120); -INSERT INTO "Rooms" (id, "Name", "Capacity") VALUES (2, '202A', 35); -INSERT INTO "Rooms" (id, "Name", "Capacity") VALUES (3, '202B', 35); -INSERT INTO "Rooms" (id, "Name", "Capacity") VALUES (4, '202C', 35); -INSERT INTO "Rooms" (id, "Name", "Capacity") VALUES (5, 
'202D', 35); - - --- --- Data for Name: Speakers; Type: TABLE DATA; Schema: Mathesar Con; Owner: - --- - -INSERT INTO "Speakers" (id, "First Name", "Last Name", "Email", "Bio", "Job Title", "Organization") VALUES (18, 'Edward', 'Bautista', 'edwardb@example.org', 'Edward is an experienced retail professional who has been the store manager for The Fresh Basket for the past 5 years. He has a passion for providing customers with high-quality, fresh, and healthy food options. Under his leadership, The Fresh Basket has become a well-known destination for those who are looking for a wide variety of fresh fruits, vegetables, and other products. He is always looking for ways to improve the store''s operations and customer experience, and has implemented various technology solutions to streamline the store''s processes and improve efficiency. Edward is excited to share his experiences and insights at the upcoming tech conference and hopes to learn from other industry professionals.', 6, 11); -INSERT INTO "Speakers" (id, "First Name", "Last Name", "Email", "Bio", "Job Title", "Organization") VALUES (25, 'Ryan', 'Fisher', 'ryan.fisher@example.gov', 'Ryan Fisher is a data coordinator for the State of Palmchester, where he manages and analyzes data for various government departments. He has a strong background in data management, analysis and visualization, and he is a skilled professional in using various data management tools and software. Ryan is an expert in providing insights and recommendations based on data analysis to support decision-making processes. He has been working in the field for over 5 years and has a deep understanding of data privacy and security standards.', 12, 8); -INSERT INTO "Speakers" (id, "First Name", "Last Name", "Email", "Bio", "Job Title", "Organization") VALUES (2, 'Sarah', 'Smith', 'sarah@example.org', 'Sarah is a designer with a focus on experimenting and iterating with different design processes. During her twelve-year career in tech, she has worked on various products at healthcare and enterprise software startups. She enjoys sharing her ideas and experience with people and helping them think creatively. Sarah is currently the product designer for the Mathesar project, which aims to help people collaborate, develop, and explore the potential of database technologies to meet the challenges of an increasingly complex world.', 3, 1); -INSERT INTO "Speakers" (id, "First Name", "Last Name", "Email", "Bio", "Job Title", "Organization") VALUES (16, 'Daniel', 'Novak', 'novak.daniel@example.auto', 'Daniel is a project manager at Harper Automotive. He has a deep understanding of the automotive industry and has been instrumental in implementing various technology solutions to improve the company''s operations, efficiency and productivity. He is an expert in project management methodologies and has a talent for leading cross-functional teams and coordinating with stakeholders to achieve project goals.', 4, 7); -INSERT INTO "Speakers" (id, "First Name", "Last Name", "Email", "Bio", "Job Title", "Organization") VALUES (1, 'John', 'Davis', 'john@example.org', 'John is a highly experienced backend engineer with over 10 years of experience in building complex database systems. He is a key contributor to the Mathesar project and is responsible for designing and implementing the core architecture of the platform. 
John is passionate about making technology accessible to non-technical users and is dedicated to making Mathesar as user-friendly as possible.', 2, 1); -INSERT INTO "Speakers" (id, "First Name", "Last Name", "Email", "Bio", "Job Title", "Organization") VALUES (6, 'Robert', 'Martinez', 'robert@example.org', 'Robert is a seasoned backend engineer with a strong background in software development. He is an expert in web development and has a deep understanding of the technologies that are used to build Mathesar. Robert is responsible for building the RESTful API of Mathesar, which allows for easy integration with other systems.', 2, 1); -INSERT INTO "Speakers" (id, "First Name", "Last Name", "Email", "Bio", "Job Title", "Organization") VALUES (7, 'Joshua', 'Hernandez', 'joshua@example.org', 'Joshua is a frontend engineer with a background in computer science and a passion for user experience. Joshua has been working on Mathesar for over 2 years and is excited to see the project come to fruition. Joshua is dedicated to building an intuitive and user-friendly interface that makes it easy for anyone to work with databases, regardless of their technical knowledge. He is committed to using his skills and experience to create a user-friendly interface that is both functional and visually appealing.', 2, 1); -INSERT INTO "Speakers" (id, "First Name", "Last Name", "Email", "Bio", "Job Title", "Organization") VALUES (8, 'Jacob', 'Lopez', 'jacob@example.org', 'Jacob is a backend developer with a passion for data science. He has a deep understanding of data structures and algorithms, and is an expert in Python programming. Jacob is responsible for building the data analytics and visualization features of Mathesar, which allows non-technical users to easily explore and understand their data. He is dedicated to making Mathesar a powerful tool for data-driven decision making.', 2, 1); -INSERT INTO "Speakers" (id, "First Name", "Last Name", "Email", "Bio", "Job Title", "Organization") VALUES (4, 'David', 'Garcia', 'david@example.org', 'David is a skilled backend developer with a background in data engineering. He has a deep understanding of database systems and is an expert in SQL and NoSQL databases. David is responsible for developing the data access layer of Mathesar, ensuring that it is optimized for performance and scalability.', 2, 1); -INSERT INTO "Speakers" (id, "First Name", "Last Name", "Email", "Bio", "Job Title", "Organization") VALUES (3, 'Michael', 'Brown', 'michael@example.org', 'Michael is an experienced frontend engineer with a background in software development. Michael has a strong understanding of web technologies and is committed to building interfaces that are both functional and visually appealing. Michael is dedicated to building an intuitive and user-friendly interface that makes it easy for anyone to work with databases.', 2, 1); -INSERT INTO "Speakers" (id, "First Name", "Last Name", "Email", "Bio", "Job Title", "Organization") VALUES (9, 'James', 'Rodriguez', 'james@example.org', 'James is a frontend engineer with a passion for creating intuitive and user-friendly interfaces. With over 5 years of experience in frontend development, James has a strong understanding of web technologies and a keen eye for design. 
James is dedicated to building a user-friendly interface that makes it easy for anyone to work with databases.', 2, 1); -INSERT INTO "Speakers" (id, "First Name", "Last Name", "Email", "Bio", "Job Title", "Organization") VALUES (14, 'Bob', 'Johnson', 'bob@example.org', 'Bob is an engineer at the PostgreSQL Global Development Group. He specializes in implementing new features related to database management and has a strong background in SQL programming. Bob is dedicated to improving the overall performance and functionality of PostgreSQL databases, and is always looking for new ways to push the limits of what can be achieved with this powerful tool.', 2, 3); -INSERT INTO "Speakers" (id, "First Name", "Last Name", "Email", "Bio", "Job Title", "Organization") VALUES (11, 'Amy', 'Miller', 'amymiller@example.org', 'Amy is a software engineer who has been working at the Django Software Foundation for over 5 years. She specializes in implementing new features and improving the overall performance of the Django web framework. Amy is passionate about open-source software and enjoys collaborating with other engineers to build better tools for developers.', 2, 2); -INSERT INTO "Speakers" (id, "First Name", "Last Name", "Email", "Bio", "Job Title", "Organization") VALUES (15, 'Michael', 'Williams', 'michaelwill@example.org', 'Michael is an experienced software engineer at Svelte Foundation. He has been instrumental in implementing new features and improvements to the framework, making it more user-friendly and efficient.', 2, 4); -INSERT INTO "Speakers" (id, "First Name", "Last Name", "Email", "Bio", "Job Title", "Organization") VALUES (21, 'Sean', 'Hernandez', 'sean@example.org', 'Sean is a passionate data journalist who has been working with the Data Digest for the past 3 years. As a data-driven weekly newsletter, the Data Digest is a platform that provides insights and analysis on the latest tech trends and developments. Sean''s extensive experience in data journalism, combined with his deep understanding of the tech industry, makes him the perfect fit for the Data Digest. Sean''s work has been recognized for its in-depth analysis and ability to make complex data easy to understand for the general audience. He is always looking for new ways to tell compelling stories through data, and he is constantly seeking new ways to make data more accessible to the public.', 9, 9); -INSERT INTO "Speakers" (id, "First Name", "Last Name", "Email", "Bio", "Job Title", "Organization") VALUES (20, 'Patricia', 'Daniels', 'patricia@example.org', 'Patricia is a highly skilled and experienced system administrator who has been working at The Becker Group for the past 5 years. She specializes in the implementation and maintenance of complex IT systems and has a deep understanding of various operating systems, including Windows and Linux. Patricia has been responsible for the day-to-day operations of the IT infrastructure of the Becker Group and has played an instrumental role in ensuring the smooth and efficient functioning of the organization''s IT systems. ', 8, 5); -INSERT INTO "Speakers" (id, "First Name", "Last Name", "Email", "Bio", "Job Title", "Organization") VALUES (24, 'Brandon', 'Peters', 'brandon@example.org', 'Brandon is an experienced educator and technology advocate who has been working with Learning Equality, a non-profit organization, for the past 5 years. He specializes in designing and implementing technology-based education programs for underprivileged communities. 
Brandon has a deep understanding of the challenges and opportunities of digital education and has been instrumental in creating digital resources and tools that enable children to learn in a fun and interactive way. He is passionate about using technology to create a more inclusive and equitable world, and is always eager to share his knowledge and experience with others.', 11, 6); -INSERT INTO "Speakers" (id, "First Name", "Last Name", "Email", "Bio", "Job Title", "Organization") VALUES (22, 'Caroline', 'Martin', 'caroline@example.org', 'Caroline is an experienced hospital administrator who currently works at the Hope Medical Center. She has over 10 years of experience in the healthcare industry, with a focus on managing and improving hospital operations. Caroline has a deep understanding of healthcare technology and has been instrumental in implementing new systems and processes at the Hope Medical Center to improve patient care and streamline operations. Caroline shares her knowledge and insights on how technology can be used to improve patient outcomes and hospital efficiency.', 10, 10); -INSERT INTO "Speakers" (id, "First Name", "Last Name", "Email", "Bio", "Job Title", "Organization") VALUES (19, 'Collin', 'Gutierrez', 'collin@example.org', 'Collin is a highly skilled business analyst with a proven track record of delivering results. He has been working with the Becker Group for the past 5 years, where he has been responsible for analyzing and interpreting data to help the company make strategic business decisions. He is a problem solver and a critical thinker who is always looking for ways to improve the company''s bottom line. Collin''s ability to think outside the box and his keen analytical skills make him a valuable asset to the Becker Group and the tech industry as a whole.', 7, 5); -INSERT INTO "Speakers" (id, "First Name", "Last Name", "Email", "Bio", "Job Title", "Organization") VALUES (17, 'Kevin', 'Rivera', 'kev.riv@example.org', 'Kevin is a passionate user advocate with over 5 years of experience in the tech industry. He currently works for the Becker Group as a user advocate, where he helps to improve the user experience and ensure that the products and services offered by the company meet the needs of its customers. Kevin has a strong background in user research, user testing, and usability evaluation. He has a deep understanding of the user-centered design process and is skilled in conducting user research, analyzing data, and making recommendations for product improvements.', 5, 5); -INSERT INTO "Speakers" (id, "First Name", "Last Name", "Email", "Bio", "Job Title", "Organization") VALUES (26, 'Christopher', 'Smith', 'chris.smith@example.org', 'Christopher is a tech entrepreneur and the founder of a rapidly growing startup. He has a deep understanding of the software development process; under his leadership, his startup has grown to become one of the leading companies in the field of data analytics and development.', 13, 12); -INSERT INTO "Speakers" (id, "First Name", "Last Name", "Email", "Bio", "Job Title", "Organization") VALUES (5, 'Emily', 'Johnson', 'emily@example.org', 'Emily is the Director of Technology at Mathesar. With a background in computer science and a passion for user experience, she is dedicated to making Mathesar''s user interface as intuitive and user-friendly as possible. 
She leads the development team and is constantly working to improve the software, making it easier for non-technical users to work with both existing and new databases without any prior knowledge of database concepts. Emily''s goal is to make Mathesar the go-to solution for anyone who needs to access and manage data without the need for technical expertise.', 1, 1); - - --- --- Data for Name: Talk Topic Map; Type: TABLE DATA; Schema: Mathesar Con; Owner: - --- - -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (1, 2, 2); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (26, 13, 7); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (2, 2, 1); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (3, 2, 9); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (27, 13, 6); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (4, 2, 3); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (5, 3, 5); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (6, 4, 6); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (7, 4, 7); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (28, 14, 8); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (8, 4, 5); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (29, 15, 7); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (9, 5, 1); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (10, 5, 9); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (11, 6, 7); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (30, 16, 7); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (12, 6, 6); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (31, 16, 9); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (13, 6, 9); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (14, 7, 7); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (32, 16, 1); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (15, 7, 3); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (33, 17, 1); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (16, 8, 5); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (17, 8, 4); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (34, 17, 7); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (18, 9, 3); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (35, 18, 1); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (19, 9, 1); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (36, 18, 3); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (20, 10, 2); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (37, 19, 4); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (21, 10, 9); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (22, 11, 4); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (38, 19, 5); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (23, 11, 5); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (24, 12, 4); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (25, 12, 5); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (69, 38, 9); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (39, 20, 7); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (40, 21, 9); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (70, 39, 8); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (41, 21, 2); -INSERT INTO "Talk 
Topic Map" (id, "Talk", "Topic") VALUES (71, 39, 9); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (43, 22, 9); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (72, 40, 8); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (42, 22, 2); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (73, 40, 9); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (44, 22, 7); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (45, 23, 7); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (74, 41, 7); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (46, 23, 3); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (47, 24, 8); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (75, 41, 8); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (48, 25, 7); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (49, 26, 7); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (50, 27, 7); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (51, 28, 8); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (52, 29, 9); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (53, 29, 2); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (54, 30, 7); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (55, 30, 9); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (56, 30, 8); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (57, 31, 8); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (58, 31, 9); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (59, 32, 7); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (60, 32, 9); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (61, 33, 7); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (62, 33, 3); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (63, 34, 7); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (64, 34, 3); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (65, 35, 8); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (66, 36, 8); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (67, 36, 9); -INSERT INTO "Talk Topic Map" (id, "Talk", "Topic") VALUES (68, 37, 9); - - --- --- Data for Name: Talks; Type: TABLE DATA; Schema: Mathesar Con; Owner: - --- - -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (2, 'SQLAlchemy and Dynamic Defaults', 1, 2, 8, 'SQLAlchemy doesn''t support dynamic default values very well. 
In this talk, we explore options for how to both create and parse dynamic default values for database columns using SQLAlchemy.', 7, 3); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (5, 'Python for databases', 1, 2, 1, 'In this talk we explore a number of challenges and constraints encountered when using python to interact with a PostgreSQL DB.', 8, 3); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (17, 'PostgreSQL: The best SQL', 2, 1, 4, 'In this talk we will take a look at the current state of PostgreSQL, and explore its features which made it a natural fit for the Database System backing Mathesar.', 8, 3); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (30, 'Data Engineering and Mathesar', 3, 1, 6, 'In this talk, we take a look at data tools such as Apache Spark, and how you can integrate their use with the Mathesar UI.', 7, 3); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (31, 'Data Science and Mathesar', 3, 1, 4, 'In this talk, we will review some basics of data science, including key concepts. You will learn how these concepts can be implemented and used when working with Mathesar.', 1, 3); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (4, 'Exploring Data Visualization: Techniques and Tools', 1, 1, 9, 'In this talk, you will learn about different techniques and tools that you can use to create effective data visualizations. You will learn about different types of charts and graphs, and how to choose the right visualization for your data. You will also learn about tools like D3.js and Tableau, and how to use them to create interactive and engaging visualizations.', 7, 4); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (29, 'Building Modern Web Applications with Svelte.js', 2, 1, 7, 'In this talk, you will learn about Svelte.js, a modern JavaScript framework for building web applications. You will learn about the principles and philosophy behind Svelte, and how it differs from other frameworks. You will also learn about the core concepts and features of Svelte, and how to use them to build efficient and reactive applications.', 1, 4); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (18, 'Advanced Svelte.js Techniques: Animations, Server-Side Rendering, and More', 2, 2, 3, 'In this talk, you will learn about advanced techniques for using Svelte.js, such as animations, server-side rendering, and performance optimization. You will learn about different tools and libraries that you can use to extend the capabilities of Svelte, and how to apply these techniques in real-world projects. You will also learn about some of the trade-offs and considerations when using advanced Svelte techniques.', 7, 4); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (19, 'Integrating Svelte.js with Other Technologies', 2, 2, 9, 'In this talk, you will learn about how to integrate Svelte.js with other technologies such as APIs, databases, and front-end libraries. You will learn about different techniques and tools for connecting Svelte to external data sources and services, and how to build full-stack applications with Svelte. 
You will also learn about some of the considerations and challenges when integrating Svelte with other technologies.', 8, 4); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (33, 'Svelte.js Best Practices: Tips and Tricks', 3, 3, 7, 'In this talk, you will learn about best practices and tips and tricks for using Svelte.js. You will learn about techniques such as code organization, testing, and performance optimization, and how to apply them in your Svelte projects. You will also learn about some of the common pitfalls and mistakes to avoid when using Svelte.', 7, 4); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (16, 'Django and its use in Mathesar', 2, 1, 6, 'In this talk we will explore Django, how it''s used in Mathesar, and the pros/cons of the tool when solving our problems.', 7, 5); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (6, 'No-Code Data Analysis: Tools and Techniques', 1, 3, 2, 'In this talk, you will learn about different tools and techniques that you can use to perform data analysis without writing any code. You will learn about platforms like Excel, Google Sheets, and Mathesar, and how to use them to manipulate, analyze, and visualize data. You will also learn about some of the benefits and limitations of using no-code data analysis tools, and how to choose the right one for your needs.', 7, NULL); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (7, 'Integrating with No-Code tools', 1, 3, 8, 'In this talk, you will learn about different no-code tools, and how to integrate with them in the context of a Mathesar project.', 8, NULL); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (20, 'Designing for Data', 2, 3, 2, 'In this talk we discuss principles, approaches, and some challenges associated with designing a product for data input and analysis.', 7, NULL); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (1, 'The Mathesar Vision', 1, 1, 5, 'In this introduction and welcome, we discuss the vision of Mathesar, and why you should be excited to attend our exclusive conference.', 1, 6); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (21, 'Managing Engineers: a metaphorical approach', 2, 3, 5, 'When your cats are on a hot tin roof, should you herd them, skin them, or let them out of the bag? The roof is a deadline. The cats are engineers.', 8, 6); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (23, 'Retail Success with Mathesar: Boosting Sales and Enhancing the Customer Experience -', 2, 4, 18, 'This talk will cover how businesses in the retail industry have used Mathesar to boost sales and improve customer experience, including how it has helped with data-driven decision-making, inventory management, and customer segmentation', 8, 1); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (32, 'Building APIs with Django Rest Framework', 3, 2, 1, 'In this talk, you will learn about how to use Django Rest Framework (DRF), a powerful Django extension for building APIs. You will learn about the features and architecture of DRF, and how to use it to build RESTful APIs for your Django projects. 
You will also learn about some of the best practices and tools for testing and documenting your APIs.', 7, 5); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (3, 'Optimizing Web Performance: Techniques and Tools', 1, 1, 3, 'In this talk, you will learn about different techniques and tools that you can use to optimize the performance of your web applications. You will learn about techniques such as code optimization, caching, and load balancing, as well as tools like performance monitoring and profiling.', 8, 4); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (15, 'The Mathesar Vision, part 2', 3, 1, 5, 'In this final talk of the last day, we look at how we think Mathesar will evolve over the coming months and years, and hope to inspire you with our direction!', 8, 6); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (9, 'Integrating Mathesar with other tools and platforms', 1, 4, 14, 'Discover how to use Mathesar in conjunction with other tools and platforms, including how to import and export data and use APIs to connect to other systems. -', 8, 5); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (14, 'Taking Control of Your Finances with Mathesar', 1, 4, 19, 'I''ll be sharing my personal experience of how I used Mathesar to take control of my finances. I''ll show you how I use Mathesar to track my income and expenses, create custom reports and visualizations, and set financial goals. I''ll also show you how I use data models and custom queries to identify areas for improvement and make informed decisions about spending. I''ll share tips and tricks on how I use Mathesar to manage multiple income streams and expenses and how to budget and plan for financial goals like saving for retirement or buying a house. ', 9, 6); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (10, 'Mathesar for project management: Best practices', 1, 5, 16, 'This talk will explore how Mathesar can be used to manage projects, including how to set up custom explorations, track progress, and collaborate with team members. -', 8, 1); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (39, 'Getting Started with Mathesar Server Administration', 3, 5, 20, 'In this talk, a Mathesar administrator will provide an overview of the process of installing and configuring a Mathesar server. Topics covered will include system requirements, installation options, initial setup, and best practices for maintaining and troubleshooting a Mathesar server. 
Attendees will learn how to set up a Mathesar server from scratch and will be prepared to address common issues and troubleshoot problems.', 8, 1); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (35, 'Building data models with Mathesar: A step-by-step guide', 3, 3, 5, 'Learn how to build complex data models using Mathesar, including how to define relationships between data, set up calculated fields, and build reports.', 1, 3); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (26, 'Opening Data to the Public: How City Government used Mathesar to increase transparency and citizen engagement', 2, 3, 25, 'In this talk, a representative from a city government will share how their organization has used Mathesar to increase transparency and citizen engagement by sharing data publicly. They will discuss how they used Mathesar to create an open data portal, making public data sets such as crime statistics, real estate transactions and public transportation schedules available to citizens. They will also share specific examples of how this data has been used by residents, researchers, and developers to improve the city and generate positive outcomes, such as reducing crime and improving public transportation. -', 9, 6); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (22, 'Maximizing the Potential of Mathesar in Healthcare: Real-world Examples', 2, 4, 22, 'In this talk, we will present case studies on how businesses in the healthcare industry have used Mathesar to improve patient outcomes and streamline operations, including how it has helped with data management, analysis, and reporting.', 7, 1); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (36, 'Achieving Data-Driven Decision Making with Mathesar: My Experience', 3, 4, 19, 'Collin a business analyst at Becker Group, will share his experience using Mathesar to achieve data-driven decision making in their organization. He will discuss how he set up data models, organized data, performed analysis and how he was able to use insights to drive action and achieve the company''s goals.', 7, 3); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (25, 'Unlocking the Power of Data for Education: Our Experience at Education for All', 2, 5, 24, 'In this talk, an employee from Learning Equality, a nonprofit organization focused on improving access to education, will share how their organization has used Mathesar to unlock the power of data and drive progress towards their mission. They will discuss how Mathesar helped them to manage and analyze data on student performance, resources, and demographics, and how it enabled them to track progress, identify areas for improvement, and make data-driven decisions to support student success. They will provide specific examples of how Mathesar has been used to support their mission and the positive outcomes that have been achieved.', 1, 1); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (40, 'Investigating with Mathesar: How data journalists used Mathesar to analyze public data sets to uncover stories', 3, 4, 21, 'In this talk, data journalists will share how they used Mathesar to analyze public data sets to uncover stories and investigate important issues. 
They will discuss specific examples of how they used Mathesar to analyze data sets such as government spending, campaign contributions and crime statistics to uncover patterns and trends. They will also share specific examples of how the data was used to inform their reporting, and lead to stories that would have been otherwise hard to find or would have remained hidden. Attendees will learn how to use Mathesar to analyze and make sense of large, complex data sets to uncover important stories and investigate important issues. -', 8, 3); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (8, 'Advanced data analysis with Mathesar: grouping, filters, and more', 1, 4, 16, 'Learn how to use Mathesar''s advanced data analysis features, including grouping, filters, and summarization, to uncover insights and trends in your data.', 7, 1); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (11, 'Migrating to Mathesar: Tips and considerations', 1, 5, 15, 'If you''re planning to switch to Mathesar from another database tool, this talk will cover best practices for migration, including how to transfer data, set up new models, and onboard users.', 9, 3); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (12, 'Advanced Mathesar Server Configuration and Management', 1, 2, 20, 'This talk will cover advanced topics for configuring and managing a Mathesar server, including performance tuning, security, and backup and recovery. It will also show how to access and use Mathesar''s API and command-line tools to automate and manage your Mathesar server. Attendees will learn how to optimize the performance of their Mathesar server, secure their data, and ensure that they can recover their data in the event of an emergency.', 9, 1); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (13, 'Organizing your Home Media with Mathesar: Creating Custom Data Models and Relationships -', 1, 5, 14, 'In this talk, an experienced Mathesar user will share how they have used Mathesar to manage their home media collection, including movies, TV shows, and music. They will discuss how they used Mathesar to create custom data models and set up relationships between different media types, and demonstrate how to use Mathesar''s querying and reporting capabilities to easily find and organize media. They will also show how they used the data modeling and relationship features of Mathesar to create a collection of movie and TV series metadata, connected to their local media files. The talk will provide tips and tricks on how to organize and create data models that fit your media collection. Attendees will learn how to use Mathesar to organize their home media collections and unlock new ways to explore and enjoy their media.', 1, 3); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (27, 'Optimizing MMORPG Play with Mathesar: Tips and Tricks for Gamers', 2, 4, 17, 'In this talk, a dedicated MMORPG player will share how they use Mathesar to optimize their gameplay and gain a competitive edge. They will demonstrate how they use Mathesar to track and analyze their character''s statistics, inventory, and quest progress. 
They will also show how they use Mathesar to create custom reports and visualizations to monitor their performance, identify areas for improvement and plan strategies for their next steps in the game. Additionally, they will share how they use Mathesar to track and manage their guild''s resources and member''s information. They will provide tips and tricks on how to use Mathesar to make the most of your MMORPG experience, from character development to group management and from resource tracking to performance monitoring.', 9, NULL); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (28, 'Rapid App Prototyping with Mathesar: From Idea to MVP in No-Time', 2, 5, 26, 'I''ll be sharing my experience of how I used Mathesar to quickly prototype a new app idea without writing any code. I''ll show you how I used Mathesar''s data modeling and querying capabilities to create a functional prototype of the app, complete with a dynamic and interactive user interface. I''ll also discuss how I approached validating my prototype with potential users. ', 7, 5); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (41, 'Streamlining Grocery Planning and Recipe Tracking with Mathesar: A Mom''s Perspective', 3, 3, 22, 'A busy mom will share how she uses Mathesar to streamline her grocery planning and recipe tracking. She will demonstrate how she uses Mathesar to keep track of her family''s dietary restrictions, create shopping lists, and plan meals for the week. She will also show how she uses Mathesar to organize her recipes by categories, ingredients and dietary restrictions and how she uses it to plan and schedule her cooking routine, using the data provided by the data models and custom queries. She will also share how she uses Mathesar to track ingredients inventory and monitor expiration dates. ', 9, 6); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (37, 'Transforming Manufacturing with Mathesar: Using Predictive Analytics to Improve Productivity', 3, 4, 16, 'Daniel, the production manager at Harper Automotive, will share how his factory has been using Mathesar to increase productivity in an unusual way. He will discuss how they have been using predictive analytics to predict equipment failures and prevent downtime. By using Mathesar to analyze data from their machinery and monitoring systems, they were able to detect patterns indicating that equipment was likely to fail, and then schedule maintenance before it happened. As a result, they were able to reduce unplanned downtime and increase overall equipment availability, which led to a significant increase in productivity. Daniel will also share tips on how other factories, particularly in the automotive industry, can use Mathesar to implement similar predictive maintenance strategies and gain similar benefits.', 9, 1); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (24, 'Streamlining Emergency Response with Mathesar: Our Experience at Disaster Relief Fund', 2, 5, 22, 'In this talk, a representative from Disaster Relief Fund, a nonprofit organization focused on providing emergency response, will share their experience using Mathesar to increase efficiency and effectiveness in emergency response. 
They will discuss how Mathesar helped their organization to manage and analyze data on resources, logistics, and needs, and how it enabled them to make data-driven decisions to drive progress towards their mission of providing aid to affected communities. They will provide specific examples of how Mathesar has been utilized in emergency response operations and the positive outcomes it has helped achieve.', 9, 1); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (34, 'Collaborating with Mathesar: Tips and best practices', 3, 2, 11, 'This talk will cover best practices for collaborating with colleagues using Mathesar, including tips for organizing data, sharing explorations, and working together in real-time.', 8, 1); -INSERT INTO "Talks" (id, "Title", "Day", "Room", "Speaker", "Abstract", "Time Slot", "Track") VALUES (38, 'Mastering Mathesar: Tips and Tricks for Setting Up and Configuring Your Database', 3, 5, 17, 'In this talk, an experienced Mathesar user will share tips and tricks for setting up and configuring a Mathesar database. They will cover topics such as data modeling, data entry, and query optimization, as well as best practices for creating effective reports and visualizations. Attendees will learn how to quickly and efficiently set up a Mathesar installation and start using it to gain insights from their data.', 7, 3); - - --- --- Data for Name: Time Slots; Type: TABLE DATA; Schema: Mathesar Con; Owner: - --- - -INSERT INTO "Time Slots" (id, "Slot") VALUES (1, '08:00-10:00'); -INSERT INTO "Time Slots" (id, "Slot") VALUES (7, '13:00-15:00'); -INSERT INTO "Time Slots" (id, "Slot") VALUES (8, '15:30-17:30'); -INSERT INTO "Time Slots" (id, "Slot") VALUES (9, '10:30-12:30'); - - --- --- Data for Name: Topics; Type: TABLE DATA; Schema: Mathesar Con; Owner: - --- - -INSERT INTO "Topics" (id, "Name") VALUES (1, 'Python'); -INSERT INTO "Topics" (id, "Name") VALUES (2, 'SQL'); -INSERT INTO "Topics" (id, "Name") VALUES (3, 'Back End'); -INSERT INTO "Topics" (id, "Name") VALUES (4, 'Front End'); -INSERT INTO "Topics" (id, "Name") VALUES (5, 'Javascript'); -INSERT INTO "Topics" (id, "Name") VALUES (6, 'Design'); -INSERT INTO "Topics" (id, "Name") VALUES (7, 'Product'); -INSERT INTO "Topics" (id, "Name") VALUES (8, 'Management'); -INSERT INTO "Topics" (id, "Name") VALUES (9, 'Database'); - - --- --- Data for Name: Tracks; Type: TABLE DATA; Schema: Mathesar Con; Owner: - --- - -INSERT INTO "Tracks" (id, "Name") VALUES (3, 'Databases'); -INSERT INTO "Tracks" (id, "Name") VALUES (4, 'Front End'); -INSERT INTO "Tracks" (id, "Name") VALUES (5, 'Web Service'); -INSERT INTO "Tracks" (id, "Name") VALUES (1, 'Project Management'); -INSERT INTO "Tracks" (id, "Name") VALUES (6, 'Management'); - - --- --- Name: Positions_id_seq; Type: SEQUENCE SET; Schema: Mathesar Con; Owner: - --- - -SELECT pg_catalog.setval('"Positions_id_seq"', 13, true); - - --- --- Name: Table 115_id_seq; Type: SEQUENCE SET; Schema: Mathesar Con; Owner: - --- - -SELECT pg_catalog.setval('"Table 115_id_seq"', 26, true); - - --- --- Name: Table 117_id_seq; Type: SEQUENCE SET; Schema: Mathesar Con; Owner: - --- - -SELECT pg_catalog.setval('"Table 117_id_seq"', 5, true); - - --- --- Name: Table 118_id_seq; Type: SEQUENCE SET; Schema: Mathesar Con; Owner: - --- - -SELECT pg_catalog.setval('"Table 118_id_seq"', 9, true); - - --- --- Name: Table 119_id_seq; Type: SEQUENCE SET; Schema: Mathesar Con; Owner: - --- - -SELECT pg_catalog.setval('"Table 119_id_seq"', 3, true); - - --- --- 
Name: Table 120_id_seq; Type: SEQUENCE SET; Schema: Mathesar Con; Owner: - --- - -SELECT pg_catalog.setval('"Table 120_id_seq"', 43, true); - - --- --- Name: Table 67_id_seq; Type: SEQUENCE SET; Schema: Mathesar Con; Owner: - --- - -SELECT pg_catalog.setval('"Table 67_id_seq"', 12, true); - - --- --- Name: Talk Topic Map_id_seq; Type: SEQUENCE SET; Schema: Mathesar Con; Owner: - --- - -SELECT pg_catalog.setval('"Talk Topic Map_id_seq"', 75, true); - - --- --- Name: Time Slots_id_seq; Type: SEQUENCE SET; Schema: Mathesar Con; Owner: - --- - -SELECT pg_catalog.setval('"Time Slots_id_seq"', 9, true); - - --- --- Name: Tracks_id_seq; Type: SEQUENCE SET; Schema: Mathesar Con; Owner: - --- - -SELECT pg_catalog.setval('"Tracks_id_seq"', 6, true); - - --- --- Name: Job Titles Positions_pkey; Type: CONSTRAINT; Schema: Mathesar Con; Owner: - --- - -ALTER TABLE ONLY "Job Titles" - ADD CONSTRAINT "Positions_pkey" PRIMARY KEY (id); - - --- --- Name: Speakers Presenters_Email_key; Type: CONSTRAINT; Schema: Mathesar Con; Owner: - --- - -ALTER TABLE ONLY "Speakers" - ADD CONSTRAINT "Presenters_Email_key" UNIQUE ("Email"); - - --- --- Name: Rooms Rooms_Name_key; Type: CONSTRAINT; Schema: Mathesar Con; Owner: - --- - -ALTER TABLE ONLY "Rooms" - ADD CONSTRAINT "Rooms_Name_key" UNIQUE ("Name"); - - --- --- Name: Speakers Table 115_pkey; Type: CONSTRAINT; Schema: Mathesar Con; Owner: - --- - -ALTER TABLE ONLY "Speakers" - ADD CONSTRAINT "Table 115_pkey" PRIMARY KEY (id); - - --- --- Name: Rooms Table 117_pkey; Type: CONSTRAINT; Schema: Mathesar Con; Owner: - --- - -ALTER TABLE ONLY "Rooms" - ADD CONSTRAINT "Table 117_pkey" PRIMARY KEY (id); - - --- --- Name: Topics Table 118_pkey; Type: CONSTRAINT; Schema: Mathesar Con; Owner: - --- - -ALTER TABLE ONLY "Topics" - ADD CONSTRAINT "Table 118_pkey" PRIMARY KEY (id); - - --- --- Name: Days Table 119_pkey; Type: CONSTRAINT; Schema: Mathesar Con; Owner: - --- - -ALTER TABLE ONLY "Days" - ADD CONSTRAINT "Table 119_pkey" PRIMARY KEY (id); - - --- --- Name: Talks Table 120_pkey; Type: CONSTRAINT; Schema: Mathesar Con; Owner: - --- - -ALTER TABLE ONLY "Talks" - ADD CONSTRAINT "Table 120_pkey" PRIMARY KEY (id); - - --- --- Name: Organizations Table 67_pkey; Type: CONSTRAINT; Schema: Mathesar Con; Owner: - --- - -ALTER TABLE ONLY "Organizations" - ADD CONSTRAINT "Table 67_pkey" PRIMARY KEY (id); - - --- --- Name: Talk Topic Map Talk Topic Map_Talk_key; Type: CONSTRAINT; Schema: Mathesar Con; Owner: - --- - -ALTER TABLE ONLY "Talk Topic Map" - ADD CONSTRAINT "Talk Topic Map_Talk_key" UNIQUE ("Talk", "Topic"); - - --- --- Name: Talk Topic Map Talk Topic Map_id_pkey; Type: CONSTRAINT; Schema: Mathesar Con; Owner: - --- - -ALTER TABLE ONLY "Talk Topic Map" - ADD CONSTRAINT "Talk Topic Map_id_pkey" PRIMARY KEY (id); - - --- --- Name: Time Slots Time Slots_Time_key; Type: CONSTRAINT; Schema: Mathesar Con; Owner: - --- - -ALTER TABLE ONLY "Time Slots" - ADD CONSTRAINT "Time Slots_Time_key" UNIQUE ("Slot"); - - --- --- Name: Time Slots Time Slots_pkey; Type: CONSTRAINT; Schema: Mathesar Con; Owner: - --- - -ALTER TABLE ONLY "Time Slots" - ADD CONSTRAINT "Time Slots_pkey" PRIMARY KEY (id); - - --- --- Name: Tracks Tracks_pkey; Type: CONSTRAINT; Schema: Mathesar Con; Owner: - --- - -ALTER TABLE ONLY "Tracks" - ADD CONSTRAINT "Tracks_pkey" PRIMARY KEY (id); - - --- --- Name: Speakers Presenters_Organization_fkey; Type: FK CONSTRAINT; Schema: Mathesar Con; Owner: - --- - -ALTER TABLE ONLY "Speakers" - ADD CONSTRAINT "Presenters_Organization_fkey" FOREIGN KEY ("Organization") 
REFERENCES "Organizations"(id); - - --- --- Name: Speakers Presenters_mathesar_temp_Position_fkey; Type: FK CONSTRAINT; Schema: Mathesar Con; Owner: - --- - -ALTER TABLE ONLY "Speakers" - ADD CONSTRAINT "Presenters_mathesar_temp_Position_fkey" FOREIGN KEY ("Job Title") REFERENCES "Job Titles"(id); - - --- --- Name: Talk Topic Map Talk Topic Map_Talk_fkey; Type: FK CONSTRAINT; Schema: Mathesar Con; Owner: - --- - -ALTER TABLE ONLY "Talk Topic Map" - ADD CONSTRAINT "Talk Topic Map_Talk_fkey" FOREIGN KEY ("Talk") REFERENCES "Talks"(id); - - --- --- Name: Talk Topic Map Talk Topic Map_Topic_fkey; Type: FK CONSTRAINT; Schema: Mathesar Con; Owner: - --- - -ALTER TABLE ONLY "Talk Topic Map" - ADD CONSTRAINT "Talk Topic Map_Topic_fkey" FOREIGN KEY ("Topic") REFERENCES "Topics"(id); - - --- --- Name: Talks Talks_Day_fkey; Type: FK CONSTRAINT; Schema: Mathesar Con; Owner: - --- - -ALTER TABLE ONLY "Talks" - ADD CONSTRAINT "Talks_Day_fkey" FOREIGN KEY ("Day") REFERENCES "Days"(id); - - --- --- Name: Talks Talks_Presenter_fkey; Type: FK CONSTRAINT; Schema: Mathesar Con; Owner: - --- - -ALTER TABLE ONLY "Talks" - ADD CONSTRAINT "Talks_Presenter_fkey" FOREIGN KEY ("Speaker") REFERENCES "Speakers"(id); - - --- --- Name: Talks Talks_Room_fkey; Type: FK CONSTRAINT; Schema: Mathesar Con; Owner: - --- - -ALTER TABLE ONLY "Talks" - ADD CONSTRAINT "Talks_Room_fkey" FOREIGN KEY ("Room") REFERENCES "Rooms"(id); - - --- --- Name: Talks Talks_Time Slot_fkey; Type: FK CONSTRAINT; Schema: Mathesar Con; Owner: - --- - -ALTER TABLE ONLY "Talks" - ADD CONSTRAINT "Talks_Time Slot_fkey" FOREIGN KEY ("Time Slot") REFERENCES "Time Slots"(id); - - --- --- Name: Talks Talks_mathesar_temp_Track_fkey; Type: FK CONSTRAINT; Schema: Mathesar Con; Owner: - --- - -ALTER TABLE ONLY "Talks" - ADD CONSTRAINT "Talks_mathesar_temp_Track_fkey" FOREIGN KEY ("Track") REFERENCES "Tracks"(id); - - --- --- PostgreSQL database dump complete --- - diff --git a/demo/management/commands/clean_stale_db.py b/demo/management/commands/clean_stale_db.py deleted file mode 100644 index c8af856f7c..0000000000 --- a/demo/management/commands/clean_stale_db.py +++ /dev/null @@ -1,94 +0,0 @@ -from datetime import timedelta - -from django.conf import settings -from django.core.management import BaseCommand -from django.utils.timezone import now -from sqlalchemy import text -from sqlalchemy.exc import OperationalError - -from db import engine -from db.metadata import get_empty_metadata -from mathesar.models.base import Database -from mathesar.state.django import reflect_db_objects - - -class Command(BaseCommand): - help = 'Cleans up the stale database created during live demo' - - def add_arguments(self, parser): - parser.add_argument( - '--force', - action='store_true', - help='Force delete a database even if it in use' - ) - parser.add_argument( - '--max-days', - action='store', - type=int, - default=3, - help='A database is considered for deletion if it has existed for more than --max-days', - ) - - def handle(self, *args, **options): - drop_all_stale_databases(*args, **options) - - -def drop_all_stale_databases(force=False, max_days=3, *args, **kwargs): - excluded_databases = [ - settings.DATABASES["default"]["NAME"], - settings.DATABASES["mathesar_tables"]["NAME"], - getattr(settings, "MATHESAR_DEMO_TEMPLATE", None), - # Exclude Postgres default databases - 'postgres', - 'template0', - 'template1' - ] - stale_databases = Database.objects.filter(created_at__lt=now() - timedelta(days=max_days)) - deleted_databases = [] - for database in stale_databases: - 
if database.name not in excluded_databases and database.deleted is False:
-            dropped = drop_mathesar_database(
-                database.name,
-                username=settings.DATABASES["default"]["USER"],
-                password=settings.DATABASES["default"]["PASSWORD"],
-                hostname=settings.DATABASES["default"]["HOST"],
-                root_database=settings.DATABASES["default"]["NAME"],
-                port=settings.DATABASES["default"]["PORT"],
-                force=force
-            )
-            if dropped:
-                deleted_databases.append(database.name)
-                database.delete()
-                reflect_db_objects(get_empty_metadata(), db_name=database.name)
-    return deleted_databases
-
-
-def drop_mathesar_database(
-        user_database, username, password, hostname, root_database, port, force=False
-):
-    user_db_engine = engine.create_future_engine(
-        username, password, hostname, user_database, port
-    )
-    try:
-        user_db_engine.connect()
-    except OperationalError:
-        # Non-existent db object
-        user_db_engine.dispose()
-        return True
-    else:
-        try:
-            root_db_engine = engine.create_future_engine(
-                username, password, hostname, root_database, port,
-            )
-            with root_db_engine.connect() as conn:
-                conn.execution_options(isolation_level="AUTOCOMMIT")
-                delete_stmt = f"DROP DATABASE {user_database} {'WITH (FORCE)' if force else ''}"
-                conn.execute(text(delete_stmt))
-                # This database is not created using a config file,
-                # so its objects can be safely deleted
-                # as they won't be created again during reflection
-                return True
-        except OperationalError:
-            # Database is in use, ignore
-            pass
-    return False
diff --git a/demo/management/commands/download_and_pickle_some_arxiv_data.py b/demo/management/commands/download_and_pickle_some_arxiv_data.py
deleted file mode 100644
index 0a9fe91796..0000000000
--- a/demo/management/commands/download_and_pickle_some_arxiv_data.py
+++ /dev/null
@@ -1,17 +0,0 @@
-import pickle
-from demo.install.base import ARXIV_PAPERS_PICKLE
-from django.core.management import BaseCommand
-from demo.management.commands.load_arxiv_data import download_arxiv_papers
-
-
-class Command(BaseCommand):
-    help = 'Downloads and pickles some Arxiv data, to be later preloaded together with the Arxiv dataset.'
-
-    def handle(self, *args, **options):
-        download_and_pickle_some_arxiv_data()
-
-
-def download_and_pickle_some_arxiv_data():
-    papers = download_arxiv_papers()
-    with open(ARXIV_PAPERS_PICKLE, 'wb') as f:
-        pickle.dump(papers, f)
diff --git a/demo/management/commands/load_arxiv_data.py b/demo/management/commands/load_arxiv_data.py
deleted file mode 100644
index 7b0728c6e6..0000000000
--- a/demo/management/commands/load_arxiv_data.py
+++ /dev/null
@@ -1,418 +0,0 @@
-import re
-import arxiv
-import json
-import logging
-from sqlalchemy import text
-
-from django.core.management import BaseCommand
-
-from demo.install.arxiv_skeleton import get_arxiv_db_and_schema_log_path
-from mathesar.database.base import create_mathesar_engine
-from mathesar.models.base import Database
-
-
-class Command(BaseCommand):
-    help = 'Refreshes the arXiv data set in all relevant DBs'
-
-    def handle(self, *args, **options):
-        update_our_arxiv_dbs()
-
-
-def update_our_arxiv_dbs():
-    """
-    Will log an error if the db-schema log is missing or cannot be deserialized.
- """ - papers = download_arxiv_papers() - try: - db_schema_pairs = _get_logged_db_schema_pairs() - except Exception as e: - logging.error(e, exc_info=True) - return - for db_name, schema_name in db_schema_pairs: - db = Database.current_objects.get(name=db_name) - engine = create_mathesar_engine(db) - update_arxiv_schema(engine, schema_name, papers) - engine.dispose() - - -def update_arxiv_schema(engine, schema_name, papers): - with engine.begin() as conn: - _set_search_path(conn, schema_name) - for paper in papers: - persist_paper(conn, paper) - - -def download_arxiv_papers(): - query_expression = _construct_arxiv_search_query_expression() - arxiv_search = arxiv.Search( - query=query_expression, - max_results=50, - sort_by=arxiv.SortCriterion.LastUpdatedDate - ) - return list(arxiv_search.results()) - - -def _construct_arxiv_search_query_expression(): - """ - Meant to return papers that have the Computer Science Database category in their category set, - and don't have non-computer-science categories in their category set. That's because we only - have human-friendly names and descriptions for Computer Science categories. - """ - query = "cat:cs.DB" - for non_cs_arxiv_cat in _non_cs_arxiv_categories: - query += f' ANDNOT cat:{non_cs_arxiv_cat}' - return query - - -def _set_search_path(conn, schema_name): - set_search_path = text(f'SET search_path="{schema_name}";') - conn.execute(set_search_path) - - -def persist_paper(conn, paper): - authors = [ - author.name - for author - in paper.authors - ] - categories = [ - category - for category - in paper.categories - if _is_category_in_arxiv_taxonomy(category) - ] - author_ids = _persist_values_to_single_value_table( - conn, - table_name="Authors", - column_name="Name", - values=authors - ) - category_ids = _persist_values_to_single_value_table( - conn, - table_name="Categories", - column_name="id", - values=categories - ) - links = paper.links - link_ids = _persist_links(conn, links) - paper_id = _persist_paper(conn, paper) - _persist_paper_mappings( - conn, - paper_id=paper_id, - table_name="Paper-Author Map", - column_name="author_id", - values=author_ids - ) - _persist_paper_mappings( - conn, - paper_id=paper_id, - table_name="Paper-Category Map", - column_name="category_id", - values=category_ids - ) - _persist_paper_mappings( - conn, - paper_id=paper_id, - table_name="Paper-Link Map", - column_name="link_id", - values=link_ids - ) - - -def _persist_paper(conn, paper): - """ - See _persist_values_to_single_value_table docstring, for reason to use DO UPDATE below. 
- """ - arxiv_url = paper.entry_id - updated = str(paper.updated) - published = str(paper.published) - title = paper.title - summary = paper.summary - comment = paper.comment - journal_reference = paper.journal_ref - doi = paper.doi - primary_category_id = _persist_primary_category(conn, paper) - insert_query = text( - f""" - INSERT INTO "Papers" ( - "arXiv URL", - "Updated", - "Published", - "Title", - "Summary", - "Comment", - "Journal reference", - "DOI", - "Primary category" - ) - VALUES ({ - _value_list( - _prep_value(arxiv_url), - _prep_value(updated), - _prep_value(published), - _prep_value(title), - _prep_value(summary), - _prep_value(comment), - _prep_value(journal_reference), - _prep_value(doi), - _prep_value(primary_category_id), - ) - }) - ON CONFLICT ("arXiv URL") - DO UPDATE SET "DOI" = excluded."DOI" - RETURNING id - """ - ) - [paper_id], = conn.execute(insert_query) - return paper_id - - -def _persist_primary_category(conn, paper): - primary_category = paper.primary_category - if _is_category_in_arxiv_taxonomy(primary_category): - resulting_ids = _persist_values_to_single_value_table( - conn, - table_name="Categories", - column_name="id", - values=[primary_category], - ) - primary_category_id = resulting_ids.pop() - return primary_category_id - - -def _value_list(*strs): - return ', '.join(strs) - - -def _persist_links( - conn, links -): - """ - Derives the link's purpose (see _get_link_purpose), and adds that to the table as well. - """ - table_name = "Links" - href_column_name = "URL" - purpose_column_name = "Purpose" - ids = set() - for link in links: - purpose = _get_link_purpose(link) - insert_query = text( - f""" - INSERT INTO "{table_name}" ("{href_column_name}", "{purpose_column_name}") - VALUES ({_prep_value(link.href)}, {_prep_value(purpose)}) - ON CONFLICT ("{href_column_name}") - DO UPDATE SET "{href_column_name}" = excluded."{href_column_name}" - RETURNING id - """ - ) - [row_id], = conn.execute(insert_query) - ids.add(row_id) - return ids - - -def _get_link_purpose(link): - purpose = "Other" - href = link.href - if href.startswith('http://arxiv.org/abs/'): - purpose = "Abstract" - elif href.startswith('http://arxiv.org/pdf/'): - purpose = "PDF version" - return purpose - - -def _persist_values_to_single_value_table( - conn, table_name, column_name, values -): - """ - Note, we use a seemingly meaningless DO UPDATE, because, in constrast to DO - NOTHING, that makes the query's RETURNING work regardless of whether there was a - conflict. Otherwise, we would not receive an id if the record already exists. - """ - ids = set() - for value in values: - insert_query = text( - f""" - INSERT INTO "{table_name}" ("{column_name}") - VALUES ({_prep_value(value)}) - ON CONFLICT ("{column_name}") - DO UPDATE SET "{column_name}" = excluded."{column_name}" - RETURNING id - """ - ) - [row_id], = conn.execute(insert_query) - ids.add(row_id) - return ids - - -def _is_category_in_arxiv_taxonomy(category_name): - """ - Helps exclude categories not in arXiv category taxonomy [0]. - - Currently, we only have human-readable names and descriptions for CS categories, and we want - all categories we record to have those. Normally, excluding papers that have non-CS categories - would be enough (which we're doing by manipulating the arXiv API query string, but a paper - might also have categories that aren't in the arXiv taxonomy. This predicate is meant to detect - those non-arXiv categories. 
- - [0] https://arxiv.org/category_taxonomy - """ - is_cs_category = bool( - re.fullmatch(r"cs\.\w\w", category_name, flags=re.IGNORECASE) - ) - is_non_cs_category = (category_name in _non_cs_arxiv_categories) - return is_cs_category or is_non_cs_category - - -def _persist_paper_mappings(conn, table_name, paper_id, column_name, values): - for value in values: - insert_query = text( - f""" - INSERT INTO "{table_name}" (paper_id, {column_name}) - VALUES ({ - _value_list( - _prep_value(paper_id), - _prep_value(value), - ) - }) - ON CONFLICT DO NOTHING - """ - ) - conn.execute(insert_query) - - -def _prep_value(s): - return f"$escape_token${s}$escape_token$" if s is not None else "NULL" - - -def _get_logged_db_schema_pairs(): - """ - Note, deduplicates the resulting pairs. - """ - db_schema_log_path = get_arxiv_db_and_schema_log_path() - with open(db_schema_log_path, 'r') as lines: - return set( - tuple( - json.loads(line) - ) - for line - in lines - ) - - -_non_cs_arxiv_categories = { - 'econ.EM', - 'econ.GN', - 'econ.TH', - 'eess.AS', - 'eess.IV', - 'eess.SP', - 'eess.SY', - 'math.AC', - 'math.AG', - 'math.AP', - 'math.AT', - 'math.CA', - 'math.CO', - 'math.CT', - 'math.CV', - 'math.DG', - 'math.DS', - 'math.FA', - 'math.GM', - 'math.GN', - 'math.GR', - 'math.GT', - 'math.HO', - 'math.IT', - 'math.KT', - 'math.LO', - 'math.MG', - 'math.MP', - 'math.NA', - 'math.NT', - 'math.OA', - 'math.OC', - 'math.PR', - 'math.QA', - 'math.RA', - 'math.RT', - 'math.SG', - 'math.SP', - 'math.ST', - 'astro-ph.CO', - 'astro-ph.EP', - 'astro-ph.GA', - 'astro-ph.HE', - 'astro-ph.IM', - 'astro-ph.SR', - 'cond-mat.dis-nn', - 'cond-mat.mes-hall', - 'cond-mat.mtrl-sci', - 'cond-mat.other', - 'cond-mat.quant-gas', - 'cond-mat.soft', - 'cond-mat.stat-mech', - 'cond-mat.str-el', - 'cond-mat.supr-con', - 'gr-qc', - 'hep-ex', - 'hep-lat', - 'hep-ph', - 'hep-th', - 'math-ph', - 'nlin.AO', - 'nlin.CD', - 'nlin.CG', - 'nlin.PS', - 'nlin.SI', - 'nucl-ex', - 'nucl-th', - 'physics.acc-ph', - 'physics.ao-ph', - 'physics.app-ph', - 'physics.atm-clus', - 'physics.atom-ph', - 'physics.bio-ph', - 'physics.chem-ph', - 'physics.class-ph', - 'physics.comp-ph', - 'physics.data-an', - 'physics.ed-ph', - 'physics.flu-dyn', - 'physics.gen-ph', - 'physics.geo-ph', - 'physics.hist-ph', - 'physics.ins-det', - 'physics.med-ph', - 'physics.optics', - 'physics.plasm-ph', - 'physics.pop-ph', - 'physics.soc-ph', - 'physics.space-ph', - 'quant-ph', - 'q-bio.BM', - 'q-bio.CB', - 'q-bio.GN', - 'q-bio.MN', - 'q-bio.NC', - 'q-bio.OT', - 'q-bio.PE', - 'q-bio.QM', - 'q-bio.SC', - 'q-bio.TO', - 'q-fin.CP', - 'q-fin.EC', - 'q-fin.GN', - 'q-fin.MF', - 'q-fin.PM', - 'q-fin.PR', - 'q-fin.RM', - 'q-fin.ST', - 'q-fin.TR', - 'stat.AP', - 'stat.CO', - 'stat.ME', - 'stat.ML', - 'stat.OT', - 'stat.TH', -} diff --git a/demo/management/commands/remove_arxiv_pairs_log.py b/demo/management/commands/remove_arxiv_pairs_log.py deleted file mode 100644 index 067146de07..0000000000 --- a/demo/management/commands/remove_arxiv_pairs_log.py +++ /dev/null @@ -1,17 +0,0 @@ -import os - -from django.core.management import BaseCommand - -from demo.install.arxiv_skeleton import get_arxiv_db_and_schema_log_path - - -class Command(BaseCommand): - help = 'Cleans up the file defining arXiv data set DBs' - - def handle(self, *args, **options): - remove_arxiv_db_and_schema_log() - - -def remove_arxiv_db_and_schema_log(): - """Remove the file defining arXiv data set DBs""" - os.remove(get_arxiv_db_and_schema_log_path()) diff --git a/demo/management/commands/setup_demo_template_db.py 
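The `ON CONFLICT ... DO UPDATE ... RETURNING` idiom described in the docstrings of the deleted `load_arxiv_data.py` above is worth seeing in isolation. Below is a minimal sketch, not part of this diff: it assumes SQLAlchemy 1.4+ with a PostgreSQL driver and a throwaway database at a placeholder URL, with a toy `"Authors"` table standing in for the single-value tables above. It also uses bound parameters where the deleted code hand-rolled `$escape_token$` dollar-quoting.

```python
# Sketch of the upsert-with-RETURNING idiom described above. Assumes a
# scratch PostgreSQL database at DB_URL (a placeholder) and SQLAlchemy >= 1.4
# with a PostgreSQL driver installed; none of this is part of the diff.
from sqlalchemy import create_engine, text

DB_URL = "postgresql://mathesar:password@localhost/scratch"  # placeholder

engine = create_engine(DB_URL, future=True)
with engine.begin() as conn:
    conn.execute(text(
        'CREATE TABLE IF NOT EXISTS "Authors" '
        '(id serial PRIMARY KEY, "Name" text UNIQUE)'
    ))
    upsert = text(
        'INSERT INTO "Authors" ("Name") VALUES (:name) '
        'ON CONFLICT ("Name") DO UPDATE SET "Name" = excluded."Name" '
        'RETURNING id'
    )
    # The no-op DO UPDATE makes RETURNING yield a row on both calls; with
    # DO NOTHING, the second call would return no row at all.
    first = conn.execute(upsert, {"name": "Ada Lovelace"}).scalar_one()
    again = conn.execute(upsert, {"name": "Ada Lovelace"}).scalar_one()
    assert first == again
```

diff --git a/demo/management/commands/setup_demo_template_db.py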
b/demo/management/commands/setup_demo_template_db.py deleted file mode 100644 index c9b1ce20b7..0000000000 --- a/demo/management/commands/setup_demo_template_db.py +++ /dev/null @@ -1,53 +0,0 @@ -from sqlalchemy import text -from sqlalchemy.exc import OperationalError -from django.conf import settings -from django.core.management import BaseCommand - -from db.install import install_mathesar -from demo.install.datasets import load_datasets -from mathesar.database.base import create_mathesar_engine -from mathesar.models.base import Database - - -class Command(BaseCommand): - help = 'Initialize the demo template database.' - - def handle(self, *args, **options): - _setup_demo_template_db() - - -def _setup_demo_template_db(): - print("Initializing demo template database...") - - template_db_name = settings.MATHESAR_DEMO_TEMPLATE - django_model = Database.create_from_settings_key("default") - root_engine = create_mathesar_engine(django_model) - with root_engine.connect() as conn: - conn.execution_options(isolation_level="AUTOCOMMIT") - conn.execute(text(f"DROP DATABASE IF EXISTS {template_db_name} WITH (FORCE)")) - root_engine.dispose() - db_model, _ = Database.current_objects.get_or_create( - name=template_db_name, - defaults={ - 'db_name': template_db_name, - 'username': django_model.username, - 'password': django_model.password, - 'host': django_model.host, - 'port': django_model.port - } - ) - try: - install_mathesar( - database_name=template_db_name, - hostname=db_model.host, - username=db_model.username, - password=db_model.password, - port=db_model.port, - skip_confirm=True - ) - except OperationalError as e: - db_model.delete() - raise e - user_engine = create_mathesar_engine(db_model) - load_datasets(user_engine) - user_engine.dispose() diff --git a/demo/middleware.py b/demo/middleware.py deleted file mode 100644 index 6dcd459dc6..0000000000 --- a/demo/middleware.py +++ /dev/null @@ -1,58 +0,0 @@ -"""This module adds a middleware for the Live Demo.""" -import logging - -from django.conf import settings - -from demo.db_namer import get_name -from demo.utils import set_live_demo_db_name -from demo.install.arxiv_skeleton import append_db_and_arxiv_schema_to_log -from demo.install.base import ARXIV, create_demo_database -from demo.install.custom_settings import customize_settings -from demo.install.explorations import load_custom_explorations -from mathesar.database.base import create_mathesar_engine -from mathesar.models.base import Database -from mathesar.state import reset_reflection - - -logger = logging.getLogger(__name__) - - -class LiveDemoModeMiddleware: - def __init__(self, get_response): - self.get_response = get_response - - def __call__(self, request): - sessionid = request.COOKIES.get('sessionid', None) - db_name = get_name(str(sessionid)) - database, created = Database.current_objects.get_or_create( - name=db_name, - defaults={ - 'db_name': db_name, - 'username': settings.DATABASES['default']['USER'], - 'password': settings.DATABASES['default']['PASSWORD'], - 'host': settings.DATABASES['default']['HOST'], - 'port': settings.DATABASES['default']['PORT'] - } - ) - if created: - create_demo_database( - db_name, - database.username, - database.password, - database.host, - settings.DATABASES['default']['NAME'], - database.port, - settings.MATHESAR_DEMO_TEMPLATE - ) - append_db_and_arxiv_schema_to_log(db_name, ARXIV) - reset_reflection(db_name=db_name) - engine = create_mathesar_engine(database) - customize_settings(engine) - load_custom_explorations(engine) - engine.dispose() - - 
logger.debug(f"Using database {db_name} for sessionid {sessionid}") - request = set_live_demo_db_name(request, db_name) - - response = self.get_response(request) - return response diff --git a/demo/settings.py b/demo/settings.py deleted file mode 100644 index 6487340f17..0000000000 --- a/demo/settings.py +++ /dev/null @@ -1,27 +0,0 @@ -from config.settings.common_settings import * # noqa -from decouple import config as decouple_config - -INSTALLED_APPS += [ # noqa - "demo", - "health_check", -] - -MIDDLEWARE += [ # noqa - "demo.middleware.LiveDemoModeMiddleware", -] - - -SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https') - -MATHESAR_MODE = 'PRODUCTION' -MATHESAR_LIVE_DEMO = True -MATHESAR_LIVE_DEMO_USERNAME = decouple_config('MATHESAR_LIVE_DEMO_USERNAME', default=None) -MATHESAR_LIVE_DEMO_PASSWORD = decouple_config('MATHESAR_LIVE_DEMO_PASSWORD', default=None) - -MATHESAR_DEMO_TEMPLATE = 'mathesar_demo_template' -MATHESAR_DEMO_ARXIV_LOG_PATH = decouple_config( - 'MATHESAR_DEMO_ARXIV_LOG_PATH', - default='/var/lib/mathesar/demo/arxiv_db_schema_log' -) -BASE_TEMPLATE_ADDITIONAL_SCRIPT_TEMPLATES += ['demo/analytics.html'] # noqa -ROOT_URLCONF = "demo.urls" diff --git a/demo/templates/demo/analytics.html b/demo/templates/demo/analytics.html deleted file mode 100644 index 1532f284e9..0000000000 --- a/demo/templates/demo/analytics.html +++ /dev/null @@ -1,11 +0,0 @@ - - - \ No newline at end of file diff --git a/demo/tests/test_db_namer.py b/demo/tests/test_db_namer.py deleted file mode 100644 index 6d3f848277..0000000000 --- a/demo/tests/test_db_namer.py +++ /dev/null @@ -1,31 +0,0 @@ -from demo.db_namer import get_name - - -def test_db_namer_32char_sessionid_defaults(): - name = get_name('abcdefghijklmnop0123456789ABCDEF') - assert name == 'far_ten_ink_soy_dew' - - -def test_db_namer_0char_sessionid_defaults(): - name = get_name('') - assert name == 'paw_paw_paw_paw_paw' - - -def test_db_namer_none_sessionid_defaults(): - name = get_name(None) - assert name == 'paw_paw_paw_paw_paw' - - -def test_db_namer_a_sessionid_defaults(): - name = get_name('a') - assert name == 'toy_paw_paw_paw_paw' - - -def test_db_namer_0_sessionid_defaults(): - name = get_name('0') - assert name == 'cut_paw_paw_paw_paw' - - -def test_db_namer_9char_sessionid_defaults(): - name = get_name('abcdefghi') - assert name == 'far_ten_ink_soy_fin' diff --git a/demo/urls.py b/demo/urls.py deleted file mode 100644 index 51edf8e1d9..0000000000 --- a/demo/urls.py +++ /dev/null @@ -1,22 +0,0 @@ -from django.contrib.auth.decorators import login_required -from django.urls import include, path, re_path -from django.views.generic import RedirectView -from rest_framework.decorators import api_view -from rest_framework.exceptions import PermissionDenied - -from config import urls as root_urls - - -@login_required -@api_view(['POST']) -def permission_denied(_, *args, **kwargs): - raise PermissionDenied() - - -urlpatterns = [ - re_path(r'^api/ui/v0/users/(?P[^/.]+)/password_reset/', permission_denied, name='password_reset'), - path('api/ui/v0/users/password_change/', permission_denied, name='password_change'), - path('auth/password_reset_confirm/', RedirectView.as_view(url='/'), name='password_reset'), - path(r'health/', include('health_check.urls')), - path('', include(root_urls)), -] diff --git a/demo/utils.py b/demo/utils.py deleted file mode 100644 index af67af8e1d..0000000000 --- a/demo/utils.py +++ /dev/null @@ -1,35 +0,0 @@ -from django.conf import settings - - -def get_is_live_demo_mode(): - """ - Will return true when in 
live demo mode.
-
-    We want some things to behave differently in demo mode, so sometimes we
-    explicitly check whether we're in it.
-    """
-    return getattr(settings, 'MATHESAR_LIVE_DEMO', False)
-
-
-def get_live_demo_db_name(request):
-    """
-    Retrieves the name of the database associated with this live demo session.
-
-    In live demo mode, our demo-specific middleware embeds the name of
-    the database generated for the user in the request object. This retrieves
-    that.
-    """
-    return request.GET.get('demo_database_name')
-
-
-def set_live_demo_db_name(request, db_name):
-    """
-    Embeds the db_name in the request object.
-
-    Meant to be used in live demo mode, where we want to keep track of which
-    generated database is assigned to a given session.
-    """
-    params = request.GET.copy()
-    params.update({'demo_database_name': db_name})
-    request.GET = params
-    return request
diff --git a/docker-compose.yml b/docker-compose.yml
index 7ff8c91d71..d1560a986c 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -144,7 +144,7 @@ services:
   # service:
     container_name: mathesar_service
-    image: mathesar/mathesar-prod:latest
+    image: mathesar/mathesar-testing:latest
     environment:
       # First we load the variables configured above.
       <<: *config
diff --git a/docs/.gitignore b/docs/.gitignore
new file mode 100644
index 0000000000..d97e100637
--- /dev/null
+++ b/docs/.gitignore
@@ -0,0 +1 @@
+site
\ No newline at end of file
diff --git a/docs/docs/api/rpc.md b/docs/docs/api/rpc.md
index 53ca6293d6..133c416432 100644
--- a/docs/docs/api/rpc.md
+++ b/docs/docs/api/rpc.md
@@ -2,16 +2,12 @@
 Mathesar has an API available at `/api/rpc/v0/` which follows the [JSON-RPC](https://www.jsonrpc.org/specification) spec version 2.0.
 
-## About
+!!! danger "Not yet stable"
+    The RPC API is not yet stable and may change in the future. If you build logic that depends on this API, be mindful that it may change in the future without warning or notice.
 
-### Status
+## Usage
 
-We are currently in the process of [transitioning](https://wiki.mathesar.org/projects/2024/architecture-transition/rpc/) our API architecture from a [RESTful](rest.md) API to this RPC-style API, and we hope to have all functionality available through the RPC API by Mathesar's beta release.
-
-!!! caution "Stability"
-    The RPC API is not yet stable and may change in the future, even after we've completed the transition to the RPC API architecture. If you build logic that depends on this API, be mindful that it may change in the future without warning or notice.
-
-### Usage
+### Requests
 
 To use an RPC function:
 
@@ -23,7 +19,7 @@ To call function `add_from_known_connection` from the `connections` section of this page, you'd send something like:
 
-    `POST /api/rpc/v0/`
+    `POST /api/rpc/v0/`
 
     ```json
    {
      "jsonrpc": "2.0",
      "method": "connections.add_from_known_connection",
      "id": 0,
      "params": {
        "nickname": "anewconnection",
        "database": "mynewcooldb"
      }
    }
    ```
 
----
+### Responses
 
-::: mathesar.rpc.connections
-    options:
-      members:
-        - add_from_known_connection
-        - add_from_scratch
-        - DBModelReturn
-
-## Responses
-
-### Success
+#### Success
 
 Upon a successful call to an RPC function, the API will return a success object. Such an object has the following form:
 
@@ -62,7 +49,7 @@ Upon a successful call to an RPC function, the API will return a success object
 
 The `result` is whatever was returned by the underlying function.
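Put together, the request and response shapes documented above are enough for a tiny client. Here is a minimal sketch, not part of this diff: the endpoint and JSON-RPC envelope are as documented here, while the host, session cookie, and connection parameters are placeholder values.

```python
# Minimal JSON-RPC 2.0 client for the endpoint documented above.
# Host, session cookie, and params are placeholders, not values from this diff.
import requests

response = requests.post(
    "http://localhost:8000/api/rpc/v0/",
    json={
        "jsonrpc": "2.0",
        "method": "connections.add_from_known_connection",
        "id": 0,
        "params": {"nickname": "anewconnection", "database": "mynewcooldb"},
    },
    cookies={"sessionid": "REPLACE_WITH_A_LOGGED_IN_SESSION_ID"},
)
payload = response.json()
if "error" in payload:
    # Error objects carry a code and message (see the error format below).
    raise RuntimeError(payload["error"])
print(payload["result"])  # whatever the underlying function returned
```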
-### Errors +#### Errors When an error is produced by a call to the RPC endpoint, we produce an error of the following form: @@ -90,3 +77,261 @@ Other error codes are grouped according to the library that produced the Excepti - other: -25xxx Unrecognized errors from a given library return a "round number" code, so an unknown `builtins` error gets the code -31000. + +--- + +## Collaborators + +::: collaborators + options: + members: + - list_ + - add + - delete + - set_role + - CollaboratorInfo + +## Columns + +::: columns + options: + members: + - list_ + - add + - patch + - delete + - list_with_metadata + - ColumnInfo + - ColumnListReturn + - CreatableColumnInfo + - PreviewableColumnInfo + - SettableColumnInfo + - TypeOptions + - ColumnDefault + +## Column Metadata + +::: columns.metadata + options: + members: + - list_ + - set_ + - ColumnMetaDataRecord + - ColumnMetaDataBlob + +## Configured Databases + +::: databases.configured + options: + members: + - list_ + - disconnect + - ConfiguredDatabaseInfo + + +## Connections + +::: connections + options: + members: + - add_from_known_connection + - add_from_scratch + - grant_access_to_user + - ConnectionReturn + +## Constraints + +::: constraints + options: + members: + - list_ + - add + - delete + - Constraint + - ForeignKeyConstraint + - PrimaryKeyConstraint + - UniqueConstraint + - CreatableConstraintInfo + +## Data Modeling + +:::data_modeling + options: + members: + - add_foreign_key_column + - add_mapping_table + - suggest_types + - split_table + - move_columns + - MappingColumn + - SplitTableInfo + +## Databases + +::: databases + options: + members: + - get + - delete + - DatabaseInfo + +## Database Privileges + +::: databases.privileges + options: + members: + - list_direct + - replace_for_roles + - transfer_ownership + - DBPrivileges + +## Database Setup + +::: databases.setup + options: + members: + - create_new + - connect_existing + - DatabaseConnectionResult + +## Explorations + +::: explorations + options: + members: + - list_ + - get + - add + - delete + - replace + - run + - run_saved + - ExplorationInfo + - ExplorationDef + - ExplorationResult + +## Records + +:::records + options: + members: + - list_ + - get + - add + - patch + - delete + - search + - RecordList + - RecordAdded + - OrderBy + - Filter + - FilterAttnum + - FilterLiteral + - Grouping + - Group + - GroupingResponse + - SearchParam + +## Roles + +::: roles + options: + members: + - list_ + - add + - delete + - get_current_role + - set_members + - RoleInfo + - RoleMember + +## Roles Configured + +::: roles.configured + options: + members: + - list_ + - add + - delete + - set_password + - ConfiguredRoleInfo + +## Schemas + +::: schemas + options: + members: + - list_ + - get + - add + - delete + - patch + - SchemaInfo + - SchemaPatch + +## Schema Privileges + +::: schemas.privileges + options: + members: + - list_direct + - replace_for_roles + - transfer_ownership + - SchemaPrivileges + +## Servers + +::: servers + options: + members: + - list_ + - ConfiguredServerInfo + +## Tables + +::: tables + options: + members: + - list_ + - get + - add + - delete + - patch + - import_ + - get_import_preview + - list_joinable + - list_with_metadata + - get_with_metadata + - TableInfo + - AddedTableInfo + - SettableTableInfo + - JoinableTableRecord + - JoinableTableInfo + +## Table Metadata + +::: tables.metadata + options: + members: + - list_ + - set_ + - TableMetaDataBlob + - TableMetaDataRecord + +## Table Privileges + +::: tables.privileges + options: + members: + - 
list_direct + - replace_for_roles + - transfer_ownership + - TablePrivileges + +## Types + +::: types + options: + members: + - list_ + - TypeInfo diff --git a/docs/docs/assets/releases/0.2.0-testing.1/create-link-changes.png b/docs/docs/assets/releases/0.2.0-testing.1/create-link-changes.png new file mode 100644 index 0000000000..271f306dac Binary files /dev/null and b/docs/docs/assets/releases/0.2.0-testing.1/create-link-changes.png differ diff --git a/docs/docs/assets/releases/0.2.0-testing.1/db-settings-collaborators.png b/docs/docs/assets/releases/0.2.0-testing.1/db-settings-collaborators.png new file mode 100644 index 0000000000..8fbb6676b5 Binary files /dev/null and b/docs/docs/assets/releases/0.2.0-testing.1/db-settings-collaborators.png differ diff --git a/docs/docs/index.md b/docs/docs/index.md index 9b77bf0621..8706f8e88d 100644 --- a/docs/docs/index.md +++ b/docs/docs/index.md @@ -1,5 +1,10 @@ # Mathesar Documentation +!!! question "Help us get our beta out sooner – send us feedback!" + You're looking at the documentation for our **✨ new test build ✨**; see [release notes here](releases/0.2.0-testing.1.md). + + For a timely and stable beta release, we need feedback from as many users as possible about how this new version of Mathesar is working for you. Let us know on [this GitHub discussion](https://github.com/mathesar-foundation/mathesar/discussions/3956) or drop us a line at . + ## Welcome! Mathesar is a self-hostable open source project that provides a spreadsheet-like interface to a PostgreSQL database. Our web-based interface helps you and your collaborators set up data models, edit data, and build custom reports — no technical skills needed. You can create a new PostgreSQL database while setting up Mathesar or use our UI to interact with an existing database (or do both). @@ -17,7 +22,7 @@ This is a quick way to play with Mathesar locally, but is not appropriate for sa 1. With [Docker](https://docs.docker.com/get-docker/) installed, run: ``` - docker run -it --name mathesar -p 8000:8000 mathesar/mathesar-prod:latest + docker run -it --name mathesar -p 8000:8000 mathesar/mathesar-testing:latest ``` 1. Visit [http://localhost:8000/](http://localhost:8000/) to set up an admin user account and create a database connection. @@ -49,10 +54,7 @@ You can self-host Mathesar by following one of the guides below: ## Use Mathesar -Mathesar should be pretty intuitive to use. More documentation is coming soon, but for now, we've written some documentation for some things that could be tricky. - -- [Syncing database changes](./user-guide/syncing-db.md) if the database's structure is changed outside of Mathesar. -- How to set up [users with different access levels](./user-guide/users.md) +See our [Using Mathesar](user-guide/index.md) section for documentation on Mathesar's features. ## Contribute to Mathesar diff --git a/docs/docs/installation/build-from-source/index.md b/docs/docs/installation/build-from-source/index.md index e28efe0bf7..49b43a651a 100644 --- a/docs/docs/installation/build-from-source/index.md +++ b/docs/docs/installation/build-from-source/index.md @@ -1,5 +1,8 @@ # Install Mathesar from source on Linux +!!! danger "Not a stable release" + This is a testing build released with the goal of gathering feedback from our community. It has **[many known issues](https://github.com/mathesar-foundation/mathesar/issues?q=is%3Aissue+milestone%3Av0.2.0-testing.2+)** and is not recommended for production use. + !!! 
warning "For experienced Linux sysadmins" To follow this guide you need be experienced with Linux server administration, including the command line interface and some common utilities. @@ -23,7 +26,7 @@ You should have **root access** to the machine you're installing Mathesar on. You'll need to install the following system packages before you install Mathesar: -- [Python](https://www.python.org/downloads/) 3.9, 3.10, or 3.11 +- [Python](https://www.python.org/downloads/) 3.9, 3.10, or 3.11 (along with appropriate [`venv`](https://docs.python.org/3/library/venv.html) module) !!! note "Python version" @@ -39,6 +42,8 @@ You'll need to install the following system packages before you install Mathesar - [GNU gettext](https://www.gnu.org/software/gettext/) (Verify with `gettext --version`) +- [unzip](https://packages.debian.org/search?keywords=unzip) A utility tool to de-archive .zip files (Verify with `unzip -v`) + ### Domain (optional) If you want Mathesar to be accessible over the internet, you'll probably want to set up a domain or sub-domain to use. **If you don't need a domain, you can skip this section.** @@ -129,14 +134,14 @@ Then press Enter to customize this guide with your domain name. git clone https://github.com/mathesar-foundation/mathesar.git . ``` -1. Checkout the tag of the latest stable release, `{{mathesar_version}}`. +1. Check out the tag of the release or build you'd like to install, `{{mathesar_version}}`. ``` git checkout {{mathesar_version}} ``` !!! warning "Important" - If you don't run the above command you'll end up installing the latest _development_ version of Mathesar, which will be less stable. + If you don't run the above command you'll end up installing the latest _development_ version of Mathesar. 1. We need to create a python virtual environment for the Mathesar application. @@ -177,23 +182,31 @@ Then press Enter to customize this guide with your domain name. Your `.env` file should look something like this ``` + SECRET_KEY='REPLACE_THIS_WITH_YOUR_RANDOMLY_GENERATED_VALUE' DOMAIN_NAME='xDOMAIN_NAMEx' ALLOWED_HOSTS='xDOMAIN_NAMEx' - SECRET_KEY='REPLACE_THIS_WITH_YOUR_RANDOMLY_GENERATED_VALUE' # REPLACE THIS! POSTGRES_DB=mathesar_django POSTGRES_USER=mathesar - POSTGRES_PASSWORD=mathesar1234 # Do not use this password! + POSTGRES_PASSWORD=REPLACE_THIS_WITH_APPROPRIATE_PASSWORD_FOR_THE_CHOSEN_POSTGRES_USER POSTGRES_HOST=localhost POSTGRES_PORT=5432 ``` !!! tip - You can generate a [SECRET_KEY variable](../../configuration/env-variables.md#secret_key) by running: + To generate a [`SECRET_KEY`](../../configuration/env-variables.md#secret_key) you can use this [browser-based generator](https://djecrety.ir/) or run this command on MacOS or Linux: ``` echo $(cat /dev/urandom | LC_CTYPE=C tr -dc 'a-zA-Z0-9' | head -c 50) ``` + !!! tip + If you want to host Mathesar on multiple domains/subdomains you can do so by adding multiple comma separated domain names to the following env variables without a whitespace: + + ``` + DOMAIN_NAME='xDOMAIN_NAMEx,xDOMAIN_NAMEx.example.org' + ALLOWED_HOSTS='xDOMAIN_NAMEx,xDOMAIN_NAMEx.example.org' + ``` + 1. Add the environment variables to the shell You need to `export` the environment variables listed in the `.env` file to your shell. The easiest way would be to run the below command. @@ -290,7 +303,7 @@ Then press Enter to customize this guide with your domain name. 1. 
Check the logs to verify if Gunicorn is running without any errors ``` - journalctl --priority=notice --unit=gunicorn.service + journalctl --unit=gunicorn.service ``` ### Set up the Caddy reverse proxy @@ -307,7 +320,7 @@ Then press Enter to customize this guide with your domain name. 2. Add the configuration details to the Caddyfile ``` - https://xDOMAIN_NAMEx { + $DOMAIN_NAME { log { output stdout } @@ -385,7 +398,7 @@ Then press Enter to customize this guide with your domain name. 1. Check the logs to verify if Caddy is running without any errors ``` - journalctl --priority=notice --unit=caddy.service + journalctl --unit=caddy.service ``` ### Set up your user account diff --git a/docs/docs/installation/docker-compose/index.md b/docs/docs/installation/docker-compose/index.md index bb9cab73fd..c94849f1d2 100644 --- a/docs/docs/installation/docker-compose/index.md +++ b/docs/docs/installation/docker-compose/index.md @@ -1,5 +1,8 @@ # Install Mathesar via Docker Compose +!!! danger "Not a stable release" + This is a testing build released with the goal of gathering feedback from our community. It has **[many known issues](https://github.com/mathesar-foundation/mathesar/issues?q=is%3Aissue+milestone%3Av0.2.0-testing.2+)** and is not recommended for production use. + ## Prerequisites {% include 'snippets/docker-compose-prerequisites.md' %} diff --git a/docs/docs/releases/0.2.0-testing.1.md b/docs/docs/releases/0.2.0-testing.1.md new file mode 100644 index 0000000000..2ea7fcfcef --- /dev/null +++ b/docs/docs/releases/0.2.0-testing.1.md @@ -0,0 +1,234 @@ +# 🧪 Mathesar 0.2.0-testing.1 + +!!! danger "Not a stable release" + This is a testing build released with the goal of gathering feedback from our community. It has **[many known issues](https://github.com/mathesar-foundation/mathesar/issues?q=is%3Aissue+milestone%3Av0.2.0-testing.2+)** and is not recommended for production use. + +## Summary + +**Mathesar 0.2.0-testing.1** provides an early preview of improvements we plan to release in our beta version. We have a brand new access control system based entirely on PostgreSQL database permissions, and we've also made major improvements to the responsiveness of the Mathesar UI. We've built out a new RPC API, and we're deprecating our REST API in favor of it. + +_This page provides a comprehensive list of all changes in the release._ + +## Installing 0.2.0-testing.1 + +You will need to create a new installation of Mathesar to use this version, which you can do via [Docker Compose](../installation/docker-compose/index.md) or [from source](../installation/build-from-source/index.md). + +We do not support upgrading from previous versions to `0.2.0-testing.1`. + +## Improvements + +### Access control based on PostgreSQL roles and privileges + +We have a brand new access control system based entirely on PostgreSQL database roles and privileges. Mathesar users must now be assigned a database role, and any operations performed by the user through the Mathesar UI will connect to the database using that role. + +![image](../assets/releases/0.2.0-testing.1/db-settings-collaborators.png) + +This gives Mathesar several new capabilities. Database administrators can set up access control directly on the database and use those roles in Mathesar. Mathesar also supports setting up PostgreSQL roles and privileges via the UI, including granular access control at the individual table level.
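For readers less familiar with PostgreSQL privileges, here is a brief, hypothetical sketch of the kind of table-level grant this system builds on; the connection string, role, schema, and table names are illustrative only, and Mathesar's UI issues equivalent statements for you.

```python
# Hypothetical illustration of table-level access control in PostgreSQL;
# Mathesar's UI performs equivalent grants on your behalf.
import psycopg2

conn = psycopg2.connect("dbname=mydb user=postgres")  # illustrative DSN
with conn, conn.cursor() as cur:
    # Give the `analyst` role read-only access to a single table.
    cur.execute("GRANT USAGE ON SCHEMA library TO analyst;")
    cur.execute("GRANT SELECT ON TABLE library.books TO analyst;")
conn.close()
```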
+ +This replaces our previous access control system (which was enforced at the API layer, and only supported permissions at the database and schema levels). It also eliminates the need for Mathesar to use a database superuser for day-to-day operations. + +More detailed documentation can be found in our User Guide under [Permissions](../user-guide/permissions.md) and [Users](../user-guide/users.md). + +------ + +- Initial permissions remodel _[#3626](https://github.com/mathesar-foundation/mathesar/pull/3626 "Initial permissions remodel")_ +- Implement RPC method for listing roles in server _[#3663](https://github.com/mathesar-foundation/mathesar/pull/3663 "Implement RPC endpoint for listing roles in server")_ +- Initial `database_setup` RPC methods _[#3665](https://github.com/mathesar-foundation/mathesar/pull/3665 "Initial `database_setup` RPC functions")_ +- Cast OID values to bigint in msar.get_roles _[#3667](https://github.com/mathesar-foundation/mathesar/pull/3667 "Cast OID values to bigint in msar.get_roles")_ +- RPC methods for servers, collaborators _[#3684](https://github.com/mathesar-foundation/mathesar/pull/3684 "Endpoints for servers, collaborators")_ +- RPC methods for configured roles _[#3685](https://github.com/mathesar-foundation/mathesar/pull/3685 "Endpoints for configured roles")_ +- Remove existing permissions logic on the frontend _[#3689](https://github.com/mathesar-foundation/mathesar/pull/3689 "Remove existing permissions logic on the frontend")_ +- Homepage changes for database connections _[#3710](https://github.com/mathesar-foundation/mathesar/pull/3710 "Homepage changes for database connections")_ +- Homepage UI _[#3711](https://github.com/mathesar-foundation/mathesar/pull/3711 "Homepage UI")_ +- Implement `database_privileges.list_direct` RPC method. _[#3750](https://github.com/mathesar-foundation/mathesar/pull/3750 "Implement `database_privileges.list_direct` RPC endpoint.")_ +- Implement `database_privileges.get_owner_oid_and_curr_role_db_priv` RPC method _[#3760](https://github.com/mathesar-foundation/mathesar/pull/3760 "Implement `database_privileges.get_owner_oid_and_curr_role_db_priv` RPC endpoint")_ +- Database page role configuration, collaborators, roles _[#3762](https://github.com/mathesar-foundation/mathesar/pull/3762 "Database page role configuration, collaborators, roles")_ +- Implement `roles.add` RPC endpoint _[#3769](https://github.com/mathesar-foundation/mathesar/pull/3769 "Implement `roles.add` RPC endpoint")_ +- Add `database_privileges.replace_for_roles` RPC method. 
_[#3781](https://github.com/mathesar-foundation/mathesar/pull/3781 "Add `database_privileges.replace_for_roles` RPC function.")_ +- Add `schema_privileges.replace_for_roles` RPC method _[#3786](https://github.com/mathesar-foundation/mathesar/pull/3786 "Add `schema_privileges.replace_for_roles` RPC function")_ +- Move DB page contexts to the route level _[#3789](https://github.com/mathesar-foundation/mathesar/pull/3789 "Move DB page contexts to the route level")_ +- Add privilege information fields _[#3795](https://github.com/mathesar-foundation/mathesar/pull/3795 "Add privilege information fields")_ +- Database permissions modal - 'Share' tab _[#3810](https://github.com/mathesar-foundation/mathesar/pull/3810 "Database permissions modal - 'Share' tab")_ +- Implement `transfer_ownership` for schemas and tables _[#3833](https://github.com/mathesar-foundation/mathesar/pull/3833 "Implement `transfer_ownership` for schemas and tables")_ +- Implement permissions modal for schemas and tables _[#3836](https://github.com/mathesar-foundation/mathesar/pull/3836 "Implement permissions modal for schemas and tables")_, _[#3838](https://github.com/mathesar-foundation/mathesar/pull/3838 "Implement permissions modal for schemas and tables")_ +- Allow setting owner at creation _[#3840](https://github.com/mathesar-foundation/mathesar/pull/3840 "Allow setting owner at Creation")_ +- Implement `roles.delete`, `databases.drop` & `databases.configured.disconnect` RPC methods _[#3858](https://github.com/mathesar-foundation/mathesar/pull/3858 "Implement `roles.delete`, `databases.drop` & `databases.configured.disconnect` endpoints")_ +- Implement 'Transfer ownership' tabs _[#3859](https://github.com/mathesar-foundation/mathesar/pull/3859 "Implement 'Transfer ownership' tabs")_ +- Permission checks in all pages, disconnect database option _[#3861](https://github.com/mathesar-foundation/mathesar/pull/3861 "permission checks in all pages, Disconnect database option")_ +- Bugfix `get_current_role` when only one role exists _[#3869](https://github.com/mathesar-foundation/mathesar/pull/3869 "Bugfix `get_current_role` when only one role exists")_ +- Grant appropriate permissions for `msar`, `__msar` and `mathesar_types` to `PUBLIC` _[#3881](https://github.com/mathesar-foundation/mathesar/pull/3881 "Grant appropriate permissions for `msar`, `__msar` and `mathesar_types` to `PUBLIC`")_ +- Filter databases for admin and standard users _[#3895](https://github.com/mathesar-foundation/mathesar/pull/3895 "Filter databases for admin and standard users")_ +- Fix logic in `get_current_role` _[#3922](https://github.com/mathesar-foundation/mathesar/pull/3922 "Fix logic in `get_current_role`")_ +- Fix quoting for role grant/revoke expressions _[#3931](https://github.com/mathesar-foundation/mathesar/pull/3931 "Fix quoting for role grant/revoke expressions")_ + +### Performance improvements and RPC API + +We've made major improvements to the responsiveness of the Mathesar UI. Loading data and data entry should be much more snappy, and importing data into Mathesar is around 50 times faster. We've also eliminated the need to manually sync database schema changes to Mathesar; any DDL changes will be reflected in the UI automatically. + +To achieve these performance benefits, we needed to overhaul our backend architecture and API. We have built out a new RPC API and our frontend UI now primarily uses that API. The RPC API [has some documentation here](../api/rpc.md), but should not be considered stable yet.
+ +Most of our REST API endpoints are now deprecated and will be removed soon. The `/users/` and `/data-files/` endpoints remain in use. + +- Connections RPC front end _[#3543](https://github.com/mathesar-foundation/mathesar/pull/3543 "Connections RPC front end")_ +- Exception handler tests _[#3547](https://github.com/mathesar-foundation/mathesar/pull/3547 "Exception handler tests")_ +- Fix SQL syntax highlighting in VS code for SQL tests _[#3588](https://github.com/mathesar-foundation/mathesar/pull/3588 "Fix SQL syntax highlighting in VS code for SQL tests")_ +- Remove dead front end API client code to GET one schema _[#3589](https://github.com/mathesar-foundation/mathesar/pull/3589 "Remove dead front end API client code to GET one schema")_ +- Implement `tables.delete` RPC method _[#3608](https://github.com/mathesar-foundation/mathesar/pull/3608 "Implement `tables.delete` RPC endpoint")_ +- Implement `schemas.delete` RPC method _[#3610](https://github.com/mathesar-foundation/mathesar/pull/3610 "Implement `schemas.delete` RPC method")_ +- Implement `tables.get` RPC method _[#3612](https://github.com/mathesar-foundation/mathesar/pull/3612 "Implement `tables.get` RPC endpoint")_ +- Implement `tables.add` RPC method _[#3614](https://github.com/mathesar-foundation/mathesar/pull/3614 "Implement `tables.add` RPC endpoint")_ +- Add `columns.patch` RPC method _[#3615](https://github.com/mathesar-foundation/mathesar/pull/3615 "Add `columns.patch` RPC function")_ +- Add `columns.add` RPC method _[#3616](https://github.com/mathesar-foundation/mathesar/pull/3616 "Add `columns.add` RPC function")_ +- Implement `tables.patch` RPC method _[#3618](https://github.com/mathesar-foundation/mathesar/pull/3618 "Implement `tables.patch` RPC endpoint")_ +- Implement `schemas.add` RPC method _[#3620](https://github.com/mathesar-foundation/mathesar/pull/3620 "Implement `schemas.add` RPC method")_ +- Implement `table.import` RPC method _[#3623](https://github.com/mathesar-foundation/mathesar/pull/3623 "Implement `table.import` RPC endpoint")_ +- Implement `schemas.patch` RPC method _[#3628](https://github.com/mathesar-foundation/mathesar/pull/3628 "Implement `schemas.patch` RPC method")_ +- Wire RPC methods to new models _[#3632](https://github.com/mathesar-foundation/mathesar/pull/3632 "Wire RPC functions to new models")_ +- Quoting refactor _[#3633](https://github.com/mathesar-foundation/mathesar/pull/3633 "Quoting refactor")_ +- Implement `tables.get_import_preview` RPC method _[#3635](https://github.com/mathesar-foundation/mathesar/pull/3635 "Implement `tables.get_import_preview` RPC endpoint")_ +- Auto generate table name _[#3637](https://github.com/mathesar-foundation/mathesar/pull/3637 "Auto generate table name")_ +- Add `columns.metadata.list` RPC method _[#3641](https://github.com/mathesar-foundation/mathesar/pull/3641 "Add `columns.metadata.list` RPC function")_ +- Implement `tables.metadata` `list` & `patch` RPC method _[#3646](https://github.com/mathesar-foundation/mathesar/pull/3646 "Implement `tables.metadata` `list` & `patch` RPC endpoint")_ +- Fix issue with removing comments on schemas _[#3649](https://github.com/mathesar-foundation/mathesar/pull/3649 "Allow unsetting schema comment using NULL")_ +- Drop old SQL function signature _[#3650](https://github.com/mathesar-foundation/mathesar/pull/3650 "Drop old SQL function signature")_ +- Implement tables list and delete RPC methods _[#3651](https://github.com/mathesar-foundation/mathesar/pull/3651 "Implement tables list and delete RPC APIs")_ +- 
Columns meta RPC patch _[#3653](https://github.com/mathesar-foundation/mathesar/pull/3653 "Columns meta RPC patch")_ +- Constraints RPC transition _[#3664](https://github.com/mathesar-foundation/mathesar/pull/3664 "Constraints RPC transition")_ +- Cast OIDs to bigint before putting in JSON _[#3666](https://github.com/mathesar-foundation/mathesar/pull/3666 "Cast OIDs to bigint before putting in JSON")_ +- RPC implementation for `tables.list_joinable` _[#3669](https://github.com/mathesar-foundation/mathesar/pull/3669 "RPC implementation for `tables.list_joinable`")_ +- Improve tables metadata _[#3672](https://github.com/mathesar-foundation/mathesar/pull/3672 "Improve tables metadata")_ +- RPC implementation for `types.list` _[#3676](https://github.com/mathesar-foundation/mathesar/pull/3676 "RPC implementation for `types.list`")_ +- Add `records.list` RPC method _[#3691](https://github.com/mathesar-foundation/mathesar/pull/3691 "Add `records.list` RPC function")_ +- RPC transition for explorations `list` and `delete` _[#3694](https://github.com/mathesar-foundation/mathesar/pull/3694 "RPC transition for explorations `list` and `delete`")_ +- Implement `explorations.run` RPC method _[#3696](https://github.com/mathesar-foundation/mathesar/pull/3696 "Implement `explorations.run` RPC endpoint")_ +- Fix return type error when re-defining `get_constraints_for_table` SQL function _[#3698](https://github.com/mathesar-foundation/mathesar/pull/3698 "Fix return type error when re-defining `get_constraints_for_table` SQL function")_ +- Fix Issues with `tables.patch` RPC method _[#3699](https://github.com/mathesar-foundation/mathesar/pull/3699 "Fix Issues with `tables.patch` RPC method")_ +- RPC records list filtering _[#3700](https://github.com/mathesar-foundation/mathesar/pull/3700 "RPC records list filtering")_ +- Return empty array when schema has no tables _[#3702](https://github.com/mathesar-foundation/mathesar/pull/3702 "Return empty array when schema has no tables")_ +- RPC function for column info with metadata _[#3703](https://github.com/mathesar-foundation/mathesar/pull/3703 "RPC endpoint for column info with metadata")_ +- First steps of RPC implementation for table page _[#3704](https://github.com/mathesar-foundation/mathesar/pull/3704 "First steps of RPC implementation for table page")_ +- Add `records.search` RPC method _[#3708](https://github.com/mathesar-foundation/mathesar/pull/3708 "Add `records.search` RPC function")_ +- Wire up valid_target_type function to column lister _[#3709](https://github.com/mathesar-foundation/mathesar/pull/3709 "Wire up valid_target_type function to column lister")_ +- Alter column metadata fields _[#3717](https://github.com/mathesar-foundation/mathesar/pull/3717 "Alter column metadata fields")_ +- Add target_table_info in `tables.list_joinable's` response _[#3718](https://github.com/mathesar-foundation/mathesar/pull/3718 "Add target_table_info in `tables.list_joinable's` response")_ +- Records grouping _[#3721](https://github.com/mathesar-foundation/mathesar/pull/3721 "Records grouping")_ +- Fix "no current database" error _[#3723](https://github.com/mathesar-foundation/mathesar/pull/3723 "Fix 'no current database' error")_ +- Implement explorations `run_saved` & `get` RPC methods _[#3725](https://github.com/mathesar-foundation/mathesar/pull/3725 "Implement explorations `run_saved` & `get` RPC endpoints")_ +- Handle new records filtering on the front end _[#3728](https://github.com/mathesar-foundation/mathesar/pull/3728 "Handle new records filtering on the 
front end")_ +- Implement explorations `add` & `replace` method _[#3731](https://github.com/mathesar-foundation/mathesar/pull/3731 "Implement explorations `add` & `replace` endpoint")_ +- Add `records.get` RPC method _[#3740](https://github.com/mathesar-foundation/mathesar/pull/3740 "Add `records.get` RPC function")_ +- Add `records.delete` RPC method _[#3741](https://github.com/mathesar-foundation/mathesar/pull/3741 "Add `records.delete` RPC function")_ +- Add `records.add` RPC method _[#3745](https://github.com/mathesar-foundation/mathesar/pull/3745 "Add `records.add` RPC function")_ +- Adapt front end to new RPC-based joinable tables API _[#3746](https://github.com/mathesar-foundation/mathesar/pull/3746 "Adapt front end to new RPC-based joinable tables API")_ +- Fix edge case while getting joinable tables for tables with no links _[#3748](https://github.com/mathesar-foundation/mathesar/pull/3748 "Fix edge case while getting joinable tables for tables with no links")_ +- Add `records.patch` RPC method _[#3749](https://github.com/mathesar-foundation/mathesar/pull/3749 "Add `records.patch` RPC function")_ +- Records grouping bugfix _[#3751](https://github.com/mathesar-foundation/mathesar/pull/3751 "Records grouping bugfix")_ +- Records delete bugfix _[#3754](https://github.com/mathesar-foundation/mathesar/pull/3754 "Records delete bugfix")_ +- Adapt front end to new records grouping API _[#3755](https://github.com/mathesar-foundation/mathesar/pull/3755 "Adapt front end to new records grouping API")_ +- Implement RPC records CRUD operations on front end _[#3759](https://github.com/mathesar-foundation/mathesar/pull/3759 "Implement RPC records CRUD operations on front end")_ +- Add simplified record summaries _[#3761](https://github.com/mathesar-foundation/mathesar/pull/3761 "Add simplified record summaries")_ +- Add link-adding RPC methods _[#3767](https://github.com/mathesar-foundation/mathesar/pull/3767 "Add link-adding RPC functions")_ +- Add `data_modeling.suggest_types` method. 
_[#3771](https://github.com/mathesar-foundation/mathesar/pull/3771 "Add `data_modeling.suggest_types` function.")_ +- Add `schema_privileges.list_direct` RPC method _[#3782](https://github.com/mathesar-foundation/mathesar/pull/3782 "Add `schema_privileges.list_direct` RPC function")_ +- Add `table_privileges.list_direct` RPC method _[#3783](https://github.com/mathesar-foundation/mathesar/pull/3783 "Add `table_privileges.list_direct` RPC function")_ +- Add `table_privileges.replace_for_roles` RPC method _[#3791](https://github.com/mathesar-foundation/mathesar/pull/3791 "Add `table_privileges.replace_for_roles` RPC function")_ +- Add `roles.get_current_role` RPC method _[#3796](https://github.com/mathesar-foundation/mathesar/pull/3796 "Add `roles.get_current_role` RPC function")_ +- Reorganize namespacing _[#3799](https://github.com/mathesar-foundation/mathesar/pull/3799 "Reorganize namespacing")_ +- Hard-code abstract types response in client _[#3800](https://github.com/mathesar-foundation/mathesar/pull/3800 "Hard-code abstract types response in client")_ +- Change response structure for record summary _[#3802](https://github.com/mathesar-foundation/mathesar/pull/3802 "Change response structure for record summary")_ +- Implement `data_modeling.split_table` RPC methods _[#3803](https://github.com/mathesar-foundation/mathesar/pull/3803 "Implement `data_modeling.split_table` RPC functions")_ +- Modify pkey finder to return False when no pkey exists _[#3804](https://github.com/mathesar-foundation/mathesar/pull/3804 "Modify pkey finder to return False when no pkey exists")_ +- Change response for `tables.add` and `tables.import` _[#3807](https://github.com/mathesar-foundation/mathesar/pull/3807 "Change response for `tables.add` and `tables.import`")_ +- Add summaries to self _[#3808](https://github.com/mathesar-foundation/mathesar/pull/3808 "Add summaries to self")_ +- Move columns SQL _[#3809](https://github.com/mathesar-foundation/mathesar/pull/3809 "Move columns SQL")_ +- Propagate RPC record summary changes to front end _[#3811](https://github.com/mathesar-foundation/mathesar/pull/3811 "Propagate RPC record summary changes to front end")_ +- Add `data_file_id` field to `TableMetaData` _[#3813](https://github.com/mathesar-foundation/mathesar/pull/3813 "Add `data_file_id` field to `TableMetaData`")_ +- Implement `data_modeling.move_columns` RPC method _[#3814](https://github.com/mathesar-foundation/mathesar/pull/3814 "Implement `data_modeling.move_columns` RPC endpoint")_ +- Get imports working again _[#3819](https://github.com/mathesar-foundation/mathesar/pull/3819 "Get imports working again")_ +- Implement `databases.privileges.transfer_ownership` RPC method _[#3821](https://github.com/mathesar-foundation/mathesar/pull/3821 "Implement `databases.privileges.transfer_ownership` RPC endpoint")_ +- Implement `tables.get_with_metadata` RPC method _[#3823](https://github.com/mathesar-foundation/mathesar/pull/3823 "Implement `tables.get_with_metadata` RPC endpoint")_ +- Use data file name as table name during import _[#3824](https://github.com/mathesar-foundation/mathesar/pull/3824 "Use data file name as table name during import")_ +- A couple small front end RPC changes _[#3825](https://github.com/mathesar-foundation/mathesar/pull/3825 "A couple small front end RPC changes")_ +- Bugfix listing records from a table with self-Referential FK _[#3831](https://github.com/mathesar-foundation/mathesar/pull/3831 "Bugfix listing records from a table with self-Referential FK")_ +- Hard-code type cast map on 
front end _[#3832](https://github.com/mathesar-foundation/mathesar/pull/3832 "Hard-code type cast map on front end")_ +- Alter response for schemas `add` and `patch` methods & implement `schemas.get` _[#3837](https://github.com/mathesar-foundation/mathesar/pull/3837 "Alter response for schemas `add` and `patch` endpoints & implement `schemas.get`")_ +- Propagate RPC changes to record selector _[#3843](https://github.com/mathesar-foundation/mathesar/pull/3843 "Propagate RPC changes to record selector")_ +- Use RPC API for column metadata _[#3845](https://github.com/mathesar-foundation/mathesar/pull/3845 "Use RPC API for column metadata")_ +- Propagate RPC changes to link table dialog _[#3847](https://github.com/mathesar-foundation/mathesar/pull/3847 "Propagate RPC changes to link table dialog")_ +- Fix response for `split_table` _[#3850](https://github.com/mathesar-foundation/mathesar/pull/3850 "Fix response for `split_table`")_ +- Alter response for record summaries with NULL records _[#3852](https://github.com/mathesar-foundation/mathesar/pull/3852 "Alter response for record summaries with NULL records")_ +- Make `records.get` work with stringified PK values _[#3853](https://github.com/mathesar-foundation/mathesar/pull/3853 "Make `records.get` work with stringified PK values")_ +- Enabling running of very simple explorations _[#3855](https://github.com/mathesar-foundation/mathesar/pull/3855 "Enabling running of very simple explorations")_ +- Get "extract columns" and "move columns" functionality working again _[#3856](https://github.com/mathesar-foundation/mathesar/pull/3856 "Get 'extract columns' and 'move columns' functionality working again")_ +- Allow patching records via string PKs _[#3857](https://github.com/mathesar-foundation/mathesar/pull/3857 "Allow patching records via string PKs")_ +- Implement `roles.set_members` RPC method _[#3866](https://github.com/mathesar-foundation/mathesar/pull/3866 "Implement `roles.set_members` RPC endpoint")_ +- Fix updating of table name _[#3879](https://github.com/mathesar-foundation/mathesar/pull/3879 "Fix ypdating of table name")_ +- Bugfix summarizations _[#3884](https://github.com/mathesar-foundation/mathesar/pull/3884 "Bugfix summarizations")_ +- Fix insert for table with only ID column _[#3885](https://github.com/mathesar-foundation/mathesar/pull/3885 "Fix insert for table with only ID column")_ +- Add `schema_oid` to `Explorations` model _[#3892](https://github.com/mathesar-foundation/mathesar/pull/3892 "Add `schema_oid` to `Explorations` model")_ +- Get explorations CRUD working again _[#3893](https://github.com/mathesar-foundation/mathesar/pull/3893 "Get explorations CRUD working again")_ +- Reduces frontend caching, fixes a few bugs _[#3897](https://github.com/mathesar-foundation/mathesar/pull/3897 "Reduces frontend caching, fixes a few bugs")_ +- Fix broken exploration "column added" indicators _[#3894](https://github.com/mathesar-foundation/mathesar/pull/3894 "Fix broken exploration 'column added' indicators")_ +- Fix bug when updating table twice _[#3909](https://github.com/mathesar-foundation/mathesar/pull/3909 "Fix bug when updating table twice")_ +- Fix response of `explorations.run` for summarizations _[#3940](https://github.com/mathesar-foundation/mathesar/pull/3940 "Fix response of `explorations.run` for summarizations")_ +- Fixes server errors when RPC exceptions are thrown while rendering common_data _[#3948](https://github.com/mathesar-foundation/mathesar/pull/3948 "Fixes server errors when RPC exceptions are thrown while
rendering common_data")_ + +### Visual improvements + +We made several visual improvements to Mathesar to ensure consistency, better usability, and adherence to design guidelines. The changes were mainly to various modals and to the table inspector. + +A before-and-after comparison of the "Create Link" modal can be seen below. + +![image](../assets/releases/0.2.0-testing.1/create-link-changes.png) + +- Show a loading spinner for table pages _[#3602](https://github.com/mathesar-foundation/mathesar/pull/3602 "Show a loading spinner for table pages")_ +- UI consistency improvements for modals and table inspector _[#3860](https://github.com/mathesar-foundation/mathesar/pull/3860 "UI consistency improvements")_ + +## Bug fixes + +Bugs related to permissions or the backend overhaul are listed in the relevant sections above. The bugs listed here are unrelated to those changes. + +- Remove nonsensical cast_to_email and cast_to_uri functions _[#3564](https://github.com/mathesar-foundation/mathesar/pull/3564 "Remove nonsensical cast_to_email and cast_to_uri functions")_ +- Add 0.1.7 release notes to the nav menu _[#3569](https://github.com/mathesar-foundation/mathesar/pull/3569 "Merge pull request #3568 from mathesar-foundation/release_notes_nav")_ +- Fix error when trying to reset password of other user _[#3536](https://github.com/mathesar-foundation/mathesar/pull/3536 "Fix change password error")_ +- Handle negative numbers not being serialized correctly when copying _[#3582](https://github.com/mathesar-foundation/mathesar/pull/3582 "Handle negative numbers in TSV serialization for copy")_ +- Fix timeout when installing Mathesar on a remote DB _[#3917](https://github.com/mathesar-foundation/mathesar/pull/3917 "Fast install")_ +- Use a semver library to parse our version strings on the front end _[#3938](https://github.com/mathesar-foundation/mathesar/pull/3938 "Use a semver library to parse our version strings on the front end")_ + +## Documentation +- Updated user guide to cover new features and remove unnecessary pages _[#3910](https://github.com/mathesar-foundation/mathesar/pull/3910 "Permissions Documentation Updates")_ +- Improvements to installation from scratch documentation _[#3945](https://github.com/mathesar-foundation/mathesar/pull/3945 "Docs improvements for build from scrach")_ + + +## Maintenance + +Miscellaneous work done to reduce technical debt, improve developer documentation, and maintain our workflow. 
+ +- Refactor CellSelection data structure and store _[#3037](https://github.com/mathesar-foundation/mathesar/pull/3037 "Refactor CellSelection data structure and store")_ +- Remove API documentation infrastructure _[#3541](https://github.com/mathesar-foundation/mathesar/pull/3541 "Remove API documentation infrastructure")_ +- Remove Debian build _[#3546](https://github.com/mathesar-foundation/mathesar/pull/3546 "Remove Debian build")_ +- Update docs to add instructions for loading data from playground _[#3535](https://github.com/mathesar-foundation/mathesar/pull/3535 "Update docs to add instructions for loading data from playground")_ +- Merge 0.1.7 release back into develop _[#3558](https://github.com/mathesar-foundation/mathesar/pull/3558 "Merge pull request #3539 from mathesar-foundation/0.1.7")_ +- Resolve merge conflict for #3558 _[#3559](https://github.com/mathesar-foundation/mathesar/pull/3559 "Resolve merge conflict for #3558")_ +- Revert #3559 _[#3567](https://github.com/mathesar-foundation/mathesar/pull/3567 "Revert #3559")_ +- Bump dependencies _[#3544](https://github.com/mathesar-foundation/mathesar/pull/3544 "Bump dependencies")_, _[#3604](https://github.com/mathesar-foundation/mathesar/pull/3604 "Bump dependencies")_ +- Sort frontend imports _[#3552](https://github.com/mathesar-foundation/mathesar/pull/3552 "Sort frontend imports")_ +- Architectural overhaul _[#3587](https://github.com/mathesar-foundation/mathesar/pull/3587 "Architectural overhaul")_ +- Add SQL code standard for casting OIDs to bigint _[#3643](https://github.com/mathesar-foundation/mathesar/pull/3643 "Add SQL code standard for casting OIDs to bigint")_ +- Fix issue with SQL migrations not running when service restarts _[#3678](https://github.com/mathesar-foundation/mathesar/pull/3678 "Fix install script")_ +- Merge breaking changes into `develop` _[#3695](https://github.com/mathesar-foundation/mathesar/pull/3695 "Merge breaking changes into `develop`")_ +- Update MkDocs dependencies _[#3763](https://github.com/mathesar-foundation/mathesar/pull/3763 "Update MkDocs dependencies")_ +- Merge develop into release branch. _[#3950](https://github.com/mathesar-foundation/mathesar/pull/3950 "Merge develop into release branch.")_ + + +## Live demo changes + +We have removed code related to Mathesar's "live demo" mode since we didn't think it made sense to include code for our promotional website in the core product. If we do choose to maintain our live demo in the future, we will set up a separate microservice that performs some of these functions. + +We also set up a workflow to reset the live demo regularly to mitigate reliability issues. 
+ +- Remove demo code and E2E infrastructure _[#3538](https://github.com/mathesar-foundation/mathesar/pull/3538 "Remove demo code and E2E infrastructure")_, _[#3551](https://github.com/mathesar-foundation/mathesar/pull/3551 "Remove E2E infrastructure")_ +- Add GitHub workflow to reset demo _[#3577](https://github.com/mathesar-foundation/mathesar/pull/3577 "Add GitHub workflow to reset demo")_ +- Updates to GH workflow for resetting demo _[#3579](https://github.com/mathesar-foundation/mathesar/pull/3579 "Updates to GH workflow for resetting demo")_ +- Updates to GH workflow to reset demo _[#3580](https://github.com/mathesar-foundation/mathesar/pull/3580 "Updates to GH workflow to reset demo")_ +- Remove the demo reset workflow _[#3581](https://github.com/mathesar-foundation/mathesar/pull/3581 "Remove the demo reset workflow")_ diff --git a/docs/docs/stylesheets/extra.css b/docs/docs/stylesheets/extra.css index fa915bb3c3..ded3c27abd 100644 --- a/docs/docs/stylesheets/extra.css +++ b/docs/docs/stylesheets/extra.css @@ -151,3 +151,16 @@ border-bottom: 2px solid var(--md-primary-fg-color--light); color: var(--md-primary-bg-color--lightest); } + +/* Make header title smaller */ +.md-header__title { font-size: 0.8rem; } +[dir="ltr"] .md-header__title { margin-left: 0; } + +/* Make header theme switcher smaller */ +.md-header__option .md-header__button { padding: 0.2rem; } +.md-header__option .md-header__button svg { height: 1rem; width: 1rem; } + +/* Make version switcher smaller */ +.md-version { font-size: 0.7rem; font-weight: normal; } +[dir="ltr"] .md-version__current { margin-left: 0.6rem; } +.md-version__list { margin: 1.7rem 0 0 0; } diff --git a/docs/docs/user-guide/databases.md b/docs/docs/user-guide/databases.md new file mode 100644 index 0000000000..f0068b88ae --- /dev/null +++ b/docs/docs/user-guide/databases.md @@ -0,0 +1,33 @@ +# Databases + +Each installation of Mathesar can connect to multiple PostgreSQL databases. Connecting a database will likely be your first step in using Mathesar. + +## PostgreSQL servers + +Every PostgreSQL database lives within a PostgreSQL server. + +- **External servers:** Mathesar can connect to any Internet-exposed PostgreSQL server to access the databases within it. We'll refer to these PostgreSQL servers as "external servers". + +- The **Internal Server:** Most Mathesar installations have an internal PostgreSQL server which the Mathesar application controls and utilizes for storage of application-specific metadata. + + !!! info "Some Mathesar installations don't have an internal server" + It's possible (though not recommended) to configure Mathesar to store its internal metadata in SQLite, thereby circumventing the need for an internal server. + +## Creating a new database + +If you're starting your database from scratch with Mathesar you can either: + +- Use Mathesar to create a new database within Mathesar's internal server and connect to it. This is a good option to get up and running quickly, but it might require more work later should you decide to set up periodic backups or connect other tools to the same database. Also, this option won't be possible if Mathesar was installed without an internal server. + + _OR_ + +- Use another tool to create your database on an external server and then connect Mathesar to it. 
You can administer that external server yourself, or choose from a variety of hosted PostgreSQL solutions such as [Amazon RDS](https://aws.amazon.com/rds/postgresql/pricing/), [Google Cloud SQL](https://cloud.google.com/sql/postgresql), [Supabase](https://supabase.com/database), and others. + +## Connecting a database + +Click the **Connect Database** button from the home page of your Mathesar application and follow the prompts. + +Once you've connected a database, you can navigate to Mathesar's page for it where you can browse the database's schemas and configure various [permissions](./permissions.md) for it. + +Mathesar will remember the connection even after the application is shut down. Your Mathesar user will be added as a "collaborator" on the database (along with the PostgreSQL role you entered). And the password you entered for that role will be stored in Mathesar's internal database, encrypted using Mathesar's [SECRET_KEY](../configuration/env-variables.md#secret_key). + diff --git a/docs/docs/user-guide/index.md b/docs/docs/user-guide/index.md index 56e2e69edf..d82d5092a3 100644 --- a/docs/docs/user-guide/index.md +++ b/docs/docs/user-guide/index.md @@ -2,6 +2,7 @@ Welcome! At this point, we assume you've installed Mathesar successfully and have logged into the web UI. -If you've connected Mathesar to an existing database, you should see all your schemas once you log in, and you can work with them as you please. +If you've connected Mathesar to an existing database, you should see all your schemas and tables once you log in, and you can work with them as you please. -If you're starting from scratch, we recommend **creating a new schema**. +!!! tip "More..." + 👈 Browse the "Using Mathesar" navigation section to find more detailed documentation pages. diff --git a/docs/docs/user-guide/permissions.md b/docs/docs/user-guide/permissions.md new file mode 100644 index 0000000000..2a096f96ff --- /dev/null +++ b/docs/docs/user-guide/permissions.md @@ -0,0 +1,123 @@ +# Mathesar's Role-Based Permissions + +Mathesar uses [PostgreSQL roles](https://www.postgresql.org/docs/current/user-manag.html) to manage permissions on your data. These roles define the actions users can perform, allowing fine-grained control over access. + +## Roles vs Users + +Each Mathesar user accesses a database through one PostgreSQL role — and the user's permissions are determined by the _role's_ permissions within PostgreSQL. + +You can read more about [how users and roles work together](./users.md#users-vs-roles). + +## The Database "Settings" tab {:#database_settings} + +Each database has its own page within Mathesar. And on that page you'll find a "Settings" tab where you can manage roles and collaborators. + +### _In Mathesar:_ Role Configuration {:#role_configuration} + +Use this section to manage the credentials (i.e. passwords) for roles that you'd like to assign to collaborators within Mathesar. Mathesar will display all [LOGIN roles](https://www.postgresql.org/docs/current/role-attributes.html#ROLE-ATTRIBUTES) that exist on the server. + +- Click **Configure in Mathesar** to store the role's password in Mathesar and allow the role to be associated with collaborators. + +- Click **Configure Password** to update the password of an already configured role. + +- Click **Remove** to remove Mathesar's stored password for a role. The role will remain on the server. 
+ + +### _In Mathesar:_ Collaborators {:#collaborators} + +A "collaborator" is a Mathesar user who has access to a database through a specific PostgreSQL role. + +The Collaborators section allows you to add and remove collaborators and edit their corresponding PostgreSQL roles. + +!!! tip "Keep in mind" + + - You'll only be able to choose roles that have been "configured" in the above section — roles for which Mathesar has passwords stored. + + - Removing a collaborator revokes that user's access to the database _but_: + + - If the user is a Mathesar [admin](./users.md#admin-vs-standard-users), they'll be able to gain access again. + - The user will still remain in Mathesar, potentially with access to other databases. + - The role (and its corresponding password) will still remain configured in Mathesar. + - The role will still remain on the PostgreSQL server. + +### _On the Server:_ Roles {:#roles} + +Here you can manage roles available on the server, defining their inheritance, creating new roles, or deleting existing ones. Any changes here will be reflected for all connected databases which share this server. + +- **Create Roles**: You can create new server-level roles from this section. You can configure these roles in two ways: + 1. With login capability and a password, which you can assign to collaborators. + 2. Without login capability, to be used exclusively as a parent role to group permissions that can be inherited by other roles. You cannot assign these non-login roles to collaborators directly. +- **Define Child Roles**: PostgreSQL has a mechanism for [Role Membership](https://www.postgresql.org/docs/current/role-membership.html) wherein any role can be "granted" to any other role to form simple hierarchies or complex graph-based inheritance structures. For any role you've configured within Mathesar, you can use Mathesar to grant the role to other "child roles". +- **Drop Roles**: You can drop server-level roles that are no longer needed. This action removes the role from the server; however, if the role is configured in Mathesar, it will still be displayed. Exercise caution when dropping roles, as it may affect collaborators using the dropped role in Mathesar. + +!!! note + Server roles, once added, must be configured in Mathesar under the **Role Configuration** section before they can be assigned to collaborators. + +--- + +## PostgreSQL objects {:#objects} + +In PostgreSQL, an "object" is a thing like: a database, a schema, a table, _(and some other things too, which we won't cover here)_. + +### Privileges and ownership + +- **Privileges:** Specific privileges on an object can be granted to specific roles. + + !!! example + A role can be granted the `CREATE` privilege on a schema. This allows the role to create new tables within the schema. + +- **Ownership**: Every PostgreSQL object has one and only one role said to be its "owner". The owner generally can do anything directly to the object, but not necessarily to other objects contained within it. By default, the owner is set to the role which created the object. + +- **Shared ownership:** While PostgreSQL has a variety of granular privileges for different actions, there are still certain actions which remain restricted to object owners. For example, only the owner of a table can add new columns to it. + + While this behavior may seem limiting, it's still possible to configure multiple roles to effectively "own" a single object by leveraging PostgreSQL's powerful role inheritance functionality: + + 1. 
We can create a third role to directly own the object and act as a sort of proxy "group". (The group role doesn't need to be a `LOGIN` role and thus doesn't require a password to be configured.) + 1. Then we can grant _that group role_ to any other roles we'd like. + 1. Those child roles will then have permission to do things _as if they were the owner themselves_. + + You can use the Mathesar UI to configure an arrangement like the above, though it will require many steps. + +### Database Permissions + +The "Database Permissions" modal is accessible via a button at the top right of the database page and allows you to configure the owner and granted privileges for a database. + +- **Owner**: Each database has an owner who has administrative control over the database itself, including managing database-level permissions and transferring ownership. Ownership does not automatically extend to the objects within the database (such as schemas and tables), which may have their own separate ownership and permission settings. +- **Granted Access**: Specific permissions can be granted to roles for various actions within the database. +- **Transfer Ownership**: The current owner can transfer ownership to another role, granting them administrative control. + +For each database, the following permission levels can be granted: + +- **Connect**: Allows the role to access and connect to the database. +- **Create**: Includes Connect permissions and allows the role to create new schemas within the database. +- **Custom**: Enables the granular setting of permissions beyond the predefined options. + +### Schema Permissions + +The "Schema Permissions" modal is accessible via a button at the top right of the schema page and allows you to configure the owner and granted privileges for a schema. + +- **Owner**: Each schema has an owner who has administrative control over the schema itself, including managing schema-level permissions and transferring ownership. Ownership does not automatically extend to the objects within the schema (such as tables), which may have their own separate ownership and permission settings. +- **Granted Access**: Specific permissions can be granted to roles for various actions within the schema. +- **Transfer Ownership**: The current owner can transfer ownership to another role, granting them administrative control over the schema. + +For each schema, the following permission levels can be granted: + +- **Read**: Allows the role to access the schema and view its objects. +- **Create**: Includes Read permissions and allows the role to create new tables within the schema. +- **Custom**: Enables the granular setting of permissions beyond the predefined options. + +### Table Permissions + +The Table Permissions modal is accessible via a button from within the right-side inspector panel for each table and allows you to configure the owner and granted privileges for a table. + +- **Owner**: Each table has an owner who has administrative control over the table itself, including managing table-level permissions, transferring ownership, and modifying the table's structure (such as adding, removing, or altering columns). +- **Granted Access**: Specific permissions can be granted to roles for various actions on the table. +- **Transfer Ownership**: The current owner can transfer ownership to another role, granting them administrative control over the table. + +For each table, the following permission levels can be granted: + +- **Read**: Allows the role to access the table and read records. 
+- **Write**: Includes Read permissions and allows the role to insert, update, and delete records in the table. +- **Custom**: Enables the granular setting of permissions beyond the predefined options. + +You can read more about the specific privileges that can be granted in the [PostgreSQL documentation on Privileges](https://www.postgresql.org/docs/current/ddl-priv.html). diff --git a/docs/docs/user-guide/syncing-db.md b/docs/docs/user-guide/syncing-db.md deleted file mode 100644 index e36f7bf89d..0000000000 --- a/docs/docs/user-guide/syncing-db.md +++ /dev/null @@ -1,7 +0,0 @@ -# Syncing Database Changes - -Mathesar is designed to be used in tandem with other database tools. This means that if you make changes to your database structure or data outside of Mathesar, those changes will be reflected in Mathesar's UI. - -If you make structural changes to the database outside Mathesar (e.g. using another tool to add a schema, table, or column), those changes will not be reflected in Mathesar until you manually sync them. You can sync them using the "Sync Database Changes" button in the database homepage. - -External changes to data (e.g. adding, editing, or deleting *records*) will be automatically reflected without clicking this button. diff --git a/docs/docs/user-guide/users.md b/docs/docs/user-guide/users.md index 7cef8d20f5..39d1f42330 100644 --- a/docs/docs/user-guide/users.md +++ b/docs/docs/user-guide/users.md @@ -1,133 +1,50 @@ -# Users & Access Levels +# Users -Mathesar allows you to set up users with different access levels. A user's access levels determine what they can do with the data managed in Mathesar. - -Mathesar's installation process includes setting up the first user. This user is an **Admin**. +Mathesar allows multiple users to collaborate on the same data using a [role-based permissioning](./permissions.md) system. ## Managing Users 1. Click on the gear icon on the top right of the application and select **Administration**. -2. In the left sidebar, click on **Users**. - -!!! note - - Only **Admins** can add new users. - - Mathesar does not send invitation emails to new users (yet). You'll need to send the user their username and password yourself. - - The user will be prompted to change the password when they log in for the first time. - -## User Types - -Users can be either **Admin** or **Standard** users. - -### Admin users - -Admin users: - -- can manage other users (view, add, edit, delete) -- have **Manager** permissions on all databases and schemas - -You cannot set granular permissions for an **Admin** user. - -### Standard users - -By default, **Standard** users cannot see anything in Mathesar. They will need to be granted database or schema roles individually. - -## Database Roles - -There are three levels of database roles: - -- **Managers** own the database. They can edit all data in the database, as well as edit the structure of data (e.g. create tables, add and remove columns, etc.). They also manage access to the database. -- **Editors** can edit all data in the database, but cannot change the underlying data structures or manage access. -- **Viewers** have read-only access to all data in the database. They cannot make any changes. - -### Manager - -- Receives **Manager** permissions on all schemas in the database. -- Can view, add, and remove other users' access to the database. -- Can view, add, edit, and remove any schema in the database. -- Can view, add, edit, and remove any table in the database. 
-- Can view, add, edit, and remove any column in the database. -- Can view, add, edit, and remove any constraint in the database. -- Can view, add, edit, and remove any record in the database. -- Can view, add, edit, and remove any exploration in the database. -- Can perform "extract column" and "move column" actions - -### Editor - -- Receives **Editor** permissions on all schemas in the database. -- Can view any schema in the database. -- Can view any table in the database. -- Can view any column in the database. -- Can view any constraint in the database. -- Can view, add, edit, and remove any exploration in the database. -- Can view, add, edit, and remove any record in the database. - -### Viewer - -- Receives **Viewer** permissions on all schemas in the database. -- Can view any schema in the database. -- Can view any table in the database. -- Can view any column in the database. -- Can view any constraint in the database. -- Can view any exploration in the database. -- Can use the **Data Explorer** to create ad-hoc explorations using data in the database, but cannot save them. - -## Managing Database Roles - -!!! note - - Only **Admins** and **Database Managers** can manage access to a database. - -1. Click on the Mathesar logo on the top left of the application to go to the database page. -2. Click on the **Manage Access** button. +1. In the left sidebar, click on **Users**. -## Schema Roles +!!! info "Admin-assigned passwords" + Any user with an admin-assigned password (new or edited) will be prompted to change their password after logging in. -There are three levels of schema roles: +## Admin vs Standard users -- **Managers** own the schema. They can edit all data in the schema, as well as edit the structure of data (e.g. create tables, add and remove columns, etc.). They also manage access to the schema. -- **Editors** can edit all data in the schema, but cannot change the underlying data structures or manage access. -- **Viewers** have read-only access to all data in the schema. They cannot make any changes. +Each Mathesar user is either **Admin** or **Standard**. -### Manager +Admin users have the following capabilities which Standard users do not: -- Can view, add, and remove other users' access to the schema. -- Can view, add, edit, and remove any schema in the schema. -- Can view, add, edit, and remove any table in the schema. -- Can view, add, edit, and remove any column in the schema. -- Can view, add, edit, and remove any constraint in the schema. -- Can view, add, edit, and remove any record in the schema. -- Can view, add, edit, and remove any exploration in the schema. -- Can perform "extract column" and "move column" actions +- Admins can manage other Mathesar users (view, add, edit, delete). +- Admins can add and remove [Databases](./databases.md). +- Admins can manage [Collaborators](./permissions.md#collaborators). This allows an Admin user to grant any Mathesar user access to a database through a PostgreSQL role that the Admin specifies. -### Editor +Upon installing Mathesar, your first user will be an Admin user. -- Can view any table in the schema. -- Can view any column in the schema. -- Can view any constraint in the schema. -- Can view, add, edit, and remove any record in the schema. -- Can view, add, edit, and remove any exploration in the schema. +## Users vs Roles -### Viewer +- A **"user"** is a Mathesar construct. Each Mathesar installation has multiple users. 
+- A **"role"** is a PostgreSQL construct ([docs](https://www.postgresql.org/docs/current/user-manag.html)). Each PostgreSQL server has multiple roles and multiple databases. -- Can view any table in the schema. -- Can view any column in the schema. -- Can view any constraint in the schema. -- Can view any exploration in the schema. -- Can use the **Data Explorer** to create ad-hoc explorations using data in the schema, but cannot save them. +!!! caution "Why this distinction is important" + Outside of Mathesar, it's not uncommon for people to say _user_ when referring to a PostgreSQL _role_. However, within the context of Mathesar, users and roles are different things! Our documentation maintains this distinction pedantically. -## Managing Schema Roles +How users and roles work together: -!!! note - - Only **Admins**, **Database Managers**, and **Schema Managers** can manage access to a schema. +- To access a database, each Mathesar user must be assigned a PostgreSQL role to be used for that database. +- The user's permissions on actual data (in the [user database](./glossary.md#user-db)) are determined by the corresponding role's permissions within PostgreSQL. -1. Click on the Mathesar logo on the top left of the application to go to the database page. -2. Select the appropriate schema from the list to navigate to the schema's homepage. -3. Click on the **Manage Access** button. + !!! info "Admin doesn't matter here" + The user's "admin" status with Mathesar _has no effect_ on the user's ability to do things with the _data_ in a database! The admin status only affects operations on Mathesar's [internal database](./glossary.md#internal-db) such as managing collaborators and their corresponding roles. -## Order of Precedence +- You can configure separate Mathesar users to share the same PostgreSQL role if you like. This is a good option if you want those users to have the same permissions on the data. +- Or you can use separate PostgreSQL roles for different users. This is necessary any time you want different users to have different permissions on the data. +- You cannot configure one Mathesar user with two PostgreSQL roles simultaneously, though you can save multiple PostgreSQL roles in Mathesar and manually switch between them if necessary. -If a user has both a **Database Role** and a **Schema Role** for a schema within the same database, the **Schema Role** will only have an effect if it grants more permissions. +See [Permissions](./permissions.md) for more information on managing roles. -Examples: +## Limitations -- If a user is a **Database Manager** but has **Viewer** permissions on a given schema, the schema role has no effect. -- If a user is a **Database Editor** but has **Manager** permissions on a given schema, the schema role will take precedence. +- Mathesar does not send invitation emails to new users (yet). You'll need to send the user their username and password yourself. +- There is also no email-based password recovery mechanism yet. If you are locked out of your Mathesar installation's web interface, your system administrator can still [use the command line to reset any user's password](https://stackoverflow.com/questions/6358030/how-to-reset-django-admin-password).
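To make the users-vs-roles distinction in the new docs concrete: an Admin would typically create a PostgreSQL role outside of Mathesar, then assign it to one or more Mathesar users as collaborators. A minimal sketch, assuming `psql` access to the server; the role name `reports_editor`, the password, the database name, and the grants are illustrative placeholders, not part of this change:

```bash
# Create a login role that a Mathesar Admin can later assign to a user.
# Role name, password, database, and grants below are placeholders.
psql -U postgres -c "CREATE ROLE reports_editor WITH LOGIN PASSWORD 'change-me';"

# Allow the role to read and write data in the public schema of one database,
# without letting it alter table structure.
psql -U postgres -d my_database \
  -c "GRANT USAGE ON SCHEMA public TO reports_editor;" \
  -c "GRANT SELECT, INSERT, UPDATE, DELETE ON ALL TABLES IN SCHEMA public TO reports_editor;"
```

Two Mathesar users mapped to `reports_editor` would then have identical permissions on the data, matching the "share the same PostgreSQL role" option described above.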
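For the command-line password reset mentioned under "Limitations" (the linked Stack Overflow answer covers the same ground), Django's built-in `changepassword` management command is the usual route. A minimal sketch, assuming the web service runs in a Docker container named `mathesar_service`; container names vary by deployment:

```bash
# Run Django's built-in password-reset command inside the container hosting
# Mathesar's Django app; it prompts interactively for the new password.
docker exec -it mathesar_service python manage.py changepassword <username>
```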
diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index 58875327c4..25b24c0d98 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -23,13 +23,16 @@ nav: - Debug Mathesar: administration/debug.md - Using Mathesar: - Introduction: user-guide/index.md + - Databases: user-guide/databases.md + - Users: user-guide/users.md + - Permissions: user-guide/permissions.md - Importing data: user-guide/importing-data.md - - Syncing database changes: user-guide/syncing-db.md - - Users & access levels: user-guide/users.md - Glossary: user-guide/glossary.md - API: - - REST: api/rest.md - RPC: api/rpc.md + - REST: api/rest.md + - Test Builds: + - '0.2.0-testing.1': releases/0.2.0-testing.1.md - Releases: - '0.1.7': releases/0.1.7.md - '0.1.6': releases/0.1.6.md @@ -47,7 +50,6 @@ plugins: redirect_maps: "installation-dc/ansible-setup.md": "installation/docker-compose/index.md" "product/intro.md": "user-guide/index.md" - "product/syncing-db.md": "user-guide/syncing-db.md" "product/users.md": "user-guide/users.md" "install/index.md": "index.md" "install/docker-compose/index.md": "installation/docker-compose/index.md" @@ -57,14 +59,14 @@ plugins: - mkdocstrings: handlers: python: - paths: [..] + paths: [../mathesar/rpc/] options: + heading_level: 3 docstring_style: google separate_signature: true - show_root_heading: true - show_root_full_path: false + show_root_toc_entry: false + show_root_members_full_path: true show_source: false - show_symbol_type_heading: true group_by_category: false theme: @@ -91,6 +93,7 @@ theme: toggle: icon: material/brightness-4 name: Switch to light mode + custom_dir: overrides extra_css: - stylesheets/extra.css @@ -117,4 +120,7 @@ markdown_extensions: permalink: true extra: - mathesar_version: 0.1.7 + mathesar_version: 0.2.0-testing.1 + version: + provider: mike + alias: true diff --git a/docs/overrides/404.html b/docs/overrides/404.html new file mode 100644 index 0000000000..6a3bbee7e0 --- /dev/null +++ b/docs/overrides/404.html @@ -0,0 +1,91 @@ +{% extends "base.html" %} + +{% block content %} +
+    Loading...
+ + +{% endblock %} + +{% block scripts %} + {{ super() }} + +{% endblock %} \ No newline at end of file diff --git a/docs/requirements.txt b/docs/requirements.txt index 489148efae..d8a12071ad 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,7 +1,8 @@ +mike==2.1.3 mkdocs==1.4.2 mkdocs-material==8.5.11 mkdocs-redirects==1.2.0 mkdocs-macros-plugin==0.7.0 mkdocs-placeholder-plugin==0.3.1 -mkdocstrings==0.24.2 -mkdocstrings-python==1.9.2 +mkdocstrings==0.25.2 +mkdocstrings-python==1.10.8 diff --git a/mathesar/__init__.py b/mathesar/__init__.py index c5b35f12fe..9cb87736d0 100644 --- a/mathesar/__init__.py +++ b/mathesar/__init__.py @@ -1,3 +1,3 @@ default_app_config = 'mathesar.apps.MathesarConfig' -__version__ = "0.1.7" +__version__ = "0.2.0-testing.1" diff --git a/mathesar/admin.py b/mathesar/admin.py index 213b0595e2..6afc0e326d 100644 --- a/mathesar/admin.py +++ b/mathesar/admin.py @@ -1,9 +1,9 @@ from django.contrib import admin from django.contrib.auth.admin import UserAdmin -from mathesar.models.base import Table, Schema, DataFile +from mathesar.models.deprecated import Table, Schema, DataFile from mathesar.models.users import User -from mathesar.models.query import UIQuery +from mathesar.models.query import Exploration from mathesar.models.shares import SharedTable, SharedQuery @@ -24,6 +24,6 @@ class MathesarUserAdmin(UserAdmin): admin.site.register(Schema) admin.site.register(DataFile) admin.site.register(User, MathesarUserAdmin) -admin.site.register(UIQuery) +admin.site.register(Exploration) admin.site.register(SharedTable) admin.site.register(SharedQuery) diff --git a/mathesar/api/README.md b/mathesar/api/README.md index 95f4252e36..3084a65ade 100644 --- a/mathesar/api/README.md +++ b/mathesar/api/README.md @@ -22,10 +22,3 @@ If you prefer a non-browser tool for API development, you'll have to: 1. Extract the key, value pair for the cookie named `sessionid` using dev tools. 1. submit that cookie with each request until it expires. 1. Repeat as necessary (e.g., when the cookie expires). 
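To round out the retained `sessionid` instructions just above, here is a hedged example of replaying the captured cookie from a non-browser client; the host, port, and endpoint path are assumptions for illustration:

```bash
# Send the session cookie captured from the browser's dev tools with a request.
curl --cookie "sessionid=<value-from-dev-tools>" \
  http://localhost:8000/api/db/v0/tables/
```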
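Separately, since `mike` is now pinned in `docs/requirements.txt` and configured as the version provider in `mkdocs.yml` earlier in this diff, publishing a docs version would typically look like the sketch below; the `latest` alias is an assumption, not something this change configures:

```bash
# Build the docs, commit them under the given version with a "latest" alias,
# and push the result to the gh-pages branch.
mike deploy --push --update-aliases 0.2.0-testing.1 latest
```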
- -## Generate the OpenAPI spec - -To generate the OpenAPI spec file in YAML format,run the following command in the respective environment: -```bash -python manage.py spectacular --color --file schema.yml -``` diff --git a/mathesar/api/db/viewsets/columns.py b/mathesar/api/db/viewsets/columns.py index cd535b76e4..6e6b2b76af 100644 --- a/mathesar/api/db/viewsets/columns.py +++ b/mathesar/api/db/viewsets/columns.py @@ -20,7 +20,7 @@ from mathesar.api.pagination import DefaultLimitOffsetPagination from mathesar.api.serializers.columns import ColumnSerializer from mathesar.api.utils import get_table_or_404 -from mathesar.models.base import Column +from mathesar.models.deprecated import Column class ColumnViewSet(AccessViewSetMixin, viewsets.ModelViewSet): diff --git a/mathesar/api/db/viewsets/constraints.py b/mathesar/api/db/viewsets/constraints.py index f276ae8af4..9e4aece735 100644 --- a/mathesar/api/db/viewsets/constraints.py +++ b/mathesar/api/db/viewsets/constraints.py @@ -12,7 +12,7 @@ from mathesar.api.pagination import DefaultLimitOffsetPagination from mathesar.api.serializers.constraints import ConstraintSerializer from mathesar.api.utils import get_table_or_404 -from mathesar.models.base import Constraint +from mathesar.models.deprecated import Constraint class ConstraintViewSet(AccessViewSetMixin, ListModelMixin, RetrieveModelMixin, CreateModelMixin, viewsets.GenericViewSet): diff --git a/mathesar/api/db/viewsets/data_files.py b/mathesar/api/db/viewsets/data_files.py index fc20278de3..4c3eb1a29f 100644 --- a/mathesar/api/db/viewsets/data_files.py +++ b/mathesar/api/db/viewsets/data_files.py @@ -10,7 +10,7 @@ import mathesar.api.exceptions.generic_exceptions.base_exceptions as base_api_exceptions from mathesar.api.exceptions.error_codes import ErrorCodes from mathesar.errors import InvalidTableError -from mathesar.models.base import DataFile +from mathesar.models.deprecated import DataFile from mathesar.api.pagination import DefaultLimitOffsetPagination from mathesar.api.serializers.data_files import DataFileSerializer from mathesar.utils.datafiles import create_datafile diff --git a/mathesar/api/db/viewsets/databases.py b/mathesar/api/db/viewsets/databases.py index 95d4eccbef..0edac6828e 100644 --- a/mathesar/api/db/viewsets/databases.py +++ b/mathesar/api/db/viewsets/databases.py @@ -5,7 +5,7 @@ from rest_framework.response import Response from mathesar.api.db.permissions.database import DatabaseAccessPolicy -from mathesar.models.base import Database +from mathesar.models.deprecated import Connection from mathesar.api.dj_filters import DatabaseFilter from mathesar.api.pagination import DefaultLimitOffsetPagination @@ -28,7 +28,7 @@ class ConnectionViewSet(AccessViewSetMixin, viewsets.ModelViewSet): def get_queryset(self): return self.access_policy.scope_queryset( self.request, - Database.objects.all().order_by('-created_at') + Connection.objects.all().order_by('-created_at') ) def destroy(self, request, pk=None): diff --git a/mathesar/api/db/viewsets/queries.py b/mathesar/api/db/viewsets/queries.py index b15b23c0df..c9549fd238 100644 --- a/mathesar/api/db/viewsets/queries.py +++ b/mathesar/api/db/viewsets/queries.py @@ -9,13 +9,13 @@ from rest_framework.decorators import action from mathesar.api.db.permissions.query import QueryAccessPolicy -from mathesar.api.dj_filters import UIQueryFilter +from mathesar.api.dj_filters import ExplorationFilter from mathesar.api.exceptions.query_exceptions.exceptions import DeletedColumnAccess, DeletedColumnAccessAPIException from 
mathesar.api.pagination import DefaultLimitOffsetPagination, TableLimitOffsetPagination from mathesar.api.serializers.queries import BaseQuerySerializer, QuerySerializer from mathesar.api.serializers.records import RecordListParameterSerializer -from mathesar.models.query import UIQuery +from mathesar.models.query import Exploration class QueryViewSet( @@ -30,7 +30,7 @@ class QueryViewSet( serializer_class = QuerySerializer pagination_class = DefaultLimitOffsetPagination filter_backends = (filters.DjangoFilterBackend,) - filterset_class = UIQueryFilter + filterset_class = ExplorationFilter permission_classes = [IsAuthenticatedOrReadOnly] access_policy = QueryAccessPolicy @@ -55,10 +55,10 @@ def _get_scoped_queryset(self): if should_queryset_be_scoped: queryset = self.access_policy.scope_queryset( self.request, - UIQuery.objects.all() + Exploration.objects.all() ) else: - queryset = UIQuery.objects.all() + queryset = Exploration.objects.all() return queryset @action(methods=['get'], detail=True) @@ -119,7 +119,7 @@ def run(self, request): paginator = TableLimitOffsetPagination() input_serializer = BaseQuerySerializer(data=request.data, context={'request': request}) input_serializer.is_valid(raise_exception=True) - query = UIQuery(**input_serializer.validated_data) + query = Exploration(**input_serializer.validated_data) try: query.replace_transformations_with_processed_transformations() query.add_defaults_to_display_names() diff --git a/mathesar/api/db/viewsets/records.py b/mathesar/api/db/viewsets/records.py index 4e14a6df83..b28782371f 100644 --- a/mathesar/api/db/viewsets/records.py +++ b/mathesar/api/db/viewsets/records.py @@ -22,7 +22,7 @@ from mathesar.api.serializers.records import RecordListParameterSerializer, RecordSerializer from mathesar.api.utils import get_table_or_404 from mathesar.functions.operations.convert import rewrite_db_function_spec_column_ids_to_names -from mathesar.models.base import Table +from mathesar.models.deprecated import Table from mathesar.utils.json import MathesarJSONRenderer diff --git a/mathesar/api/db/viewsets/schemas.py b/mathesar/api/db/viewsets/schemas.py index fb7dc64f24..153c081198 100644 --- a/mathesar/api/db/viewsets/schemas.py +++ b/mathesar/api/db/viewsets/schemas.py @@ -10,7 +10,7 @@ from mathesar.api.pagination import DefaultLimitOffsetPagination from mathesar.api.serializers.dependents import DependentSerializer, DependentFilterSerializer from mathesar.api.serializers.schemas import SchemaSerializer -from mathesar.models.base import Schema +from mathesar.models.deprecated import Schema from mathesar.utils.schemas import create_schema_and_object from mathesar.api.exceptions.validation_exceptions.exceptions import EditingPublicSchemaIsDisallowed diff --git a/mathesar/api/db/viewsets/table_settings.py b/mathesar/api/db/viewsets/table_settings.py index cb2b525f65..62690e0ad0 100644 --- a/mathesar/api/db/viewsets/table_settings.py +++ b/mathesar/api/db/viewsets/table_settings.py @@ -5,7 +5,7 @@ from mathesar.api.pagination import DefaultLimitOffsetPagination from mathesar.api.serializers.table_settings import TableSettingsSerializer from mathesar.api.utils import get_table_or_404 -from mathesar.models.base import TableSettings +from mathesar.models.deprecated import TableSettings class TableSettingsViewSet(AccessViewSetMixin, ModelViewSet): diff --git a/mathesar/api/db/viewsets/tables.py b/mathesar/api/db/viewsets/tables.py index 635ab234d4..6bb4a533f6 100644 --- a/mathesar/api/db/viewsets/tables.py +++ b/mathesar/api/db/viewsets/tables.py @@ 
-28,7 +28,7 @@ TableImportSerializer, MoveTableRequestSerializer ) -from mathesar.models.base import Table +from mathesar.models.deprecated import Table from mathesar.utils.tables import get_table_column_types from mathesar.utils.joins import get_processed_joinable_tables diff --git a/mathesar/api/dj_filters.py b/mathesar/api/dj_filters.py index d72e9f67cd..70d2cddb4b 100644 --- a/mathesar/api/dj_filters.py +++ b/mathesar/api/dj_filters.py @@ -1,8 +1,8 @@ from django_filters import BooleanFilter, DateTimeFromToRangeFilter, OrderingFilter from django_property_filter import PropertyFilterSet, PropertyBaseInFilter, PropertyCharFilter, PropertyOrderingFilter -from mathesar.models.base import Schema, Table, Database, DataFile -from mathesar.models.query import UIQuery +from mathesar.models.deprecated import Schema, Table, Connection, DataFile +from mathesar.models.query import Exploration class CharInFilter(PropertyBaseInFilter, PropertyCharFilter): @@ -19,7 +19,7 @@ class DatabaseFilter(PropertyFilterSet): ) class Meta: - model = Database + model = Connection fields = ['deleted'] @@ -77,7 +77,7 @@ class Meta: fields = ['name', 'schema', 'created_at', 'updated_at', 'import_verified'] -class UIQueryFilter(PropertyFilterSet): +class ExplorationFilter(PropertyFilterSet): database = CharInFilter(field_name='base_table__schema__database__name', lookup_expr='in') name = CharInFilter(field_name='name', lookup_expr='in') @@ -90,5 +90,5 @@ class UIQueryFilter(PropertyFilterSet): ) class Meta: - model = UIQuery + model = Exploration fields = ['name'] diff --git a/mathesar/api/exceptions/database_exceptions/exceptions.py b/mathesar/api/exceptions/database_exceptions/exceptions.py index a74727b764..07e2039cf6 100644 --- a/mathesar/api/exceptions/database_exceptions/exceptions.py +++ b/mathesar/api/exceptions/database_exceptions/exceptions.py @@ -13,7 +13,7 @@ MathesarAPIException, get_default_exception_detail, ) -from mathesar.models.base import Column, Constraint +from mathesar.models.deprecated import Column, Constraint from mathesar.state import get_cached_metadata @@ -159,7 +159,7 @@ def __init__( @staticmethod def err_msg(exception): if type(exception) is InvalidTypeError and exception.column_name and exception.new_type: - return f'{exception.column_name} cannot be cast to {exception.new_type}.' + return f'"{exception.column_name}" cannot be cast to {exception.new_type}.' return 'Invalid type cast requested.' 
diff --git a/mathesar/api/exceptions/error_codes.py b/mathesar/api/exceptions/error_codes.py index c5fd4749ce..d4abf2b1ea 100644 --- a/mathesar/api/exceptions/error_codes.py +++ b/mathesar/api/exceptions/error_codes.py @@ -64,7 +64,7 @@ class ErrorCodes(Enum): DeletedColumnAccess = 4418 IncorrectOldPassword = 4419 EditingPublicSchema = 4421 - DuplicateUIQueryInSchema = 4422 + DuplicateExplorationInSchema = 4422 IdentifierTooLong = 4423 DynamicDefaultAlterationToStaticDefault = 4424 InvalidJSONFormat = 4425 diff --git a/mathesar/api/exceptions/validation_exceptions/exceptions.py b/mathesar/api/exceptions/validation_exceptions/exceptions.py index b99e1e1f3b..9187c717aa 100644 --- a/mathesar/api/exceptions/validation_exceptions/exceptions.py +++ b/mathesar/api/exceptions/validation_exceptions/exceptions.py @@ -2,12 +2,12 @@ from mathesar.api.exceptions.validation_exceptions.base_exceptions import MathesarValidationException -class DuplicateUIQueryInSchemaAPIException(MathesarValidationException): - error_code = ErrorCodes.DuplicateUIQueryInSchema.value +class DuplicateExplorationInSchemaAPIException(MathesarValidationException): + error_code = ErrorCodes.DuplicateExplorationInSchema.value def __init__( self, - message="UIQuery names must be unique per schema", + message="Exploration names must be unique per schema", field=None, details=None, ): diff --git a/mathesar/api/pagination.py b/mathesar/api/pagination.py index 3a730c1832..fad6397ea2 100644 --- a/mathesar/api/pagination.py +++ b/mathesar/api/pagination.py @@ -5,8 +5,8 @@ from db.records.operations.group import GroupBy from mathesar.api.utils import get_table_or_404, process_annotated_records -from mathesar.models.base import Column, Table -from mathesar.models.query import UIQuery +from mathesar.models.deprecated import Column, Table +from mathesar.models.query import Exploration from mathesar.utils.preview import get_preview_info @@ -89,7 +89,7 @@ def paginate_queryset( table_columns = [{'id': column.id, 'alias': column.name} for column in columns_query] columns_to_fetch = table_columns + preview_columns - query = UIQuery(name="preview", base_table=table, initial_columns=columns_to_fetch) + query = Exploration(name="preview", base_table=table, initial_columns=columns_to_fetch) else: query = table records = query.get_records( diff --git a/mathesar/api/serializers/columns.py b/mathesar/api/serializers/columns.py index 338d3c166e..3abaa7e1b1 100644 --- a/mathesar/api/serializers/columns.py +++ b/mathesar/api/serializers/columns.py @@ -17,7 +17,7 @@ DisplayOptionsMappingSerializer, DISPLAY_OPTIONS_SERIALIZER_MAPPING_KEY, ) -from mathesar.models.base import Column +from mathesar.models.deprecated import Column class InputValueField(serializers.CharField): diff --git a/mathesar/api/serializers/constraints.py b/mathesar/api/serializers/constraints.py index 0dc9b8cde8..15401f170b 100644 --- a/mathesar/api/serializers/constraints.py +++ b/mathesar/api/serializers/constraints.py @@ -14,7 +14,7 @@ MathesarPolymorphicErrorMixin, ReadWritePolymorphicSerializerMappingMixin, ) -from mathesar.models.base import Column, Constraint, Table +from mathesar.models.deprecated import Column, Constraint, Table class TableFilteredPrimaryKeyRelatedField(serializers.PrimaryKeyRelatedField): diff --git a/mathesar/api/serializers/data_files.py b/mathesar/api/serializers/data_files.py index 2c684b0776..89cf7f9907 100644 --- a/mathesar/api/serializers/data_files.py +++ b/mathesar/api/serializers/data_files.py @@ -5,7 +5,7 @@ from mathesar.api.exceptions.mixins 
import MathesarErrorMessageMixin from mathesar.errors import URLNotReachable, URLInvalidContentTypeError -from mathesar.models.base import DataFile +from mathesar.models.deprecated import DataFile SUPPORTED_URL_CONTENT_TYPES = {'text/csv', 'text/plain'} diff --git a/mathesar/api/serializers/databases.py b/mathesar/api/serializers/databases.py index 7911b909e7..4ddb3e76e4 100644 --- a/mathesar/api/serializers/databases.py +++ b/mathesar/api/serializers/databases.py @@ -3,7 +3,7 @@ from mathesar.api.display_options import DISPLAY_OPTIONS_BY_UI_TYPE from mathesar.api.exceptions.mixins import MathesarErrorMessageMixin -from mathesar.models.base import Database +from mathesar.models.deprecated import Connection class ConnectionSerializer(MathesarErrorMessageMixin, serializers.ModelSerializer): @@ -12,7 +12,7 @@ class ConnectionSerializer(MathesarErrorMessageMixin, serializers.ModelSerialize database = serializers.CharField(source='db_name') class Meta: - model = Database + model = Connection fields = ['id', 'nickname', 'database', 'supported_types_url', 'username', 'password', 'host', 'port'] read_only_fields = ['id', 'supported_types_url'] extra_kwargs = { @@ -20,7 +20,7 @@ class Meta: } def get_supported_types_url(self, obj): - if isinstance(obj, Database) and not self.partial: + if isinstance(obj, Connection) and not self.partial: # Only get records if we are serializing an existing table request = self.context['request'] return request.build_absolute_uri(reverse('connection-types', kwargs={'pk': obj.pk})) diff --git a/mathesar/api/serializers/dependents.py b/mathesar/api/serializers/dependents.py index 2cf5c7b792..1b7751793b 100644 --- a/mathesar/api/serializers/dependents.py +++ b/mathesar/api/serializers/dependents.py @@ -1,7 +1,7 @@ from mathesar.api.serializers.shared_serializers import MathesarPolymorphicErrorMixin, ReadOnlyPolymorphicSerializerMappingMixin from rest_framework import serializers -from mathesar.models.base import Constraint, Schema, Table +from mathesar.models.deprecated import Constraint, Schema, Table DATABASE_OBJECT_TYPES = [ diff --git a/mathesar/api/serializers/links.py b/mathesar/api/serializers/links.py index 32156aa41e..fe36bf7326 100644 --- a/mathesar/api/serializers/links.py +++ b/mathesar/api/serializers/links.py @@ -11,7 +11,7 @@ MathesarPolymorphicErrorMixin, ReadWritePolymorphicSerializerMappingMixin, ) -from mathesar.models.base import Table +from mathesar.models.deprecated import Table from mathesar.state import reset_reflection diff --git a/mathesar/api/serializers/queries.py b/mathesar/api/serializers/queries.py index 1eeb705ffc..e49be3cea7 100644 --- a/mathesar/api/serializers/queries.py +++ b/mathesar/api/serializers/queries.py @@ -7,9 +7,9 @@ from mathesar.api.db.permissions.query_table import QueryTableAccessPolicy from mathesar.api.exceptions.mixins import MathesarErrorMessageMixin -from mathesar.api.exceptions.validation_exceptions.exceptions import DuplicateUIQueryInSchemaAPIException -from mathesar.models.base import Table -from mathesar.models.query import UIQuery +from mathesar.api.exceptions.validation_exceptions.exceptions import DuplicateExplorationInSchemaAPIException +from mathesar.models.deprecated import Table +from mathesar.models.query import Exploration class BaseQuerySerializer(MathesarErrorMessageMixin, serializers.ModelSerializer): @@ -20,7 +20,7 @@ class BaseQuerySerializer(MathesarErrorMessageMixin, serializers.ModelSerializer ) class Meta: - model = UIQuery + model = Exploration fields = ['schema', 'initial_columns', 
'transformations', 'base_table', 'display_names'] def get_schema(self, uiquery): @@ -48,9 +48,9 @@ def _validate_uniqueness(self, attrs): if base_table: schema = base_table.schema is_duplicate_q = self._get_is_duplicate_q(name, schema) - duplicates = UIQuery.objects.filter(is_duplicate_q) + duplicates = Exploration.objects.filter(is_duplicate_q) if duplicates.exists(): - raise DuplicateUIQueryInSchemaAPIException(field='name') + raise DuplicateExplorationInSchemaAPIException(field='name') def _get_is_duplicate_q(self, name, schema): has_same_name_q = Q(name=name) @@ -71,28 +71,28 @@ class QuerySerializer(BaseQuerySerializer): columns_url = serializers.SerializerMethodField('get_columns_url') class Meta: - model = UIQuery + model = Exploration fields = '__all__' def get_records_url(self, obj): - if isinstance(obj, UIQuery) and obj.pk is not None: - # Only get records_url if we are serializing an existing persisted UIQuery + if isinstance(obj, Exploration) and obj.pk is not None: + # Only get records_url if we are serializing an existing persisted Exploration request = self.context['request'] return request.build_absolute_uri(reverse('query-records', kwargs={'pk': obj.pk})) else: return None def get_columns_url(self, obj): - if isinstance(obj, UIQuery) and obj.pk is not None: - # Only get columns_url if we are serializing an existing persisted UIQuery + if isinstance(obj, Exploration) and obj.pk is not None: + # Only get columns_url if we are serializing an existing persisted Exploration request = self.context['request'] return request.build_absolute_uri(reverse('query-columns', kwargs={'pk': obj.pk})) else: return None def get_results_url(self, obj): - if isinstance(obj, UIQuery) and obj.pk is not None: - # Only get records_url if we are serializing an existing persisted UIQuery + if isinstance(obj, Exploration) and obj.pk is not None: + # Only get records_url if we are serializing an existing persisted Exploration request = self.context['request'] return request.build_absolute_uri(reverse('query-results', kwargs={'pk': obj.pk})) else: diff --git a/mathesar/api/serializers/records.py b/mathesar/api/serializers/records.py index 9367c5dff4..506b277c9c 100644 --- a/mathesar/api/serializers/records.py +++ b/mathesar/api/serializers/records.py @@ -6,7 +6,7 @@ import mathesar.api.exceptions.database_exceptions.exceptions as database_api_exceptions from mathesar.api.exceptions.mixins import MathesarErrorMessageMixin -from mathesar.models.base import Column +from mathesar.models.deprecated import Column from mathesar.api.utils import follows_json_number_spec from mathesar.database.types import UIType diff --git a/mathesar/api/serializers/schemas.py b/mathesar/api/serializers/schemas.py index f345362ed5..5bdf290285 100644 --- a/mathesar/api/serializers/schemas.py +++ b/mathesar/api/serializers/schemas.py @@ -6,7 +6,7 @@ from mathesar.api.db.permissions.table import TableAccessPolicy from mathesar.api.db.permissions.database import DatabaseAccessPolicy from mathesar.api.exceptions.mixins import MathesarErrorMessageMixin -from mathesar.models.base import Database, Schema, Table +from mathesar.models.deprecated import Connection, Schema, Table from mathesar.api.exceptions.database_exceptions import ( exceptions as database_api_exceptions ) @@ -19,7 +19,7 @@ class SchemaSerializer(MathesarErrorMessageMixin, serializers.HyperlinkedModelSe connection_id = PermittedPkRelatedField( source='database', access_policy=DatabaseAccessPolicy, - queryset=Database.current_objects.all() + 
queryset=Connection.current_objects.all() ) description = serializers.CharField( required=False, allow_blank=True, default=None, allow_null=True diff --git a/mathesar/api/serializers/table_settings.py b/mathesar/api/serializers/table_settings.py index 576fb0c569..d0b7ddc6e4 100644 --- a/mathesar/api/serializers/table_settings.py +++ b/mathesar/api/serializers/table_settings.py @@ -2,7 +2,7 @@ from mathesar.api.exceptions.mixins import MathesarErrorMessageMixin from mathesar.api.exceptions.validation_exceptions.exceptions import InvalidColumnOrder -from mathesar.models.base import PreviewColumnSettings, TableSettings, compute_default_preview_template, ValidationError +from mathesar.models.deprecated import PreviewColumnSettings, TableSettings, compute_default_preview_template, ValidationError class PreviewColumnSerializer(MathesarErrorMessageMixin, serializers.ModelSerializer): diff --git a/mathesar/api/serializers/tables.py b/mathesar/api/serializers/tables.py index ea3fecce3a..3f8e1a871a 100644 --- a/mathesar/api/serializers/tables.py +++ b/mathesar/api/serializers/tables.py @@ -26,7 +26,7 @@ from mathesar.api.exceptions.mixins import MathesarErrorMessageMixin from mathesar.api.serializers.columns import SimpleColumnSerializer from mathesar.api.serializers.table_settings import TableSettingsSerializer -from mathesar.models.base import Column, Schema, Table, DataFile +from mathesar.models.deprecated import Column, Schema, Table, DataFile from mathesar.utils.tables import gen_table_name, create_table_from_datafile, create_empty_table diff --git a/mathesar/api/ui/permissions/database_role.py b/mathesar/api/ui/permissions/database_role.py index 1f75662001..090a4ded6e 100644 --- a/mathesar/api/ui/permissions/database_role.py +++ b/mathesar/api/ui/permissions/database_role.py @@ -1,7 +1,7 @@ from django.db.models import Q from rest_access_policy import AccessPolicy -from mathesar.models.base import Database +from mathesar.models.deprecated import Connection from mathesar.models.users import DatabaseRole, Role @@ -32,7 +32,7 @@ def scope_queryset(cls, request, qs): if not (request.user.is_superuser or request.user.is_anonymous): # TODO Consider moving to more reusable place allowed_roles = (Role.MANAGER.value, Role.EDITOR.value, Role.VIEWER.value) - databases_with_view_access = Database.objects.filter( + databases_with_view_access = Connection.objects.filter( Q(database_role__role__in=allowed_roles) & Q(database_role__user=request.user) ) qs = qs.filter(database__in=databases_with_view_access) diff --git a/mathesar/api/ui/permissions/schema_role.py b/mathesar/api/ui/permissions/schema_role.py index a6d58521c7..b793fc98da 100644 --- a/mathesar/api/ui/permissions/schema_role.py +++ b/mathesar/api/ui/permissions/schema_role.py @@ -1,7 +1,7 @@ from django.db.models import Q from rest_access_policy import AccessPolicy -from mathesar.models.base import Database, Schema +from mathesar.models.deprecated import Connection, Schema from mathesar.models.users import DatabaseRole, Role, SchemaRole @@ -27,7 +27,7 @@ class SchemaRoleAccessPolicy(AccessPolicy): def scope_queryset(cls, request, qs): if not (request.user.is_superuser or request.user.is_anonymous): allowed_roles = (Role.MANAGER.value, Role.EDITOR.value, Role.VIEWER.value) - databases_with_view_access = Database.objects.filter( + databases_with_view_access = Connection.objects.filter( Q(database_role__role__in=allowed_roles) & Q(database_role__user=request.user) ) schema_with_view_access = Schema.objects.filter( diff --git 
a/mathesar/api/ui/serializers/users.py b/mathesar/api/ui/serializers/users.py index 199c439ac4..4ff9069b6a 100644 --- a/mathesar/api/ui/serializers/users.py +++ b/mathesar/api/ui/serializers/users.py @@ -1,4 +1,5 @@ from django.contrib.auth.password_validation import validate_password +from django.core.exceptions import ValidationError as DjangoValidationError from rest_access_policy import FieldAccessMixin, PermittedPkRelatedField from rest_framework import serializers @@ -7,7 +8,7 @@ from mathesar.api.exceptions.mixins import MathesarErrorMessageMixin from mathesar.api.exceptions.validation_exceptions.exceptions import IncorrectOldPassword from mathesar.api.ui.permissions.users import UserAccessPolicy -from mathesar.models.base import Database, Schema +from mathesar.models.deprecated import Connection, Schema from mathesar.models.users import User, DatabaseRole, SchemaRole @@ -71,7 +72,7 @@ def create(self, validated_data): class ChangePasswordSerializer(MathesarErrorMessageMixin, serializers.Serializer): - password = serializers.CharField(write_only=True, required=True, validators=[validate_password]) + password = serializers.CharField(write_only=True, required=True) old_password = serializers.CharField(write_only=True, required=True) def validate_old_password(self, value): @@ -80,6 +81,13 @@ def validate_old_password(self, value): return value raise IncorrectOldPassword(field='old_password') + def validate_password(self, value): + try: + validate_password(value) + except DjangoValidationError as e: + raise e + return value + def update(self, instance, validated_data): instance.set_password(validated_data['password']) instance.save() @@ -87,7 +95,7 @@ def update(self, instance, validated_data): class PasswordResetSerializer(MathesarErrorMessageMixin, serializers.Serializer): - password = serializers.CharField(write_only=True, required=True, validators=[validate_password]) + password = serializers.CharField(write_only=True, required=True) class DatabaseRoleSerializer(MathesarErrorMessageMixin, serializers.ModelSerializer): @@ -99,7 +107,7 @@ class Meta: # Refer https://rsinger86.github.io/drf-access-policy/policy_reuse/ for the usage of `PermittedPkRelatedField` database = PermittedPkRelatedField( access_policy=DatabaseAccessPolicy, - queryset=Database.current_objects.all() + queryset=Connection.current_objects.all() ) diff --git a/mathesar/api/ui/viewsets/databases.py b/mathesar/api/ui/viewsets/databases.py index 0bdc1770cb..01a9ab26ac 100644 --- a/mathesar/api/ui/viewsets/databases.py +++ b/mathesar/api/ui/viewsets/databases.py @@ -7,7 +7,7 @@ from rest_framework.response import Response from mathesar.api.ui.permissions.ui_database import UIDatabaseAccessPolicy -from mathesar.models.base import Database +from mathesar.models.deprecated import Connection from mathesar.api.dj_filters import DatabaseFilter from mathesar.api.exceptions.validation_exceptions.exceptions import ( DictHasBadKeys, UnsupportedInstallationDatabase @@ -39,7 +39,7 @@ class ConnectionViewSet( def get_queryset(self): return self.access_policy.scope_queryset( self.request, - Database.objects.all().order_by('-created_at') + Connection.objects.all().order_by('-created_at') ) @action(methods=['get'], detail=True) diff --git a/mathesar/api/ui/viewsets/records.py b/mathesar/api/ui/viewsets/records.py index e59a1f9ece..981452e5a0 100644 --- a/mathesar/api/ui/viewsets/records.py +++ b/mathesar/api/ui/viewsets/records.py @@ -9,7 +9,7 @@ import mathesar.api.exceptions.database_exceptions.exceptions as database_api_exceptions 
from mathesar.api.utils import get_table_or_404 -from mathesar.models.base import Table +from mathesar.models.deprecated import Table class RecordViewSet(AccessViewSetMixin, viewsets.GenericViewSet): diff --git a/mathesar/api/utils.py b/mathesar/api/utils.py index 3f3a4382f6..be83a84173 100644 --- a/mathesar/api/utils.py +++ b/mathesar/api/utils.py @@ -6,8 +6,8 @@ from db.records.operations import group from mathesar.api.exceptions.error_codes import ErrorCodes -from mathesar.models.base import Table -from mathesar.models.query import UIQuery +from mathesar.models.deprecated import Table +from mathesar.models.query import Exploration from mathesar.utils.preview import column_alias_from_preview_template from mathesar.api.exceptions.generic_exceptions.base_exceptions import BadDBCredentials import psycopg @@ -40,8 +40,8 @@ def get_table_or_404(pk): def get_query_or_404(pk): try: - query = UIQuery.objects.get(id=pk) - except UIQuery.DoesNotExist: + query = Exploration.objects.get(id=pk) + except Exploration.DoesNotExist: raise generic_api_exceptions.NotFoundAPIException( NotFound, error_code=ErrorCodes.QueryNotFound.value, diff --git a/mathesar/database/base.py b/mathesar/database/base.py index d6384d3d76..c07552be26 100644 --- a/mathesar/database/base.py +++ b/mathesar/database/base.py @@ -1,3 +1,4 @@ +import psycopg from db import engine @@ -18,3 +19,19 @@ def _get_credentials_for_db_model(db_model): database=db_model.db_name, port=db_model.port, ) + + +def get_psycopg_connection(db_model): + """ + Get a psycopg connection, given a Database model. + + Args: + db_model: The Django model corresponding to the Database. + """ + return psycopg.connect( + host=db_model.host, + port=db_model.port, + dbname=db_model.db_name, + user=db_model.username, + password=db_model.password, + ) diff --git a/mathesar/examples/base.py b/mathesar/examples/base.py new file mode 100644 index 0000000000..a29da66f3c --- /dev/null +++ b/mathesar/examples/base.py @@ -0,0 +1,14 @@ +"""Constants for use by the example dataset loaders.""" +import os + +FILE_DIR = os.path.abspath(os.path.dirname(__file__)) +RESOURCES = os.path.join(FILE_DIR, "resources") +LIBRARY_ONE = os.path.join(RESOURCES, "library_without_checkouts.sql") +LIBRARY_TWO = os.path.join(RESOURCES, "library_add_checkouts.sql") +DEVCON_DATASET = os.path.join(RESOURCES, "devcon_dataset.sql") +MOVIES_SQL_TABLES = os.path.join(RESOURCES, "movie_collection_tables.sql") +MOVIES_SQL_FKS = os.path.join(RESOURCES, "movie_collection_fks.sql") +MOVIES_CSV = os.path.join(RESOURCES, 'movies_csv') +LIBRARY_MANAGEMENT = 'Library Management' +MOVIE_COLLECTION = 'Movie Collection' +MATHESAR_CON = 'Mathesar Con' diff --git a/mathesar/examples/library_dataset.py b/mathesar/examples/library_dataset.py new file mode 100644 index 0000000000..0afeae0efa --- /dev/null +++ b/mathesar/examples/library_dataset.py @@ -0,0 +1,27 @@ +"""This module contains functions to load the Library Management dataset.""" + +from psycopg import sql +from mathesar.examples.base import LIBRARY_MANAGEMENT, LIBRARY_ONE, LIBRARY_TWO + + +def load_library_dataset(conn): + """ + Load the library dataset into a "Library Management" schema. + + Args: + conn: a psycopg (3) connection for loading the data. + + Uses given connection to define database to load into. Raises an + Exception if the "Library Management" schema already exists. 
+ """ + create_schema_query = sql.SQL("CREATE SCHEMA {}").format( + sql.Identifier(LIBRARY_MANAGEMENT) + ) + set_search_path = sql.SQL("SET search_path={}").format( + sql.Identifier(LIBRARY_MANAGEMENT) + ) + with open(LIBRARY_ONE) as f1, open(LIBRARY_TWO) as f2: + conn.execute(create_schema_query) + conn.execute(set_search_path) + conn.execute(f1.read()) + conn.execute(f2.read()) diff --git a/mathesar/examples/movies_dataset.py b/mathesar/examples/movies_dataset.py new file mode 100644 index 0000000000..735ed24611 --- /dev/null +++ b/mathesar/examples/movies_dataset.py @@ -0,0 +1,31 @@ +"""This module contains functions to load the Movie Collection dataset.""" +import os +from psycopg import sql + +from mathesar.examples.base import ( + MOVIE_COLLECTION, MOVIES_SQL_TABLES, MOVIES_CSV, MOVIES_SQL_FKS +) + + +def load_movies_dataset(conn): + """ + Load the movie example data set. + + Args: + conn: a psycopg (3) connection for loading the data. + + Uses given connection to define database to load into. Raises an + Exception if the "Movie Collection" schema already exists. + """ + with open(MOVIES_SQL_TABLES) as f, open(MOVIES_SQL_FKS) as f2: + conn.execute(f.read()) + for file in os.scandir(MOVIES_CSV): + table_name = file.name.split('.csv')[0] + copy_sql = sql.SQL( + "COPY {}.{} FROM STDIN DELIMITER ',' CSV HEADER" + ).format( + sql.Identifier(MOVIE_COLLECTION), sql.Identifier(table_name) + ) + with open(file, 'r') as csv, conn.cursor().copy(copy_sql) as copy: + copy.write(csv.read()) + conn.execute(f2.read()) diff --git a/demo/install/resources/library_add_checkouts.sql b/mathesar/examples/resources/library_add_checkouts.sql similarity index 100% rename from demo/install/resources/library_add_checkouts.sql rename to mathesar/examples/resources/library_add_checkouts.sql diff --git a/demo/install/resources/library_without_checkouts.sql b/mathesar/examples/resources/library_without_checkouts.sql similarity index 100% rename from demo/install/resources/library_without_checkouts.sql rename to mathesar/examples/resources/library_without_checkouts.sql diff --git a/demo/install/resources/movie_collection_fks.sql b/mathesar/examples/resources/movie_collection_fks.sql similarity index 100% rename from demo/install/resources/movie_collection_fks.sql rename to mathesar/examples/resources/movie_collection_fks.sql diff --git a/demo/install/resources/movie_collection_tables.sql b/mathesar/examples/resources/movie_collection_tables.sql similarity index 100% rename from demo/install/resources/movie_collection_tables.sql rename to mathesar/examples/resources/movie_collection_tables.sql diff --git a/demo/install/resources/movies_csv/Departments.csv b/mathesar/examples/resources/movies_csv/Departments.csv similarity index 100% rename from demo/install/resources/movies_csv/Departments.csv rename to mathesar/examples/resources/movies_csv/Departments.csv diff --git a/demo/install/resources/movies_csv/Genres.csv b/mathesar/examples/resources/movies_csv/Genres.csv similarity index 100% rename from demo/install/resources/movies_csv/Genres.csv rename to mathesar/examples/resources/movies_csv/Genres.csv diff --git a/demo/install/resources/movies_csv/Jobs.csv b/mathesar/examples/resources/movies_csv/Jobs.csv similarity index 100% rename from demo/install/resources/movies_csv/Jobs.csv rename to mathesar/examples/resources/movies_csv/Jobs.csv diff --git a/demo/install/resources/movies_csv/Movie Cast Map.csv b/mathesar/examples/resources/movies_csv/Movie Cast Map.csv similarity index 100% rename from 
demo/install/resources/movies_csv/Movie Cast Map.csv rename to mathesar/examples/resources/movies_csv/Movie Cast Map.csv diff --git a/demo/install/resources/movies_csv/Movie Crew Map.csv b/mathesar/examples/resources/movies_csv/Movie Crew Map.csv similarity index 100% rename from demo/install/resources/movies_csv/Movie Crew Map.csv rename to mathesar/examples/resources/movies_csv/Movie Crew Map.csv diff --git a/demo/install/resources/movies_csv/Movie Genre Map.csv b/mathesar/examples/resources/movies_csv/Movie Genre Map.csv similarity index 100% rename from demo/install/resources/movies_csv/Movie Genre Map.csv rename to mathesar/examples/resources/movies_csv/Movie Genre Map.csv diff --git a/demo/install/resources/movies_csv/Movie Production Company Map.csv b/mathesar/examples/resources/movies_csv/Movie Production Company Map.csv similarity index 100% rename from demo/install/resources/movies_csv/Movie Production Company Map.csv rename to mathesar/examples/resources/movies_csv/Movie Production Company Map.csv diff --git a/demo/install/resources/movies_csv/Movie Production Country Map.csv b/mathesar/examples/resources/movies_csv/Movie Production Country Map.csv similarity index 100% rename from demo/install/resources/movies_csv/Movie Production Country Map.csv rename to mathesar/examples/resources/movies_csv/Movie Production Country Map.csv diff --git a/demo/install/resources/movies_csv/Movie Spoken Language Map.csv b/mathesar/examples/resources/movies_csv/Movie Spoken Language Map.csv similarity index 100% rename from demo/install/resources/movies_csv/Movie Spoken Language Map.csv rename to mathesar/examples/resources/movies_csv/Movie Spoken Language Map.csv diff --git a/demo/install/resources/movies_csv/Movies.csv b/mathesar/examples/resources/movies_csv/Movies.csv similarity index 100% rename from demo/install/resources/movies_csv/Movies.csv rename to mathesar/examples/resources/movies_csv/Movies.csv diff --git a/demo/install/resources/movies_csv/People.csv b/mathesar/examples/resources/movies_csv/People.csv similarity index 100% rename from demo/install/resources/movies_csv/People.csv rename to mathesar/examples/resources/movies_csv/People.csv diff --git a/demo/install/resources/movies_csv/Production Companies.csv b/mathesar/examples/resources/movies_csv/Production Companies.csv similarity index 100% rename from demo/install/resources/movies_csv/Production Companies.csv rename to mathesar/examples/resources/movies_csv/Production Companies.csv diff --git a/demo/install/resources/movies_csv/Production Countries.csv b/mathesar/examples/resources/movies_csv/Production Countries.csv similarity index 100% rename from demo/install/resources/movies_csv/Production Countries.csv rename to mathesar/examples/resources/movies_csv/Production Countries.csv diff --git a/demo/install/resources/movies_csv/Spoken Languages.csv b/mathesar/examples/resources/movies_csv/Spoken Languages.csv similarity index 100% rename from demo/install/resources/movies_csv/Spoken Languages.csv rename to mathesar/examples/resources/movies_csv/Spoken Languages.csv diff --git a/demo/install/resources/movies_csv/Sub-Collections.csv b/mathesar/examples/resources/movies_csv/Sub-Collections.csv similarity index 100% rename from demo/install/resources/movies_csv/Sub-Collections.csv rename to mathesar/examples/resources/movies_csv/Sub-Collections.csv diff --git a/mathesar/imports/base.py b/mathesar/imports/base.py index 4805eb6d04..a43ba100b3 100644 --- a/mathesar/imports/base.py +++ b/mathesar/imports/base.py @@ -1,4 +1,4 @@ -from 
mathesar.models.base import Table +from mathesar.models.deprecated import Table from mathesar.imports.csv import create_db_table_from_csv_data_file from mathesar.imports.excel import create_db_table_from_excel_data_file from mathesar.imports.json import create_db_table_from_json_data_file diff --git a/mathesar/install.py b/mathesar/install.py index 466c0c982b..74c2c8c482 100644 --- a/mathesar/install.py +++ b/mathesar/install.py @@ -37,20 +37,11 @@ def main(skip_static_collection=False): install_on_db_with_key(database_key, skip_confirm) except IntegrityError: continue - if getattr(settings, 'MATHESAR_LIVE_DEMO', False) is True: - management.call_command( - 'createsuperuser', - '--no-input', - '--username', 'demo', - '--email', 'admin@example.com', - ) - management.call_command('setup_demo_template_db') def install_on_db_with_key(database_key, skip_confirm): - from mathesar.models.base import Database - db_model = Database.create_from_settings_key(database_key) - db_model.save() + from mathesar.models.deprecated import Connection + db_model = Connection.create_from_settings_key(database_key) try: install.install_mathesar( database_name=db_model.db_name, @@ -63,6 +54,7 @@ def install_on_db_with_key(database_key, skip_confirm): except OperationalError as e: db_model.delete() raise e + db_model.save() if __name__ == "__main__": diff --git a/mathesar/migrations/0005_release_0_1_4.py b/mathesar/migrations/0005_release_0_1_4.py index 04df9ca932..0849b2a6e3 100644 --- a/mathesar/migrations/0005_release_0_1_4.py +++ b/mathesar/migrations/0005_release_0_1_4.py @@ -1,7 +1,7 @@ from django.db import migrations, models, connection import django.contrib.postgres.fields import encrypted_fields.fields -import mathesar.models.base +import mathesar.models.deprecated def column_order_to_jsonb_postgres_fwd(apps, schema_editor): @@ -12,7 +12,7 @@ def column_order_to_jsonb_postgres_fwd(apps, schema_editor): migrations.AlterField( model_name='tablesettings', name='column_order', - field=models.JSONField(blank=True, default=None, null=True, validators=[mathesar.models.base.validate_column_order]), + field=models.JSONField(blank=True, default=None, null=True, validators=[mathesar.models.deprecated.validate_column_order]), ), diff --git a/mathesar/migrations/0007_users_permissions_remodel.py b/mathesar/migrations/0007_users_permissions_remodel.py new file mode 100644 index 0000000000..1f790a1622 --- /dev/null +++ b/mathesar/migrations/0007_users_permissions_remodel.py @@ -0,0 +1,132 @@ +# Generated by Django 4.2.11 on 2024-06-13 09:05 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion +import encrypted_fields.fields + +import mathesar.models.deprecated + + +class Migration(migrations.Migration): + + dependencies = [ + ('mathesar', '0006_mathesar_databases_to_model'), + ] + + operations = [ + migrations.RenameModel(old_name='Database', new_name='Connection'), + migrations.AlterField( + model_name='databaserole', + name='database', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='mathesar.connection'), + ), + migrations.AlterField( + model_name='schemarole', + name='schema', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='mathesar.schema'), + ), + migrations.AlterField( + model_name='tablesettings', + name='column_order', + field=models.JSONField(blank=True, default=None, null=True, validators=[mathesar.models.deprecated.validate_column_order]), + ), + migrations.RenameModel(old_name='UIQuery', 
new_name='Exploration'), + migrations.CreateModel( + name='Server', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('updated_at', models.DateTimeField(auto_now=True)), + ('host', models.CharField(max_length=255)), + ('port', models.IntegerField()), + ], + options={ + 'abstract': False, + }, + ), + migrations.AddConstraint( + model_name='server', + constraint=models.UniqueConstraint(fields=('host', 'port'), name='unique_server'), + ), + migrations.CreateModel( + name='Database', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('updated_at', models.DateTimeField(auto_now=True)), + ('name', models.CharField(max_length=128)), + ('server', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='databases', to='mathesar.server')), + ], + ), + migrations.AddConstraint( + model_name='database', + constraint=models.UniqueConstraint(fields=('name', 'server'), name='unique_database'), + ), + migrations.AddConstraint( + model_name='database', + constraint=models.UniqueConstraint(fields=('id', 'server'), name='database_id_server_index'), + ), + migrations.CreateModel( + name='Role', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('updated_at', models.DateTimeField(auto_now=True)), + ('name', models.CharField(max_length=255)), + ('password', encrypted_fields.fields.EncryptedCharField(max_length=255)), + ('server', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='roles', to='mathesar.server')), + ], + options={ + 'abstract': False, + }, + ), + migrations.AddConstraint( + model_name='role', + constraint=models.UniqueConstraint(fields=('name', 'server'), name='unique_role'), + ), + migrations.AddConstraint( + model_name='role', + constraint=models.UniqueConstraint(fields=('id', 'server'), name='role_id_server_index'), + ), + migrations.CreateModel( + name='UserDatabaseRoleMap', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('updated_at', models.DateTimeField(auto_now=True)), + ('database', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='mathesar.database')), + ('role', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='mathesar.role')), + ('server', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='mathesar.server')), + ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ], + ), + migrations.AddConstraint( + model_name='userdatabaserolemap', + constraint=models.UniqueConstraint(fields=('user', 'database'), name='user_one_role_per_database'), + ), + migrations.RunSQL( + sql=""" + ALTER TABLE mathesar_userdatabaserolemap + ADD CONSTRAINT userdatabaserolemap_database_server_integrity + FOREIGN KEY (database_id, server_id) + REFERENCES mathesar_database(id, server_id); + """, + reverse_sql=""" + ALTER TABLE mathesar_userdatabaserolemap + DROP CONSTRAINT userdatabaserolemap_database_server_integrity; + """ + ), + migrations.RunSQL( + sql=""" + ALTER TABLE mathesar_userdatabaserolemap + ADD CONSTRAINT userdatabaserolemap_role_server_integrity + FOREIGN KEY (role_id, 
server_id) + REFERENCES mathesar_role(id, server_id); + """, + reverse_sql=""" + ALTER TABLE mathesar_userdatabaserolemap + DROP CONSTRAINT userdatabaserolemap_role_server_integrity; + """ + ), + ] diff --git a/mathesar/migrations/0008_add_metadata_models.py b/mathesar/migrations/0008_add_metadata_models.py new file mode 100644 index 0000000000..13dd921ad3 --- /dev/null +++ b/mathesar/migrations/0008_add_metadata_models.py @@ -0,0 +1,46 @@ +# Generated by Django 4.2.11 on 2024-06-20 08:02 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('mathesar', '0007_users_permissions_remodel'), + ] + + operations = [ + migrations.CreateModel( + name='ColumnMetaData', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('updated_at', models.DateTimeField(auto_now=True)), + ('table_oid', models.PositiveIntegerField()), + ('attnum', models.PositiveIntegerField()), + ('bool_input', models.CharField(blank=True, choices=[('dropdown', 'dropdown'), ('checkbox', 'checkbox')])), + ('bool_true', models.CharField(default='True')), + ('bool_false', models.CharField(default='False')), + ('num_min_frac_digits', models.PositiveIntegerField(blank=True)), + ('num_max_frac_digits', models.PositiveIntegerField(blank=True)), + ('num_show_as_perc', models.BooleanField(default=False)), + ('mon_currency_symbol', models.CharField(default='$')), + ('mon_currency_location', models.CharField(choices=[('after-minus', 'after-minus'), ('end-with-space', 'end-with-space')], default='after-minus')), + ('time_format', models.CharField(blank=True)), + ('date_format', models.CharField(blank=True)), + ('duration_min', models.CharField(blank=True, max_length=255)), + ('duration_max', models.CharField(blank=True, max_length=255)), + ('duration_show_units', models.BooleanField(default=True)), + ('database', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='mathesar.database')), + ], + ), + migrations.AddConstraint( + model_name='columnmetadata', + constraint=models.UniqueConstraint(fields=('database', 'table_oid', 'attnum'), name='unique_column_metadata'), + ), + migrations.AddConstraint( + model_name='columnmetadata', + constraint=models.CheckConstraint(check=models.Q(('num_max_frac_digits__lte', 20), ('num_min_frac_digits__lte', 20), ('num_min_frac_digits__lte', models.F('num_max_frac_digits'))), name='frac_digits_integrity'), + ), + ] diff --git a/mathesar/migrations/0009_add_column_metadata_model.py b/mathesar/migrations/0009_add_column_metadata_model.py new file mode 100644 index 0000000000..6b53e15f4f --- /dev/null +++ b/mathesar/migrations/0009_add_column_metadata_model.py @@ -0,0 +1,52 @@ +# Generated by Django 4.2.11 on 2024-07-01 18:07 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('mathesar', '0008_add_metadata_models'), + ] + + operations = [ + migrations.AlterField( + model_name='columnmetadata', + name='attnum', + field=models.SmallIntegerField(), + ), + migrations.AlterField( + model_name='columnmetadata', + name='num_max_frac_digits', + field=models.PositiveIntegerField(default=20), + ), + migrations.AlterField( + model_name='columnmetadata', + name='num_min_frac_digits', + field=models.PositiveIntegerField(default=0), + ), + migrations.AlterField( + model_name='columnmetadata', + 
name='table_oid', + field=models.PositiveBigIntegerField(), + ), + migrations.CreateModel( + name='TableMetaData', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('updated_at', models.DateTimeField(auto_now=True)), + ('table_oid', models.PositiveBigIntegerField()), + ('import_verified', models.BooleanField(default=False)), + ('column_order', models.JSONField(default=list)), + ('record_summary_customized', models.BooleanField(default=False)), + ('record_summary_template', models.CharField(blank=True, max_length=255)), + ('database', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='mathesar.database')), + ], + ), + migrations.AddConstraint( + model_name='tablemetadata', + constraint=models.UniqueConstraint(fields=('database', 'table_oid'), name='unique_table_metadata'), + ), + ] diff --git a/mathesar/migrations/0010_alter_tablemetadata_column_order_and_more.py b/mathesar/migrations/0010_alter_tablemetadata_column_order_and_more.py new file mode 100644 index 0000000000..8c11c8fdf3 --- /dev/null +++ b/mathesar/migrations/0010_alter_tablemetadata_column_order_and_more.py @@ -0,0 +1,33 @@ +# Generated by Django 4.2.11 on 2024-07-10 18:16 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('mathesar', '0009_add_column_metadata_model'), + ] + + operations = [ + migrations.AlterField( + model_name='tablemetadata', + name='column_order', + field=models.JSONField(null=True), + ), + migrations.AlterField( + model_name='tablemetadata', + name='import_verified', + field=models.BooleanField(null=True), + ), + migrations.AlterField( + model_name='tablemetadata', + name='record_summary_customized', + field=models.BooleanField(null=True), + ), + migrations.AlterField( + model_name='tablemetadata', + name='record_summary_template', + field=models.CharField(max_length=255, null=True), + ), + ] diff --git a/mathesar/migrations/0011_explorations.py b/mathesar/migrations/0011_explorations.py new file mode 100644 index 0000000000..5d7a951cd6 --- /dev/null +++ b/mathesar/migrations/0011_explorations.py @@ -0,0 +1,33 @@ +# Generated by Django 4.2.11 on 2024-07-16 13:07 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('mathesar', '0010_alter_tablemetadata_column_order_and_more'), + ] + + operations = [ + migrations.CreateModel( + name='Explorations', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('updated_at', models.DateTimeField(auto_now=True)), + ('name', models.CharField(max_length=128, unique=True)), + ('base_table_oid', models.PositiveBigIntegerField()), + ('initial_columns', models.JSONField()), + ('transformations', models.JSONField(null=True)), + ('display_options', models.JSONField(null=True)), + ('display_names', models.JSONField()), + ('description', models.CharField(null=True)), + ('database', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='mathesar.database')), + ], + options={ + 'abstract': False, + }, + ), + ] diff --git a/mathesar/migrations/0011_rename_role_configuredrole.py b/mathesar/migrations/0011_rename_role_configuredrole.py new file mode 100644 index 0000000000..7dc768960e --- /dev/null +++ b/mathesar/migrations/0011_rename_role_configuredrole.py @@ 
-0,0 +1,22 @@ +# Generated by Django 4.2.11 on 2024-07-11 19:32 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('mathesar', '0010_alter_tablemetadata_column_order_and_more'), + ] + + operations = [ + migrations.RenameModel( + old_name='Role', + new_name='ConfiguredRole', + ), + migrations.RenameField( + model_name='userdatabaserolemap', + old_name='role', + new_name='configured_role', + ), + ] diff --git a/mathesar/migrations/0012_merge_20240718_0628.py b/mathesar/migrations/0012_merge_20240718_0628.py new file mode 100644 index 0000000000..d13e1ea05c --- /dev/null +++ b/mathesar/migrations/0012_merge_20240718_0628.py @@ -0,0 +1,14 @@ +# Generated by Django 4.2.11 on 2024-07-18 06:28 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('mathesar', '0011_explorations'), + ('mathesar', '0011_rename_role_configuredrole'), + ] + + operations = [ + ] diff --git a/mathesar/migrations/0013_alter_columnmetadata_bool_false_and_more.py b/mathesar/migrations/0013_alter_columnmetadata_bool_false_and_more.py new file mode 100644 index 0000000000..3fb9759fbf --- /dev/null +++ b/mathesar/migrations/0013_alter_columnmetadata_bool_false_and_more.py @@ -0,0 +1,78 @@ +# Generated by Django 4.2.11 on 2024-07-24 10:53 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('mathesar', '0012_merge_20240718_0628'), + ] + + operations = [ + migrations.AlterField( + model_name='columnmetadata', + name='bool_false', + field=models.CharField(null=True), + ), + migrations.AlterField( + model_name='columnmetadata', + name='bool_input', + field=models.CharField(choices=[('dropdown', 'dropdown'), ('checkbox', 'checkbox')], null=True), + ), + migrations.AlterField( + model_name='columnmetadata', + name='bool_true', + field=models.CharField(null=True), + ), + migrations.AlterField( + model_name='columnmetadata', + name='date_format', + field=models.CharField(null=True), + ), + migrations.AlterField( + model_name='columnmetadata', + name='duration_max', + field=models.CharField(max_length=255, null=True), + ), + migrations.AlterField( + model_name='columnmetadata', + name='duration_min', + field=models.CharField(max_length=255, null=True), + ), + migrations.AlterField( + model_name='columnmetadata', + name='duration_show_units', + field=models.BooleanField(null=True), + ), + migrations.AlterField( + model_name='columnmetadata', + name='mon_currency_location', + field=models.CharField(choices=[('after-minus', 'after-minus'), ('end-with-space', 'end-with-space')], null=True), + ), + migrations.AlterField( + model_name='columnmetadata', + name='mon_currency_symbol', + field=models.CharField(null=True), + ), + migrations.AlterField( + model_name='columnmetadata', + name='num_max_frac_digits', + field=models.PositiveIntegerField(null=True), + ), + migrations.AlterField( + model_name='columnmetadata', + name='num_min_frac_digits', + field=models.PositiveIntegerField(null=True), + ), + migrations.AlterField( + model_name='columnmetadata', + name='num_show_as_perc', + field=models.BooleanField(null=True), + ), + migrations.AlterField( + model_name='columnmetadata', + name='time_format', + field=models.CharField(null=True), + ), + ] diff --git a/mathesar/migrations/0014_remove_columnmetadata_duration_show_units_and_more.py b/mathesar/migrations/0014_remove_columnmetadata_duration_show_units_and_more.py new file mode 100644 index 0000000000..fe00b4f652 --- /dev/null +++ 
b/mathesar/migrations/0014_remove_columnmetadata_duration_show_units_and_more.py @@ -0,0 +1,31 @@ +# Generated by Django 4.2.11 on 2024-07-31 12:52 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('mathesar', '0013_alter_columnmetadata_bool_false_and_more'), + ] + + operations = [ + migrations.RemoveField( + model_name='columnmetadata', + name='duration_show_units', + ), + migrations.RemoveField( + model_name='columnmetadata', + name='num_show_as_perc', + ), + migrations.AddField( + model_name='columnmetadata', + name='num_format', + field=models.CharField(choices=[('english', 'english'), ('german', 'german'), ('french', 'french'), ('hindi', 'hindi'), ('swiss', 'swiss')], null=True), + ), + migrations.AddField( + model_name='columnmetadata', + name='num_grouping', + field=models.CharField(choices=[('always', 'always'), ('auto', 'auto'), ('never', 'never')], null=True), + ), + ] diff --git a/mathesar/migrations/0015_tablemetadata_data_file.py b/mathesar/migrations/0015_tablemetadata_data_file.py new file mode 100644 index 0000000000..ab9c0e7eaf --- /dev/null +++ b/mathesar/migrations/0015_tablemetadata_data_file.py @@ -0,0 +1,19 @@ +# Generated by Django 4.2.11 on 2024-09-10 12:25 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('mathesar', '0014_remove_columnmetadata_duration_show_units_and_more'), + ] + + operations = [ + migrations.AddField( + model_name='tablemetadata', + name='data_file', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='mathesar.datafile'), + ), + ] diff --git a/mathesar/migrations/0016_alter_explorations_display_names.py b/mathesar/migrations/0016_alter_explorations_display_names.py new file mode 100644 index 0000000000..4d7cd53765 --- /dev/null +++ b/mathesar/migrations/0016_alter_explorations_display_names.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.11 on 2024-09-18 14:23 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('mathesar', '0015_tablemetadata_data_file'), + ] + + operations = [ + migrations.AlterField( + model_name='explorations', + name='display_names', + field=models.JSONField(null=True), + ), + ] diff --git a/mathesar/migrations/0017_explorations_schema_oid.py b/mathesar/migrations/0017_explorations_schema_oid.py new file mode 100644 index 0000000000..9e55c7b19d --- /dev/null +++ b/mathesar/migrations/0017_explorations_schema_oid.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.11 on 2024-09-26 18:13 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('mathesar', '0016_alter_explorations_display_names'), + ] + + operations = [ + migrations.AddField( + model_name='explorations', + name='schema_oid', + field=models.PositiveBigIntegerField(), + ), + ] diff --git a/mathesar/models/base.py b/mathesar/models/base.py index 60b2b5059b..83b01e331b 100644 --- a/mathesar/models/base.py +++ b/mathesar/models/base.py @@ -1,60 +1,6 @@ -from functools import reduce - -from bidict import bidict - -from django.conf import settings -from django.core.cache import cache -from django.core.exceptions import ValidationError from django.db import models -from django.db.models import JSONField from encrypted_fields.fields import EncryptedCharField -from db.columns import utils as column_utils -from db.columns.operations.create import create_column, 
duplicate_column -from db.columns.operations.alter import alter_column -from db.columns.operations.drop import drop_column -from db.columns.operations.select import ( - get_column_description, - get_column_attnum_from_names_as_map, get_column_name_from_attnum, - get_map_of_attnum_to_column_name, get_map_of_attnum_and_table_oid_to_column_name, -) -from db.constraints.operations.create import add_constraint -from db.constraints.operations.drop import drop_constraint -from db.constraints.operations.select import get_constraint_record_from_oid -from db.constraints import utils as constraint_utils -from db.dependents.dependents_utils import get_dependents_graph, has_dependents -from db.metadata import get_empty_metadata -from db.records.operations.delete import bulk_delete_records, delete_record -from db.records.operations.insert import insert_record_or_records -from db.records.operations.select import get_column_cast_records, get_count, get_record -from db.records.operations.select import get_records -from db.records.operations.update import update_record -from db.schemas.operations.drop import drop_schema -from db.schemas.operations.select import get_schema_description -from db.schemas import utils as schema_utils -from db.tables import utils as table_utils -from db.tables.operations.drop import drop_table -from db.tables.operations.move_columns import move_columns_between_related_tables -from db.tables.operations.select import ( - get_oid_from_table, - reflect_table_from_oid, - get_table_description, - reflect_tables_from_oids -) -from db.tables.operations.split import extract_columns_from_table -from db.records.operations.insert import insert_from_select -from db.tables.utils import get_primary_key_column - -from mathesar.models.relation import Relation -from mathesar.utils import models as model_utils -from mathesar.utils.prefetch import PrefetchManager, Prefetcher -from mathesar.database.base import create_mathesar_engine -from mathesar.database.types import UIType, get_ui_type_from_db_type -from mathesar.state import make_sure_initial_reflection_happened, get_cached_metadata, reset_reflection -from mathesar.state.cached_property import cached_property -from mathesar.api.exceptions.database_exceptions.base_exceptions import ProgrammingAPIException - - -NAME_CACHE_INTERVAL = 60 * 5 +import psycopg class BaseModel(models.Model): @@ -65,918 +11,149 @@ class Meta: abstract = True -class DatabaseObjectManager(PrefetchManager): - def get_queryset(self): - make_sure_initial_reflection_happened() - return super().get_queryset() - - -class ReflectionManagerMixin(models.Model): - """ - Used to reflect objects that exists on the user database but does not have a equivalent mathesar reference object. - """ - # The default manager, current_objects, does not reflect database objects. - # This saves us from having to deal with Django trying to automatically reflect db - # objects in the background when we might not expect it. 
- current_objects = models.Manager() - objects = DatabaseObjectManager() - - class Meta: - abstract = True - - def __str__(self): - return f"{self.__class__.__name__}" - - -class DatabaseObject(ReflectionManagerMixin, BaseModel): - """ - Objects that can be referenced using a database identifier - """ - oid = models.PositiveIntegerField() - - class Meta: - abstract = True - - def __str__(self): - return f"{self.__class__.__name__}: {self.oid}" - - def __repr__(self): - return f'<{self.__class__.__name__}: {self.oid}>' - - -# TODO: Replace with a proper form of caching -# See: https://github.com/centerofci/mathesar/issues/280 -_engine_cache = {} - - -class Database(ReflectionManagerMixin, BaseModel): - name = models.CharField(max_length=128, unique=True) - db_name = models.CharField(max_length=128) - username = EncryptedCharField(max_length=255) - password = EncryptedCharField(max_length=255) +class Server(BaseModel): host = models.CharField(max_length=255) port = models.IntegerField() - current_objects = models.Manager() - # TODO does this need to be defined, given that ReflectionManagerMixin defines an identical attribute? - objects = DatabaseObjectManager() - deleted = models.BooleanField(blank=True, default=False) - - @property - def _sa_engine(self): - global _engine_cache - # We're caching this since the engine is used frequently. - db_name = self.name - was_cached = db_name in _engine_cache - if was_cached: - engine = _engine_cache.get(db_name) - model_utils.ensure_cached_engine_ready(engine) - else: - engine = create_mathesar_engine(self) - _engine_cache[db_name] = engine - return engine - - @property - def supported_ui_types(self): - """ - At the moment we don't actually filter our UIType set based on whether or not a UIType's - constituent DB types are supported. - """ - return UIType - - def __repr__(self): - return f'{self.__class__.__name__}: {self.name}, {self.id}' - - @classmethod - def create_from_settings_key(cls, db_key): - """ - Get an ethereal instance of the model from Django settings. - - This is only supported for Postgres DBs (e.g., it won't work on an - SQLite3 internal DB; that returns NoneType) - - Args: - db_key: This should be the key of the DB in settings.DATABASES - """ - db_info = settings.DATABASES[db_key] - if 'postgres' in db_info['ENGINE']: - return cls( - name=db_key, - db_name=db_info['NAME'], - username=db_info['USER'], - password=db_info['PASSWORD'], - host=db_info['HOST'], - port=db_info['PORT'], - ) - - def save(self, **kwargs): - db_name = self.name - # invalidate cached engine as db credentials might get changed. 
- if _engine_cache.get(db_name): - _engine_cache[db_name].dispose() - del _engine_cache[db_name] - return super().save() - - -class Schema(DatabaseObject): - database = models.ForeignKey('Database', on_delete=models.CASCADE, - related_name='schemas') class Meta: constraints = [ - models.UniqueConstraint(fields=["oid", "database"], name="unique_schema") + models.UniqueConstraint( + fields=["host", "port"], name="unique_server" + ), ] - @property - def _sa_engine(self): - return self.database._sa_engine - - @property - def name(self): - cache_key = f"{self.database.name}_schema_name_{self.oid}" - try: - schema_name = cache.get(cache_key) - if schema_name is None: - schema_name = schema_utils.get_schema_name_from_oid( - self.oid, self._sa_engine - ) - cache.set(cache_key, schema_name, NAME_CACHE_INTERVAL) - return schema_name - # We catch this error, since it lets us decouple the cadence of - # overall DB reflection from the cadence of cache expiration for - # schema names. Also, it makes it obvious when the DB layer has - # been altered, as opposed to other reasons for a 404 when - # requesting a schema. - except TypeError: - return 'MISSING' - @property - def has_dependents(self): - return has_dependents( - self.oid, - self._sa_engine - ) - - # Returns only schema-scoped dependents on the top level - # However, returns dependents from other schemas for other - # objects down the graph. - # E.g: TableA from SchemaA depends on TableB from SchemaB - # SchemaA won't return as a dependent for SchemaB, however - # TableA will be a dependent of TableB which in turn depends on its schema - def get_dependents(self, exclude=None): - if exclude is None: - exclude = [] - return get_dependents_graph( - self.oid, - self._sa_engine, - exclude - ) - - @property - def description(self): - return get_schema_description(self.oid, self._sa_engine) - - def update_sa_schema(self, update_params): - result = model_utils.update_sa_schema(self, update_params) - reset_reflection(db_name=self.database.name) - return result - - def delete_sa_schema(self): - result = drop_schema(self.name, self._sa_engine, cascade=True) - reset_reflection(db_name=self.database.name) - return result - - def clear_name_cache(self): - cache_key = f"{self.database.name}_schema_name_{self.oid}" - cache.delete(cache_key) - - -class ColumnNamePrefetcher(Prefetcher): - def filter(self, column_attnums, columns): - if len(columns) < 1: - return [] - table = list(columns)[0].table - return get_map_of_attnum_to_column_name( - table.oid, - column_attnums, - table._sa_engine, - metadata=get_cached_metadata(), - ) - - def mapper(self, column): - return column.attnum - - def reverse_mapper(self, column): - # We return maps mostly, so a reverse mapper is not needed - pass - - def decorator(self, column, name): - setattr(column, 'name', name) - - -class ColumnPrefetcher(Prefetcher): - def filter(self, table_ids, tables): - if len(tables) < 1: - return [] - columns = reduce(lambda column_objs, table: column_objs + list(table.columns.all()), tables, []) - table_oids = [table.oid for table in tables] - - def _get_column_names_from_tables(table_oids): - if len(tables) > 0: - engine = list(tables)[0]._sa_engine - else: - return [] - return get_map_of_attnum_and_table_oid_to_column_name( - table_oids, - engine=engine, - metadata=get_cached_metadata(), - ) - - return ColumnNamePrefetcher( - filter=lambda column_attnums, columns: _get_column_names_from_tables(table_oids), - mapper=lambda column: (column.attnum, column.table.oid) - ).fetch(columns, 
'columns__name', Column, []) - - def reverse_mapper(self, column): - return [column.table_id] - - def decorator(self, table, columns): - pass - - -_sa_table_prefetcher = Prefetcher( - filter=lambda oids, tables: reflect_tables_from_oids( - oids, list(tables)[0]._sa_engine, metadata=get_cached_metadata() - ) if len(tables) > 0 else [], - mapper=lambda table: table.oid, - # A filler statement, just used to satisfy the library. It does not affect the prefetcher in - # any way as we bypass reverse mapping if the prefetcher returns a dictionary - reverse_mapper=lambda table: table.oid, - decorator=lambda table, _sa_table: setattr( - table, - '_sa_table', - _sa_table +class Database(BaseModel): + name = models.CharField(max_length=128) + server = models.ForeignKey( + 'Server', on_delete=models.CASCADE, related_name='databases' ) -) - - -class Table(DatabaseObject, Relation): - # These are fields whose source of truth is in the model - MODEL_FIELDS = ['import_verified'] - current_objects = models.Manager() - objects = DatabaseObjectManager( - # TODO Move the Prefetcher into a separate class and replace lambdas with proper function - _sa_table=_sa_table_prefetcher, - columns=ColumnPrefetcher, - ) - schema = models.ForeignKey('Schema', on_delete=models.CASCADE, - related_name='tables') - import_verified = models.BooleanField(blank=True, null=True) - import_target = models.ForeignKey('Table', blank=True, null=True, on_delete=models.SET_NULL) - is_temp = models.BooleanField(blank=True, null=True) class Meta: constraints = [ - models.UniqueConstraint(fields=["oid", "schema"], name="unique_table") - ] - - def validate_unique(self, exclude=None): - # Ensure oid is unique on db level - if Table.current_objects.filter( - oid=self.oid, schema__database=self.schema.database - ).exists(): - raise ValidationError("Table OID is not unique") - super().validate_unique(exclude=exclude) - - def save(self, *args, **kwargs): - if self._state.adding: - self.validate_unique() - super().save(*args, **kwargs) - - # TODO referenced from outside so much that it probably shouldn't be private - # TODO use below decorator in place of cached_property to prevent redundant reflection from - # redundant model instances. - # - # @key_cached_property( - # key_fn=lambda table: ( - # 'sa_table', - # table.schema.database.name, - # table.oid, - # ) - # ) - @cached_property - def _sa_table(self): - # We're caching since we want different Django Table instances to return the same SA - # Table, when they're referencing the same Postgres table. - try: - sa_table = reflect_table_from_oid( - oid=self.oid, - engine=self._sa_engine, - metadata=get_cached_metadata(), + models.UniqueConstraint( + fields=["name", "server"], name="unique_database" + ), + models.UniqueConstraint( + fields=["id", "server"], name="database_id_server_index" ) - # We catch these errors, since it lets us decouple the cadence of - # overall DB reflection from the cadence of cache expiration for - # table names. Also, it makes it obvious when the DB layer has - # been altered, as opposed to other reasons for a 404 when - # requesting a table. - except (TypeError, IndexError): - sa_table = table_utils.get_empty_table("MISSING") - return sa_table - - # NOTE: it's a problem that we have both _sa_table and _enriched_column_sa_table. at the moment - # it has to be this way because enriched column is not always interachangeable with sa column. 
- @property - def _enriched_column_sa_table(self): - return column_utils.get_enriched_column_table( - table=self._sa_table, - engine=self._sa_engine, - metadata=get_empty_metadata(), - ) - - @property - def primary_key_column_name(self): - pk_column = get_primary_key_column(self._sa_table) - return pk_column.name - - @property - def sa_columns(self): - return self._enriched_column_sa_table.columns - - @property - def _sa_engine(self): - return self.schema._sa_engine - - @property - def name(self): - return self._sa_table.name - - @property - def sa_column_names(self): - return self.sa_columns.keys() - - @property - def sa_constraints(self): - return self._sa_table.constraints - - @property - def has_dependents(self): - return has_dependents( - self.oid, - self.schema._sa_engine - ) - - @property - def description(self): - return get_table_description(self.oid, self._sa_engine) - - def get_dependents(self, exclude=None): - if exclude is None: - exclude = [] - return get_dependents_graph( - self.oid, - self.schema._sa_engine, - exclude - ) - - def get_ui_dependents(self): - """ - Returns all service layer dependents. For now only Data Explorer Query is considered - """ - - return { - 'queries': self.queries.all().values_list('id', flat=True) - } - - def add_column(self, column_data): - result = create_column( - self.schema._sa_engine, - self.oid, - column_data, - ) - reset_reflection(db_name=self.schema.database.name) - return result - - def alter_column(self, column_attnum, column_data): - result = alter_column( - self.schema._sa_engine, - self.oid, - column_attnum, - column_data, - ) - reset_reflection(db_name=self.schema.database.name) - return result - - def drop_column(self, column_attnum): - drop_column( - self.oid, - column_attnum, - self.schema._sa_engine, - ) - reset_reflection(db_name=self.schema.database.name) - - def duplicate_column(self, column_attnum, copy_data, copy_constraints, name=None): - result = duplicate_column( - self.oid, - column_attnum, - self.schema._sa_engine, - new_column_name=name, - copy_data=copy_data, - copy_constraints=copy_constraints, - ) - reset_reflection(db_name=self.schema.database.name) - return result - - def get_preview(self, column_definitions): - return get_column_cast_records( - self.schema._sa_engine, self._sa_table, column_definitions - ) - - # TODO unused? 
delete if so - @property - def sa_all_records(self): - return get_records( - table=self._sa_table, - engine=self.schema._sa_engine, - fallback_to_default_ordering=True, - ) - - def sa_num_records(self, filter=None, search=None): - if search is None: - search = [] - return get_count( - table=self._sa_table, - engine=self.schema._sa_engine, - filter=filter, - search=search, - ) - - def update_sa_table(self, update_params): - result = model_utils.update_sa_table(self, update_params) - return result - - def delete_sa_table(self): - result = drop_table(self.name, self.schema.name, self.schema._sa_engine, cascade=True) - reset_reflection(db_name=self.schema.database.name) - return result - - def get_record(self, id_value): - return get_record(self._sa_table, self.schema._sa_engine, id_value) - - # TODO consider using **kwargs instead of forwarding params one-by-one - def get_records( - self, - limit=None, - offset=None, - filter=None, - order_by=None, - group_by=None, - search=None, - duplicate_only=None, - ): - if order_by is None: - order_by = [] - if search is None: - search = [] - return get_records( - table=self._sa_table, - engine=self.schema._sa_engine, - limit=limit, - offset=offset, - filter=filter, - order_by=order_by, - group_by=group_by, - search=search, - duplicate_only=duplicate_only, - fallback_to_default_ordering=True, - ) - - def create_record_or_records(self, record_data): - return insert_record_or_records(self._sa_table, self.schema._sa_engine, record_data) - - def update_record(self, id_value, record_data): - return update_record(self._sa_table, self.schema._sa_engine, id_value, record_data) - - def delete_record(self, id_value): - return delete_record(self._sa_table, self.schema._sa_engine, id_value) - - def bulk_delete_records(self, id_values): - return bulk_delete_records(self._sa_table, self.schema._sa_engine, id_values) - - def add_constraint(self, constraint_obj): - # The max here has the effect of filtering for the largest OID, which is - # the most newly-created constraint. Other methods (e.g., trying to get - # a constraint by name when it wasn't set here) are even less robust. 
- constraint_oid = max( - add_constraint(constraint_obj, engine=self._sa_engine) - ) - result = Constraint.current_objects.create(oid=constraint_oid, table=self) - reset_reflection(db_name=self.schema.database.name) - return result - - def get_column_name_id_bidirectional_map(self): - columns = Column.objects.filter(table_id=self.id).select_related('table__schema__database').prefetch('name') - columns_map = bidict({column.name: column.id for column in columns}) - return columns_map - - def get_column_name_type_map(self): - columns = Column.objects.filter(table_id=self.id) - columns_map = [(column.name, column.db_type) for column in columns] - return columns_map - - def get_column_by_name(self, name): - columns = self.get_columns_by_name(name_list=[name]) - if len(columns) > 0: - return columns[0] - - def get_columns_by_name(self, name_list): - columns_by_name_dict = { - col.name: col - for col - in Column.objects.filter(table=self) - if col.name in name_list - } - return [ - columns_by_name_dict[col_name] - for col_name - in name_list - ] - - def move_columns(self, columns_to_move, target_table): - # Collect various information about relevant columns before mutating - columns_attnum_to_move = [column.attnum for column in columns_to_move] - target_table_oid = target_table.oid - column_names_to_move = [column.name for column in columns_to_move] - target_columns_name_id_map = target_table.get_column_name_id_bidirectional_map() - column_names_id_map = self.get_column_name_id_bidirectional_map() - - # Mutate on Postgres - extracted_sa_table, remainder_sa_table = move_columns_between_related_tables( - source_table_oid=self.oid, - target_table_oid=target_table_oid, - column_attnums_to_move=columns_attnum_to_move, - schema=self.schema.name, - engine=self._sa_engine - ) - engine = self._sa_engine - - # Replicate mutation on Django, so that Django-layer-specific information is preserved - extracted_table_oid = get_oid_from_table(extracted_sa_table.name, extracted_sa_table.schema, engine) - remainder_table_oid = get_oid_from_table(remainder_sa_table.name, remainder_sa_table.schema, engine) - target_table.oid = extracted_table_oid - target_table.save() - # Refresh existing target table columns to use correct attnum preventing conflicts with the moved column - existing_target_column_names = target_columns_name_id_map.keys() - target_table.update_column_reference(existing_target_column_names, target_columns_name_id_map) - # Add the moved column - target_table.update_column_reference(column_names_to_move, column_names_id_map) - self.oid = remainder_table_oid - self.save() - remainder_column_names = column_names_id_map.keys() - column_names_to_move - self.update_column_reference(remainder_column_names, column_names_id_map) - reset_reflection(db_name=self.schema.database.name) - return extracted_sa_table, remainder_sa_table - - def split_table( - self, - columns_to_extract, - extracted_table_name, - column_names_id_map, - relationship_fk_column_name - ): - # Collect various information about relevant columns before mutating - columns_attnum_to_extract = [column.attnum for column in columns_to_extract] - extracted_column_names = [column.name for column in columns_to_extract] - remainder_column_names = column_names_id_map.keys() - extracted_column_names - - # Mutate on Postgres - extracted_table_oid, remainder_table_oid, linking_fk_column_attnum = extract_columns_from_table( - self.oid, - columns_attnum_to_extract, - extracted_table_name, - self.schema.name, - self._sa_engine, - relationship_fk_column_name - 
) - # Replicate mutation on Django, so that Django-layer-specific information is preserved - extracted_table = Table(oid=extracted_table_oid, schema=self.schema) - extracted_table.save() - - # Update attnum as it would have changed due to columns moving to a new table. - extracted_table.update_column_reference(extracted_column_names, column_names_id_map) - remainder_table = Table.current_objects.get(schema__database=self.schema.database, oid=remainder_table_oid) - remainder_table.update_column_reference(remainder_column_names, column_names_id_map) - reset_reflection(db_name=self.schema.database.name) - remainder_fk_column = Column.objects.get(table=remainder_table, attnum=linking_fk_column_attnum) - - return extracted_table, remainder_table, remainder_fk_column - - def update_column_reference(self, column_names, column_name_id_map): - """ - Will update the columns specified via column_names to have the right attnum and to be part - of this table. - """ - column_names_attnum_map = get_column_attnum_from_names_as_map( - self.oid, - column_names, - self._sa_engine, - metadata=get_cached_metadata(), - ) - column_objs = [] - for column_name, column_attnum in column_names_attnum_map.items(): - column_id = column_name_id_map[column_name] - column = Column.current_objects.get(id=column_id) - column.table_id = self.id - column.attnum = column_attnum - column_objs.append(column) - Column.current_objects.bulk_update(column_objs, fields=['table_id', 'attnum']) - - def insert_records_to_existing_table(self, existing_table, data_files, mappings=None): - from_table = self._sa_table - target_table = existing_table._sa_table - engine = self._sa_engine - if mappings: - col_mappings = [[from_col.name, target_col.name] for from_col, target_col in mappings] - else: - col_mappings = None - data_file = data_files[0] - try: - table, _ = insert_from_select(from_table, target_table, engine, col_mappings) - data_file.table_imported_to = existing_table - except Exception as e: - # ToDo raise specific exceptions. - raise e - return table - - def suggest_col_mappings_for_import(self, existing_table): - temp_table_col_list = self.get_column_name_type_map() - target_table_col_list = existing_table.get_column_name_type_map() - temp_table_name_id_map = self.get_column_name_id_bidirectional_map() - target_table_name_id_map = existing_table.get_column_name_id_bidirectional_map() - column_mappings = column_utils.find_match(temp_table_col_list, target_table_col_list, self._sa_engine) - - # Convert python list indices to django ids. 
- mappings = [ - ( - temp_table_name_id_map[ - temp_table_col_list[from_col][0] # from_column name - ], - target_table_name_id_map[ - target_table_col_list[target_col][0] # target_column name - ] - ) for from_col, target_col in column_mappings ] - return mappings -class Column(ReflectionManagerMixin, BaseModel): - table = models.ForeignKey('Table', on_delete=models.CASCADE, related_name='columns') - attnum = models.IntegerField() - display_options = JSONField(null=True, default=None) +class ConfiguredRole(BaseModel): + name = models.CharField(max_length=255) + server = models.ForeignKey( + 'Server', on_delete=models.CASCADE, related_name='roles' + ) + password = EncryptedCharField(max_length=255) class Meta: constraints = [ - models.UniqueConstraint(fields=["attnum", "table"], name="unique_column") + models.UniqueConstraint( + fields=["name", "server"], name="unique_role" + ), + models.UniqueConstraint( + fields=["id", "server"], name="role_id_server_index" + ) ] - def __str__(self): - return f"{self.__class__.__name__}: {self.table_id}-{self.attnum}" - - def __getattribute__(self, name): - try: - return super().__getattribute__(name) - except AttributeError as e: - # Blacklist Django attribute names that cause recursion by trying to fetch an invalid cache. - # TODO Find a better way to avoid finding Django related columns - blacklisted_attribute_names = ['resolve_expression', '_prefetched_objects_cache'] - if name not in blacklisted_attribute_names: - return getattr(self._sa_column, name) - else: - raise e - - current_objects = models.Manager() - objects = DatabaseObjectManager( - name=ColumnNamePrefetcher - ) - - @property - def _sa_engine(self): - return self.table._sa_engine - # TODO probably shouldn't be private: a lot of code already references it. - @property - def _sa_column(self): - return self.table.sa_columns[self.name] +class UserDatabaseRoleMap(BaseModel): + user = models.ForeignKey('User', on_delete=models.CASCADE) + database = models.ForeignKey('Database', on_delete=models.CASCADE) + configured_role = models.ForeignKey('ConfiguredRole', on_delete=models.CASCADE) + server = models.ForeignKey('Server', on_delete=models.CASCADE) - # TODO use below decorator in place of cached_property to prevent redundant reflection from - # redundant model instances. 
- # - # @key_cached_property( - # key_fn=lambda column: ( - # "column name", - # column.table.schema.database.name, - # column.table.schema.name, - # column.table.oid, - # column.attnum, - # ) - # ) - @cached_property - def name(self): - name = get_column_name_from_attnum( - self.table.oid, - self.attnum, - self._sa_engine, - metadata=get_cached_metadata(), - ) - assert type(name) is str - if name is None: - raise ProgrammingAPIException( - Exception( - "attempted to access column's name after it was dropped" - ) + class Meta: + constraints = [ + models.UniqueConstraint( + fields=["user", "database"], name="user_one_role_per_database" ) - else: - return name - - @property - def description(self): - return get_column_description(self.table.oid, self.attnum, self._sa_engine) - - @property - def ui_type(self): - if self.db_type: - return get_ui_type_from_db_type(self.db_type) - - @property - def db_type(self): - return self._sa_column.db_type + ] @property - def has_dependents(self): - return has_dependents( - self.table.oid, - self._sa_engine, - self.attnum - ) - - def get_dependents(self, exclude): - return get_dependents_graph( - self.table.oid, - self._sa_engine, - exclude, - self.attnum + def connection(self): + return psycopg.connect( + host=self.server.host, + port=self.server.port, + dbname=self.database.name, + user=self.configured_role.name, + password=self.configured_role.password, ) -class Constraint(DatabaseObject): - table = models.ForeignKey('Table', on_delete=models.CASCADE, related_name='constraints') +class ColumnMetaData(BaseModel): + database = models.ForeignKey('Database', on_delete=models.CASCADE) + table_oid = models.PositiveBigIntegerField() + attnum = models.SmallIntegerField() + bool_input = models.CharField( + choices=[("dropdown", "dropdown"), ("checkbox", "checkbox")], + null=True + ) + bool_true = models.CharField(null=True) + bool_false = models.CharField(null=True) + num_min_frac_digits = models.PositiveIntegerField(null=True) + num_max_frac_digits = models.PositiveIntegerField(null=True) + num_grouping = models.CharField( + choices=[("always", "always"), ("auto", "auto"), ("never", "never")], + null=True + ) + num_format = models.CharField( + choices=[("english", "english"), ("german", "german"), ("french", "french"), ("hindi", "hindi"), ("swiss", "swiss")], + null=True + ) + mon_currency_symbol = models.CharField(null=True) + mon_currency_location = models.CharField( + choices=[("after-minus", "after-minus"), ("end-with-space", "end-with-space")], + null=True + ) + time_format = models.CharField(null=True) + date_format = models.CharField(null=True) + duration_min = models.CharField(max_length=255, null=True) + duration_max = models.CharField(max_length=255, null=True) class Meta: constraints = [ - models.UniqueConstraint(fields=["oid", "table"], name="unique_constraint") + models.UniqueConstraint( + fields=["database", "table_oid", "attnum"], + name="unique_column_metadata" + ), + models.CheckConstraint( + check=( + models.Q(num_max_frac_digits__lte=20) + & models.Q(num_min_frac_digits__lte=20) + & models.Q(num_min_frac_digits__lte=models.F("num_max_frac_digits")) + ), + name="frac_digits_integrity" + ) ] - # TODO try to cache this for an entire request - @property - def _constraint_record(self): - engine = self.table.schema.database._sa_engine - return get_constraint_record_from_oid(self.oid, engine, get_cached_metadata()) - - @property - def name(self): - return self._constraint_record['conname'] - - @property - def type(self): - return 
constraint_utils.get_constraint_type_from_char(self._constraint_record['contype']) - - @cached_property - def columns(self): - column_attnum_list = self._constraint_record['conkey'] - return Column.objects.filter(table=self.table, attnum__in=column_attnum_list).order_by("attnum") - - @cached_property - def referent_columns(self): - column_attnum_list = self._constraint_record['confkey'] - if column_attnum_list: - foreign_relation_oid = self._constraint_record['confrelid'] - columns = Column.objects.filter( - table__oid=foreign_relation_oid, - table__schema=self.table.schema, - attnum__in=column_attnum_list - ).order_by("attnum") - return columns - - @property - def ondelete(self): - action_char = self._constraint_record['confdeltype'] - return constraint_utils.get_constraint_action_from_char(action_char) - - @property - def onupdate(self): - action_char = self._constraint_record['confupdtype'] - return constraint_utils.get_constraint_action_from_char(action_char) - - @property - def deferrable(self): - return self._constraint_record['condeferrable'] - - @property - def match(self): - type_char = self._constraint_record['confmatchtype'] - return constraint_utils.get_constraint_match_type_from_char(type_char) - - def drop(self): - drop_constraint( - self.table._sa_table.name, - self.table._sa_table.schema, - self.table.schema._sa_engine, - self.name - ) - self.delete() - reset_reflection(db_name=self.table.schema.database.name) - - -class DataFile(BaseModel): - created_from_choices = models.TextChoices("created_from", "FILE PASTE URL") - file_type_choices = models.TextChoices("type", "CSV TSV JSON") - - file = models.FileField(upload_to=model_utils.user_directory_path) - user = models.ForeignKey(settings.AUTH_USER_MODEL, blank=True, null=True, on_delete=models.CASCADE) - created_from = models.CharField(max_length=128, choices=created_from_choices.choices) - type = models.CharField(max_length=128, choices=file_type_choices.choices) - table_imported_to = models.ForeignKey(Table, related_name="data_files", blank=True, - null=True, on_delete=models.SET_NULL) - - base_name = models.CharField(max_length=100) - header = models.BooleanField(default=True) - max_level = models.IntegerField(default=0, blank=True) - sheet_index = models.IntegerField(default=0) - delimiter = models.CharField(max_length=1, default=',', blank=True) - escapechar = models.CharField(max_length=1, blank=True) - quotechar = models.CharField(max_length=1, default='"', blank=True) - - -class PreviewColumnSettings(BaseModel): - customized = models.BooleanField() - template = models.CharField(max_length=255) - -def validate_column_order(value): - """ - Custom validator to ensure that all elements in the list are positive integers. 
- """ - if not all(isinstance(item, int) and item > 0 for item in value): - raise ValidationError("All elements of column order must be positive integers.") - - -class TableSettings(ReflectionManagerMixin, BaseModel): - preview_settings = models.OneToOneField(PreviewColumnSettings, on_delete=models.CASCADE) - table = models.OneToOneField(Table, on_delete=models.CASCADE, related_name="settings") - column_order = JSONField(null=True, blank=True, default=None, validators=[validate_column_order]) - - def save(self, **kwargs): - # Cleans the fields before saving by running respective field validator(s) - try: - self.clean_fields() - except ValidationError as e: - raise e - super().save(**kwargs) - - -def _create_table_settings(tables): - # TODO Bulk create preview settings to improve performance - for table in tables: - preview_column_settings = PreviewColumnSettings.objects.create(customized=False) - TableSettings.current_objects.create(table=table, preview_settings=preview_column_settings) - - -def _set_default_preview_template(table): - if not table.settings.preview_settings.customized: - preview_template = compute_default_preview_template(table) - preview_settings = table.settings.preview_settings - preview_settings.template = preview_template - preview_settings.save() +class TableMetaData(BaseModel): + database = models.ForeignKey('Database', on_delete=models.CASCADE) + table_oid = models.PositiveBigIntegerField() + data_file = models.ForeignKey("DataFile", on_delete=models.SET_NULL, null=True) + import_verified = models.BooleanField(null=True) + column_order = models.JSONField(null=True) + record_summary_customized = models.BooleanField(null=True) + record_summary_template = models.CharField(max_length=255, null=True) + class Meta: + constraints = [ + models.UniqueConstraint( + fields=["database", "table_oid"], + name="unique_table_metadata" + ) + ] -def compute_default_preview_template(table): - columns = Column.current_objects.filter(table=table).prefetch_related( - 'table', - 'table__schema', - 'table__schema__database' - ).order_by('attnum') - preview_column = None - primary_key_column = None - for column in columns: - if column.primary_key: - primary_key_column = column - else: - preview_column = column - break - if preview_column is None: - preview_column = primary_key_column - if preview_column: - preview_template = f"{{{preview_column.id}}}" - else: - # The table does not contain any column, show blank in such scenario. 
- preview_template = "" - return preview_template +class Explorations(BaseModel): + database = models.ForeignKey('Database', on_delete=models.CASCADE) + name = models.CharField(max_length=128, unique=True) + base_table_oid = models.PositiveBigIntegerField() + schema_oid = models.PositiveBigIntegerField() + initial_columns = models.JSONField() + transformations = models.JSONField(null=True) + display_options = models.JSONField(null=True) + display_names = models.JSONField(null=True) + description = models.CharField(null=True) diff --git a/mathesar/models/deprecated.py b/mathesar/models/deprecated.py new file mode 100644 index 0000000000..68087f3fae --- /dev/null +++ b/mathesar/models/deprecated.py @@ -0,0 +1,974 @@ +from functools import reduce + +from bidict import bidict + +from django.conf import settings +from django.core.cache import cache +from django.core.exceptions import ValidationError +from django.db import models +from django.db.models import JSONField +from encrypted_fields.fields import EncryptedCharField +from db.columns import utils as column_utils +from db.columns.operations.create import create_column, duplicate_column +from db.columns.operations.alter import alter_column +from db.columns.operations.drop import drop_column +from db.columns.operations.select import ( + get_column_description, + get_column_attnum_from_names_as_map, get_column_name_from_attnum, + get_map_of_attnum_to_column_name, get_map_of_attnum_and_table_oid_to_column_name, +) +from db.constraints.operations.create import add_constraint_via_sql_alchemy +from db.constraints.operations.drop import drop_constraint +from db.constraints.operations.select import get_constraint_record_from_oid +from db.constraints import utils as constraint_utils +from db.dependents.dependents_utils import get_dependents_graph, has_dependents +from db.metadata import get_empty_metadata +from db.records.operations.delete import bulk_delete_records, delete_record +from db.records.operations.insert import insert_record_or_records +from db.records.operations.select import get_column_cast_records, get_count, get_record +from db.records.operations.select import get_records +from db.records.operations.update import update_record +from db.schemas.operations.drop import drop_schema_via_name +from db.schemas.operations.select import get_schema_description +from db.schemas import utils as schema_utils +from db.tables import utils as table_utils +from db.tables.operations.drop import drop_table +from db.tables.operations.move_columns import move_columns_between_related_tables +from db.tables.operations.select import ( + get_oid_from_table, + reflect_table_from_oid, + get_table_description, + reflect_tables_from_oids +) +from db.tables.operations.split import extract_columns_from_table +from db.records.operations.insert import insert_from_select +from db.tables.utils import get_primary_key_column + +from mathesar.models.base import BaseModel +from mathesar.models.relation import Relation +from mathesar.utils import models as model_utils +from mathesar.utils.prefetch import PrefetchManager, Prefetcher +from mathesar.database.base import create_mathesar_engine +from mathesar.database.types import UIType, get_ui_type_from_db_type +from mathesar.state import make_sure_initial_reflection_happened, get_cached_metadata, reset_reflection +from mathesar.state.cached_property import cached_property +from mathesar.api.exceptions.database_exceptions.base_exceptions import ProgrammingAPIException + + +NAME_CACHE_INTERVAL = 60 * 5 + + +class 
DatabaseObjectManager(PrefetchManager):
+    def get_queryset(self):
+        make_sure_initial_reflection_happened()
+        return super().get_queryset()
+
+
+class ReflectionManagerMixin(models.Model):
+    """
+    Used to reflect objects that exist on the user database but do not have an
+    equivalent Mathesar reference object.
+    """
+    # The default manager, current_objects, does not reflect database objects.
+    # This saves us from having to deal with Django trying to automatically reflect db
+    # objects in the background when we might not expect it.
+    current_objects = models.Manager()
+    objects = DatabaseObjectManager()
+
+    class Meta:
+        abstract = True
+
+    def __str__(self):
+        return f"{self.__class__.__name__}"
+
+
+class DatabaseObject(ReflectionManagerMixin, BaseModel):
+    """
+    Objects that can be referenced using a database identifier
+    """
+    oid = models.PositiveIntegerField()
+
+    class Meta:
+        abstract = True
+
+    def __str__(self):
+        return f"{self.__class__.__name__}: {self.oid}"
+
+    def __repr__(self):
+        return f'<{self.__class__.__name__}: {self.oid}>'
+
+
+# TODO: Replace with a proper form of caching
+# See: https://github.com/centerofci/mathesar/issues/280
+_engine_cache = {}
+
+
+class Connection(ReflectionManagerMixin, BaseModel):
+    name = models.CharField(max_length=128, unique=True)
+    db_name = models.CharField(max_length=128)
+    username = EncryptedCharField(max_length=255)
+    password = EncryptedCharField(max_length=255)
+    host = models.CharField(max_length=255)
+    port = models.IntegerField()
+    current_objects = models.Manager()
+    # TODO does this need to be defined, given that ReflectionManagerMixin defines an identical attribute?
+    objects = DatabaseObjectManager()
+    deleted = models.BooleanField(blank=True, default=False)
+
+    @property
+    def _sa_engine(self):
+        global _engine_cache
+        # We're caching this since the engine is used frequently.
+        db_name = self.name
+        was_cached = db_name in _engine_cache
+        if was_cached:
+            engine = _engine_cache.get(db_name)
+            model_utils.ensure_cached_engine_ready(engine)
+        else:
+            engine = create_mathesar_engine(self)
+            _engine_cache[db_name] = engine
+        return engine
+
+    @property
+    def supported_ui_types(self):
+        """
+        At the moment we don't actually filter our UIType set based on whether or not a UIType's
+        constituent DB types are supported.
+        """
+        return UIType
+
+    def __repr__(self):
+        return f'{self.__class__.__name__}: {self.name}, {self.id}'
+
+    @classmethod
+    def create_from_settings_key(cls, db_key):
+        """
+        Get an ethereal instance of the model from Django settings.
+
+        This is only supported for Postgres DBs (e.g., it won't work on an
+        SQLite3 internal DB; that returns NoneType)
+
+        Args:
+            db_key: This should be the key of the DB in settings.DATABASES
+        """
+        db_info = settings.DATABASES[db_key]
+        if 'postgres' in db_info['ENGINE']:
+            return cls(
+                name=db_key,
+                db_name=db_info['NAME'],
+                username=db_info['USER'],
+                password=db_info['PASSWORD'],
+                host=db_info['HOST'],
+                port=db_info['PORT'],
+            )
+
+    def save(self, **kwargs):
+        db_name = self.name
+        # Invalidate the cached engine, since the DB credentials might have changed.
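+        # Engine disposal closes the engine's pooled connections; deleting the
+        # cache entry forces the next `_sa_engine` access to rebuild the engine
+        # with the freshly saved credentials.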
+ if _engine_cache.get(db_name): + _engine_cache[db_name].dispose() + del _engine_cache[db_name] + return super().save() + + +class Schema(DatabaseObject): + database = models.ForeignKey('Connection', on_delete=models.CASCADE, + related_name='schemas') + + class Meta: + constraints = [ + models.UniqueConstraint(fields=["oid", "database"], name="unique_schema") + ] + + @property + def _sa_engine(self): + return self.database._sa_engine + + @property + def name(self): + cache_key = f"{self.database.name}_schema_name_{self.oid}" + try: + schema_name = cache.get(cache_key) + if schema_name is None: + schema_name = schema_utils.get_schema_name_from_oid( + self.oid, self._sa_engine + ) + cache.set(cache_key, schema_name, NAME_CACHE_INTERVAL) + return schema_name + # We catch this error, since it lets us decouple the cadence of + # overall DB reflection from the cadence of cache expiration for + # schema names. Also, it makes it obvious when the DB layer has + # been altered, as opposed to other reasons for a 404 when + # requesting a schema. + except TypeError: + return 'MISSING' + + @property + def has_dependents(self): + return has_dependents( + self.oid, + self._sa_engine + ) + + # Returns only schema-scoped dependents on the top level + # However, returns dependents from other schemas for other + # objects down the graph. + # E.g: TableA from SchemaA depends on TableB from SchemaB + # SchemaA won't return as a dependent for SchemaB, however + # TableA will be a dependent of TableB which in turn depends on its schema + def get_dependents(self, exclude=None): + if exclude is None: + exclude = [] + return get_dependents_graph( + self.oid, + self._sa_engine, + exclude + ) + + @property + def description(self): + return get_schema_description(self.oid, self._sa_engine) + + def update_sa_schema(self, update_params): + result = model_utils.update_sa_schema(self, update_params) + reset_reflection(db_name=self.database.name) + return result + + def delete_sa_schema(self): + drop_schema_via_name(self._sa_engine, self.name, cascade=True) + reset_reflection(db_name=self.database.name) + + def clear_name_cache(self): + cache_key = f"{self.database.name}_schema_name_{self.oid}" + cache.delete(cache_key) + + +class ColumnNamePrefetcher(Prefetcher): + def filter(self, column_attnums, columns): + if len(columns) < 1: + return [] + table = list(columns)[0].table + return get_map_of_attnum_to_column_name( + table.oid, + column_attnums, + table._sa_engine, + metadata=get_cached_metadata(), + ) + + def mapper(self, column): + return column.attnum + + def reverse_mapper(self, column): + # We return maps mostly, so a reverse mapper is not needed + pass + + def decorator(self, column, name): + setattr(column, 'name', name) + + +class ColumnPrefetcher(Prefetcher): + def filter(self, table_ids, tables): + if len(tables) < 1: + return [] + columns = reduce(lambda column_objs, table: column_objs + list(table.columns.all()), tables, []) + table_oids = [table.oid for table in tables] + + def _get_column_names_from_tables(table_oids): + if len(tables) > 0: + engine = list(tables)[0]._sa_engine + else: + return [] + return get_map_of_attnum_and_table_oid_to_column_name( + table_oids, + engine=engine, + metadata=get_cached_metadata(), + ) + + return ColumnNamePrefetcher( + filter=lambda column_attnums, columns: _get_column_names_from_tables(table_oids), + mapper=lambda column: (column.attnum, column.table.oid) + ).fetch(columns, 'columns__name', Column, []) + + def reverse_mapper(self, column): + return [column.table_id] + 
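+    # No-op: the nested ColumnNamePrefetcher built in `filter` above already
+    # attaches names to the Column instances, so there is nothing further to
+    # decorate on the Table here.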
+    def decorator(self, table, columns):
+        pass
+
+
+_sa_table_prefetcher = Prefetcher(
+    filter=lambda oids, tables: reflect_tables_from_oids(
+        oids, list(tables)[0]._sa_engine, metadata=get_cached_metadata()
+    ) if len(tables) > 0 else [],
+    mapper=lambda table: table.oid,
+    # A filler statement, just used to satisfy the library. It does not affect the prefetcher in
+    # any way, as we bypass reverse mapping if the prefetcher returns a dictionary.
+    reverse_mapper=lambda table: table.oid,
+    decorator=lambda table, _sa_table: setattr(
+        table,
+        '_sa_table',
+        _sa_table
+    )
+)
+
+
+class Table(DatabaseObject, Relation):
+    # These are fields whose source of truth is in the model
+    MODEL_FIELDS = ['import_verified']
+    current_objects = models.Manager()
+    objects = DatabaseObjectManager(
+        # TODO Move the Prefetcher into a separate class and replace lambdas with proper functions
+        _sa_table=_sa_table_prefetcher,
+        columns=ColumnPrefetcher,
+    )
+    schema = models.ForeignKey('Schema', on_delete=models.CASCADE,
+                               related_name='tables')
+    import_verified = models.BooleanField(blank=True, null=True)
+    import_target = models.ForeignKey('Table', blank=True, null=True, on_delete=models.SET_NULL)
+    is_temp = models.BooleanField(blank=True, null=True)
+
+    class Meta:
+        constraints = [
+            models.UniqueConstraint(fields=["oid", "schema"], name="unique_table")
+        ]
+
+    def validate_unique(self, exclude=None):
+        # Ensure oid is unique on the db level
+        if Table.current_objects.filter(
+            oid=self.oid, schema__database=self.schema.database
+        ).exists():
+            raise ValidationError("Table OID is not unique")
+        super().validate_unique(exclude=exclude)
+
+    def save(self, *args, **kwargs):
+        if self._state.adding:
+            self.validate_unique()
+        super().save(*args, **kwargs)
+
+    # TODO referenced from outside so much that it probably shouldn't be private
+    # TODO use below decorator in place of cached_property to prevent redundant reflection from
+    # redundant model instances.
+    #
+    # @key_cached_property(
+    #     key_fn=lambda table: (
+    #         'sa_table',
+    #         table.schema.database.name,
+    #         table.oid,
+    #     )
+    # )
+    @cached_property
+    def _sa_table(self):
+        # We're caching since we want different Django Table instances to return the same SA
+        # Table, when they're referencing the same Postgres table.
+        try:
+            sa_table = reflect_table_from_oid(
+                oid=self.oid,
+                engine=self._sa_engine,
+                metadata=get_cached_metadata(),
+            )
+        # We catch these errors, since it lets us decouple the cadence of
+        # overall DB reflection from the cadence of cache expiration for
+        # table names. Also, it makes it obvious when the DB layer has
+        # been altered, as opposed to other reasons for a 404 when
+        # requesting a table.
+        except (TypeError, IndexError):
+            sa_table = table_utils.get_empty_table("MISSING")
+        return sa_table
+
+    # NOTE: it's a problem that we have both _sa_table and _enriched_column_sa_table.
+    # At the moment it has to be this way, because an enriched column is not always
+    # interchangeable with an SA column.
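+    # The enriched table below re-wraps each reflected column so that
+    # Mathesar-specific attributes (e.g. `db_type`, which `Column.db_type`
+    # relies on) are available on the column objects.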
+ @property + def _enriched_column_sa_table(self): + return column_utils.get_enriched_column_table( + table=self._sa_table, + engine=self._sa_engine, + metadata=get_empty_metadata(), + ) + + @property + def primary_key_column_name(self): + pk_column = get_primary_key_column(self._sa_table) + return pk_column.name + + @property + def sa_columns(self): + return self._enriched_column_sa_table.columns + + @property + def _sa_engine(self): + return self.schema._sa_engine + + @property + def name(self): + return self._sa_table.name + + @property + def sa_column_names(self): + return self.sa_columns.keys() + + @property + def sa_constraints(self): + return self._sa_table.constraints + + @property + def has_dependents(self): + return has_dependents( + self.oid, + self.schema._sa_engine + ) + + @property + def description(self): + return get_table_description(self.oid, self._sa_engine) + + def get_dependents(self, exclude=None): + if exclude is None: + exclude = [] + return get_dependents_graph( + self.oid, + self.schema._sa_engine, + exclude + ) + + def get_ui_dependents(self): + """ + Returns all service layer dependents. For now only Data Explorer Query is considered + """ + + return { + 'queries': self.queries.all().values_list('id', flat=True) + } + + def add_column(self, column_data): + result = create_column( + self.schema._sa_engine, + self.oid, + column_data, + ) + reset_reflection(db_name=self.schema.database.name) + return result + + def alter_column(self, column_attnum, column_data): + result = alter_column( + self.schema._sa_engine, + self.oid, + column_attnum, + column_data, + ) + reset_reflection(db_name=self.schema.database.name) + return result + + def drop_column(self, column_attnum): + drop_column( + self.oid, + column_attnum, + self.schema._sa_engine, + ) + reset_reflection(db_name=self.schema.database.name) + + def duplicate_column(self, column_attnum, copy_data, copy_constraints, name=None): + result = duplicate_column( + self.oid, + column_attnum, + self.schema._sa_engine, + new_column_name=name, + copy_data=copy_data, + copy_constraints=copy_constraints, + ) + reset_reflection(db_name=self.schema.database.name) + return result + + def get_preview(self, column_definitions): + return get_column_cast_records( + self.schema._sa_engine, self._sa_table, column_definitions + ) + + # TODO unused? 
delete if so + @property + def sa_all_records(self): + return get_records( + table=self._sa_table, + engine=self.schema._sa_engine, + fallback_to_default_ordering=True, + ) + + def sa_num_records(self, filter=None, search=None): + if search is None: + search = [] + return get_count( + table=self._sa_table, + engine=self.schema._sa_engine, + filter=filter, + search=search, + ) + + def update_sa_table(self, update_params): + result = model_utils.update_sa_table(self, update_params) + return result + + def delete_sa_table(self): + result = drop_table(self.name, self.schema.name, self.schema._sa_engine, cascade=True) + reset_reflection(db_name=self.schema.database.name) + return result + + def get_record(self, id_value): + return get_record(self._sa_table, self.schema._sa_engine, id_value) + + # TODO consider using **kwargs instead of forwarding params one-by-one + def get_records( + self, + limit=None, + offset=None, + filter=None, + order_by=None, + group_by=None, + search=None, + duplicate_only=None, + ): + if order_by is None: + order_by = [] + if search is None: + search = [] + return get_records( + table=self._sa_table, + engine=self.schema._sa_engine, + limit=limit, + offset=offset, + filter=filter, + order_by=order_by, + group_by=group_by, + search=search, + duplicate_only=duplicate_only, + fallback_to_default_ordering=True, + ) + + def create_record_or_records(self, record_data): + return insert_record_or_records(self._sa_table, self.schema._sa_engine, record_data) + + def update_record(self, id_value, record_data): + return update_record(self._sa_table, self.schema._sa_engine, id_value, record_data) + + def delete_record(self, id_value): + return delete_record(self._sa_table, self.schema._sa_engine, id_value) + + def bulk_delete_records(self, id_values): + return bulk_delete_records(self._sa_table, self.schema._sa_engine, id_values) + + def add_constraint(self, constraint_obj): + # The max here has the effect of filtering for the largest OID, which is + # the most newly-created constraint. Other methods (e.g., trying to get + # a constraint by name when it wasn't set here) are even less robust. 
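+        # `add_constraint_via_sql_alchemy` returns the OIDs of the table's
+        # constraints after the DDL runs; assuming OIDs are assigned in
+        # increasing order, the max is the constraint that was just added.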
+ constraint_oid = max( + add_constraint_via_sql_alchemy(constraint_obj, engine=self._sa_engine) + ) + result = Constraint.current_objects.create(oid=constraint_oid, table=self) + reset_reflection(db_name=self.schema.database.name) + return result + + def get_column_name_id_bidirectional_map(self): + columns = Column.objects.filter(table_id=self.id).select_related('table__schema__database').prefetch('name') + columns_map = bidict({column.name: column.id for column in columns}) + return columns_map + + def get_column_name_type_map(self): + columns = Column.objects.filter(table_id=self.id) + columns_map = [(column.name, column.db_type) for column in columns] + return columns_map + + def get_column_by_name(self, name): + columns = self.get_columns_by_name(name_list=[name]) + if len(columns) > 0: + return columns[0] + + def get_columns_by_name(self, name_list): + columns_by_name_dict = { + col.name: col + for col + in Column.objects.filter(table=self) + if col.name in name_list + } + return [ + columns_by_name_dict[col_name] + for col_name + in name_list + ] + + def move_columns(self, columns_to_move, target_table): + # Collect various information about relevant columns before mutating + columns_attnum_to_move = [column.attnum for column in columns_to_move] + target_table_oid = target_table.oid + column_names_to_move = [column.name for column in columns_to_move] + target_columns_name_id_map = target_table.get_column_name_id_bidirectional_map() + column_names_id_map = self.get_column_name_id_bidirectional_map() + + # Mutate on Postgres + extracted_sa_table, remainder_sa_table = move_columns_between_related_tables( + source_table_oid=self.oid, + target_table_oid=target_table_oid, + column_attnums_to_move=columns_attnum_to_move, + schema=self.schema.name, + engine=self._sa_engine + ) + engine = self._sa_engine + + # Replicate mutation on Django, so that Django-layer-specific information is preserved + extracted_table_oid = get_oid_from_table(extracted_sa_table.name, extracted_sa_table.schema, engine) + remainder_table_oid = get_oid_from_table(remainder_sa_table.name, remainder_sa_table.schema, engine) + target_table.oid = extracted_table_oid + target_table.save() + # Refresh existing target table columns to use the correct attnum, preventing conflicts with the moved columns + existing_target_column_names = target_columns_name_id_map.keys() + target_table.update_column_reference(existing_target_column_names, target_columns_name_id_map) + # Add the moved columns + target_table.update_column_reference(column_names_to_move, column_names_id_map) + self.oid = remainder_table_oid + self.save() + remainder_column_names = column_names_id_map.keys() - column_names_to_move + self.update_column_reference(remainder_column_names, column_names_id_map) + reset_reflection(db_name=self.schema.database.name) + return extracted_sa_table, remainder_sa_table + + def split_table( + self, + columns_to_extract, + extracted_table_name, + column_names_id_map, + relationship_fk_column_name + ): + # Collect various information about relevant columns before mutating + columns_attnum_to_extract = [column.attnum for column in columns_to_extract] + extracted_column_names = [column.name for column in columns_to_extract] + remainder_column_names = column_names_id_map.keys() - extracted_column_names + + # Mutate on Postgres + extracted_table_oid, remainder_table_oid, linking_fk_column_attnum = extract_columns_from_table( + self.oid, + columns_attnum_to_extract, + extracted_table_name, + self.schema.name, + self._sa_engine, +
relationship_fk_column_name + ) + # Replicate mutation on Django, so that Django-layer-specific information is preserved + extracted_table = Table(oid=extracted_table_oid, schema=self.schema) + extracted_table.save() + + # Update attnum as it would have changed due to columns moving to a new table. + extracted_table.update_column_reference(extracted_column_names, column_names_id_map) + remainder_table = Table.current_objects.get(schema__database=self.schema.database, oid=remainder_table_oid) + remainder_table.update_column_reference(remainder_column_names, column_names_id_map) + reset_reflection(db_name=self.schema.database.name) + remainder_fk_column = Column.objects.get(table=remainder_table, attnum=linking_fk_column_attnum) + + return extracted_table, remainder_table, remainder_fk_column + + def update_column_reference(self, column_names, column_name_id_map): + """ + Will update the columns specified via column_names to have the right attnum and to be part + of this table. + """ + column_names_attnum_map = get_column_attnum_from_names_as_map( + self.oid, + column_names, + self._sa_engine, + metadata=get_cached_metadata(), + ) + column_objs = [] + for column_name, column_attnum in column_names_attnum_map.items(): + column_id = column_name_id_map[column_name] + column = Column.current_objects.get(id=column_id) + column.table_id = self.id + column.attnum = column_attnum + column_objs.append(column) + Column.current_objects.bulk_update(column_objs, fields=['table_id', 'attnum']) + + def insert_records_to_existing_table(self, existing_table, data_files, mappings=None): + from_table = self._sa_table + target_table = existing_table._sa_table + engine = self._sa_engine + if mappings: + col_mappings = [[from_col.name, target_col.name] for from_col, target_col in mappings] + else: + col_mappings = None + data_file = data_files[0] + try: + table, _ = insert_from_select(from_table, target_table, engine, col_mappings) + data_file.table_imported_to = existing_table + except Exception as e: + # ToDo raise specific exceptions. + raise e + return table + + def suggest_col_mappings_for_import(self, existing_table): + temp_table_col_list = self.get_column_name_type_map() + target_table_col_list = existing_table.get_column_name_type_map() + temp_table_name_id_map = self.get_column_name_id_bidirectional_map() + target_table_name_id_map = existing_table.get_column_name_id_bidirectional_map() + column_mappings = column_utils.find_match(temp_table_col_list, target_table_col_list, self._sa_engine) + + # Convert python list indices to django ids. + mappings = [ + ( + temp_table_name_id_map[ + temp_table_col_list[from_col][0] # from_column name + ], + target_table_name_id_map[ + target_table_col_list[target_col][0] # target_column name + ] + ) for from_col, target_col in column_mappings + ] + return mappings + + +class Column(ReflectionManagerMixin, BaseModel): + table = models.ForeignKey('Table', on_delete=models.CASCADE, related_name='columns') + attnum = models.IntegerField() + display_options = JSONField(null=True, default=None) + + class Meta: + constraints = [ + models.UniqueConstraint(fields=["attnum", "table"], name="unique_column") + ] + + def __str__(self): + return f"{self.__class__.__name__}: {self.table_id}-{self.attnum}" + + def __getattribute__(self, name): + try: + return super().__getattribute__(name) + except AttributeError as e: + # Blacklist Django attribute names that cause recursion by trying to fetch an invalid cache. 
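+ # Attribute lookups that fail on the Django model are delegated to the reflected SQLAlchemy column (self._sa_column), unless blacklisted below.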
+ # TODO Find a better way to avoid finding Django related columns + blacklisted_attribute_names = ['resolve_expression', '_prefetched_objects_cache'] + if name not in blacklisted_attribute_names: + return getattr(self._sa_column, name) + else: + raise e + + current_objects = models.Manager() + objects = DatabaseObjectManager( + name=ColumnNamePrefetcher + ) + + @property + def _sa_engine(self): + return self.table._sa_engine + + # TODO probably shouldn't be private: a lot of code already references it. + @property + def _sa_column(self): + return self.table.sa_columns[self.name] + + # TODO use below decorator in place of cached_property to prevent redundant reflection from + # redundant model instances. + # + # @key_cached_property( + # key_fn=lambda column: ( + # "column name", + # column.table.schema.database.name, + # column.table.schema.name, + # column.table.oid, + # column.attnum, + # ) + # ) + @cached_property + def name(self): + name = get_column_name_from_attnum( + self.table.oid, + self.attnum, + self._sa_engine, + metadata=get_cached_metadata(), + ) + # Check for a dropped column (name is None) before asserting on the type. + if name is None: + raise ProgrammingAPIException( + Exception( + "attempted to access column's name after it was dropped" + ) + ) + assert type(name) is str + return name + + @property + def description(self): + return get_column_description(self.table.oid, self.attnum, self._sa_engine) + + @property + def ui_type(self): + if self.db_type: + return get_ui_type_from_db_type(self.db_type) + + @property + def db_type(self): + return self._sa_column.db_type + + @property + def has_dependents(self): + return has_dependents( + self.table.oid, + self._sa_engine, + self.attnum + ) + + def get_dependents(self, exclude): + return get_dependents_graph( + self.table.oid, + self._sa_engine, + exclude, + self.attnum + ) + + +class Constraint(DatabaseObject): + table = models.ForeignKey('Table', on_delete=models.CASCADE, related_name='constraints') + + class Meta: + constraints = [ + models.UniqueConstraint(fields=["oid", "table"], name="unique_constraint") + ] + + # TODO try to cache this for an entire request + @property + def _constraint_record(self): + engine = self.table.schema.database._sa_engine + return get_constraint_record_from_oid(self.oid, engine, get_cached_metadata()) + + @property + def name(self): + return self._constraint_record['conname'] + + @property + def type(self): + return constraint_utils.get_constraint_type_from_char(self._constraint_record['contype']) + + @cached_property + def columns(self): + column_attnum_list = self._constraint_record['conkey'] + return Column.objects.filter(table=self.table, attnum__in=column_attnum_list).order_by("attnum") + + @cached_property + def referent_columns(self): + column_attnum_list = self._constraint_record['confkey'] + if column_attnum_list: + foreign_relation_oid = self._constraint_record['confrelid'] + columns = Column.objects.filter( + table__oid=foreign_relation_oid, + table__schema=self.table.schema, + attnum__in=column_attnum_list + ).order_by("attnum") + return columns + + @property + def ondelete(self): + action_char = self._constraint_record['confdeltype'] + return constraint_utils.get_constraint_action_from_char(action_char) + + @property + def onupdate(self): + action_char = self._constraint_record['confupdtype'] + return constraint_utils.get_constraint_action_from_char(action_char) + + @property + def deferrable(self): + return self._constraint_record['condeferrable'] + + @property + def match(self): + type_char = self._constraint_record['confmatchtype'] + return
constraint_utils.get_constraint_match_type_from_char(type_char) + + def drop(self): + drop_constraint( + self.table._sa_table.name, + self.table._sa_table.schema, + self.table.schema._sa_engine, + self.name + ) + self.delete() + reset_reflection(db_name=self.table.schema.database.name) + + +class DataFile(BaseModel): + created_from_choices = models.TextChoices("created_from", "FILE PASTE URL") + file_type_choices = models.TextChoices("type", "CSV TSV JSON") + + file = models.FileField(upload_to=model_utils.user_directory_path) + user = models.ForeignKey(settings.AUTH_USER_MODEL, blank=True, null=True, on_delete=models.CASCADE) + created_from = models.CharField(max_length=128, choices=created_from_choices.choices) + type = models.CharField(max_length=128, choices=file_type_choices.choices) + table_imported_to = models.ForeignKey(Table, related_name="data_files", blank=True, + null=True, on_delete=models.SET_NULL) + + base_name = models.CharField(max_length=100) + header = models.BooleanField(default=True) + max_level = models.IntegerField(default=0, blank=True) + sheet_index = models.IntegerField(default=0) + delimiter = models.CharField(max_length=1, default=',', blank=True) + escapechar = models.CharField(max_length=1, blank=True) + quotechar = models.CharField(max_length=1, default='"', blank=True) + + +class PreviewColumnSettings(BaseModel): + customized = models.BooleanField() + template = models.CharField(max_length=255) + + +def validate_column_order(value): + """ + Custom validator to ensure that all elements in the list are positive integers. + """ + if not all(isinstance(item, int) and item > 0 for item in value): + raise ValidationError("All elements of column order must be positive integers.") + + +class TableSettings(ReflectionManagerMixin, BaseModel): + preview_settings = models.OneToOneField(PreviewColumnSettings, on_delete=models.CASCADE) + table = models.OneToOneField(Table, on_delete=models.CASCADE, related_name="settings") + column_order = JSONField(null=True, blank=True, default=None, validators=[validate_column_order]) + + def save(self, **kwargs): + # Clean the fields before saving by running the respective field validator(s); + # clean_fields raises ValidationError for invalid data. + self.clean_fields() + super().save(**kwargs) + + +def _create_table_settings(tables): + # TODO Bulk create preview settings to improve performance + for table in tables: + preview_column_settings = PreviewColumnSettings.objects.create(customized=False) + TableSettings.current_objects.create(table=table, preview_settings=preview_column_settings) + + +def _set_default_preview_template(table): + if not table.settings.preview_settings.customized: + preview_template = compute_default_preview_template(table) + preview_settings = table.settings.preview_settings + preview_settings.template = preview_template + preview_settings.save() + + +def compute_default_preview_template(table): + columns = Column.current_objects.filter(table=table).prefetch_related( + 'table', + 'table__schema', + 'table__schema__database' + ).order_by('attnum') + preview_column = None + primary_key_column = None + for column in columns: + if column.primary_key: + primary_key_column = column + else: + preview_column = column + break + if preview_column is None: + preview_column = primary_key_column + + if preview_column: + preview_template = f"{{{preview_column.id}}}" + else: + # The table does not contain any columns; show a blank preview in that case.
+ preview_template = "" + return preview_template diff --git a/mathesar/models/query.py b/mathesar/models/query.py index 539dcead89..a82450f4d4 100644 --- a/mathesar/models/query.py +++ b/mathesar/models/query.py @@ -28,13 +28,14 @@ ListOfDictValidator, TransformationsValidator, ) +from mathesar.models.base import BaseModel from mathesar.state.cached_property import cached_property -from mathesar.models.base import BaseModel, Column +from mathesar.models.deprecated import Column from mathesar.models.relation import Relation from mathesar.state import get_cached_metadata -class UIQuery(BaseModel, Relation): +class Exploration(BaseModel, Relation): name = models.CharField( max_length=128, ) @@ -175,7 +176,7 @@ def replace_transformations_with_processed_transformations(self): Whereas before the transformations attribute was a one-way flow from the client, now it's something that the backend may redefine. This a significant complication of the - data flow. For example, if you replace transformations on a saved UIQuery and save it + data flow. For example, if you replace transformations on a saved Exploration and save it again, we must trigger a reflection, which can have a performance impact. Also, frontend must expect that certain transformations might alter the transformation pipeline, which would then need reflecting by frontend; that might be a breaking change. diff --git a/mathesar/models/shares.py b/mathesar/models/shares.py index 20f076080a..15f9cc3cda 100644 --- a/mathesar/models/shares.py +++ b/mathesar/models/shares.py @@ -24,5 +24,5 @@ class SharedTable(SharedEntity): class SharedQuery(SharedEntity): query = models.ForeignKey( - 'UIQuery', on_delete=models.CASCADE, related_name='shared_query' + 'Exploration', on_delete=models.CASCADE, related_name='shared_query' ) diff --git a/mathesar/models/users.py b/mathesar/models/users.py index 9e3fcbe86a..301ea76314 100644 --- a/mathesar/models/users.py +++ b/mathesar/models/users.py @@ -2,7 +2,8 @@ from django.contrib.auth.models import AbstractUser from django.db import models -from mathesar.models.base import BaseModel, Database, Schema +from mathesar.models.base import BaseModel +from mathesar.models.deprecated import Connection, Schema class User(AbstractUser): @@ -17,6 +18,9 @@ class User(AbstractUser): password_change_needed = models.BooleanField(default=False) display_language = models.CharField(max_length=30, blank=True, default='en') + def metadata_privileges(self, database_id): + return 'read write' + class Role(models.TextChoices): MANAGER = 'manager', 'Manager' @@ -26,7 +30,7 @@ class Role(models.TextChoices): class DatabaseRole(BaseModel): user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, related_name='database_roles') - database = models.ForeignKey(Database, on_delete=models.CASCADE) + database = models.ForeignKey(Connection, on_delete=models.CASCADE) role = models.CharField(max_length=10, choices=Role.choices) class Meta: diff --git a/mathesar/rpc/collaborators.py b/mathesar/rpc/collaborators.py new file mode 100644 index 0000000000..8273f9d4b7 --- /dev/null +++ b/mathesar/rpc/collaborators.py @@ -0,0 +1,123 @@ +from typing import TypedDict + +from modernrpc.core import rpc_method +from modernrpc.auth.basic import http_basic_auth_login_required, http_basic_auth_superuser_required + +from mathesar.models.base import UserDatabaseRoleMap, Database, ConfiguredRole +from mathesar.models.users import User +from mathesar.rpc.exceptions.handlers import handle_rpc_exceptions + + +class 
CollaboratorInfo(TypedDict): + """ + Information about a collaborator. + + Attributes: + id: The Django ID of the UserDatabaseRoleMap model instance. + user_id: The Django ID of the User model instance of the collaborator. + database_id: The Django ID of the Database model instance for the collaborator. + configured_role_id: The Django ID of the ConfiguredRole model instance for the collaborator. + """ + id: int + user_id: int + database_id: int + configured_role_id: int + + @classmethod + def from_model(cls, model): + return cls( + id=model.id, + user_id=model.user.id, + database_id=model.database.id, + configured_role_id=model.configured_role.id + ) + + +@rpc_method(name="collaborators.list") +@http_basic_auth_login_required +@handle_rpc_exceptions +def list_(*, database_id: int = None, **kwargs) -> list[CollaboratorInfo]: + """ + List information about collaborators. Exposed as `list`. + + If called with no `database_id`, all collaborators for all databases are listed. + + Args: + database_id: The Django id of the database associated with the collaborators. + + Returns: + A list of collaborators. + """ + if database_id is not None: + user_database_role_map_qs = UserDatabaseRoleMap.objects.filter(database__id=database_id) + else: + user_database_role_map_qs = UserDatabaseRoleMap.objects.all() + + return [CollaboratorInfo.from_model(db_model) for db_model in user_database_role_map_qs] + + +@rpc_method(name='collaborators.add') +@http_basic_auth_superuser_required +@handle_rpc_exceptions +def add( + *, + database_id: int, + user_id: int, + configured_role_id: int, + **kwargs +) -> CollaboratorInfo: + """ + Set up a new collaborator for a database. + + Args: + database_id: The Django id of the Database to associate with the collaborator. + user_id: The Django id of the User model instance who'd be the collaborator. + configured_role_id: The Django id of the ConfiguredRole model instance to associate with the collaborator. + + Returns: + Information about the new collaborator. + """ + database = Database.objects.get(id=database_id) + user = User.objects.get(id=user_id) + configured_role = ConfiguredRole.objects.get(id=configured_role_id) + collaborator = UserDatabaseRoleMap.objects.create( + database=database, + user=user, + configured_role=configured_role, + server=configured_role.server + ) + return CollaboratorInfo.from_model(collaborator) + + +@rpc_method(name='collaborators.delete') +@http_basic_auth_superuser_required +@handle_rpc_exceptions +def delete(*, collaborator_id: int, **kwargs): + """ + Delete a collaborator from a database. + + Args: + collaborator_id: The Django id of the UserDatabaseRoleMap model instance of the collaborator. + """ + collaborator = UserDatabaseRoleMap.objects.get(id=collaborator_id) + collaborator.delete() + + +@rpc_method(name='collaborators.set_role') +@http_basic_auth_superuser_required +@handle_rpc_exceptions +def set_role( + *, + collaborator_id: int, + configured_role_id: int, + **kwargs +) -> CollaboratorInfo: + """ + Set the role of a collaborator for a database. + + Args: + collaborator_id: The Django id of the UserDatabaseRoleMap model instance of the collaborator. + configured_role_id: The Django id of the ConfiguredRole model instance to associate with the collaborator.
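+ + Returns: + Information about the collaborator with the updated role.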
+ """ + collaborator = UserDatabaseRoleMap.objects.get(id=collaborator_id) + configured_role = ConfiguredRole.objects.get(id=configured_role_id) + collaborator.configured_role = configured_role + collaborator.save() + return CollaboratorInfo.from_model(collaborator) diff --git a/mathesar/rpc/columns/__init__.py b/mathesar/rpc/columns/__init__.py new file mode 100644 index 0000000000..4b40b38c84 --- /dev/null +++ b/mathesar/rpc/columns/__init__.py @@ -0,0 +1 @@ +from .base import * # noqa diff --git a/mathesar/rpc/columns/base.py b/mathesar/rpc/columns/base.py new file mode 100644 index 0000000000..16a7641e39 --- /dev/null +++ b/mathesar/rpc/columns/base.py @@ -0,0 +1,315 @@ +""" +Classes and functions exposed to the RPC endpoint for managing table columns. +""" +from typing import Literal, Optional, TypedDict + +from modernrpc.core import rpc_method, REQUEST_KEY +from modernrpc.auth.basic import http_basic_auth_login_required + +from db.columns.operations.alter import alter_columns_in_table +from db.columns.operations.create import add_columns_to_table +from db.columns.operations.drop import drop_columns_from_table +from db.columns.operations.select import get_column_info_for_table +from mathesar.rpc.columns.metadata import ColumnMetaDataBlob +from mathesar.rpc.exceptions.handlers import handle_rpc_exceptions +from mathesar.rpc.utils import connect +from mathesar.utils.columns import get_columns_meta_data + + +class TypeOptions(TypedDict, total=False): + """ + Options applied to a type. All attributes are optional. + + Take special care with the difference between numeric and date/time + types w.r.t. precision. The attribute has a different meaning + depending on the type to which it's being applied. + + Attributes: + precision: For numeric types, the number of significant digits. + For date/time types, the number of fractional digits. + scale: For numeric types, the number of fractional digits. + fields: Which time fields are stored. See Postgres docs. + length: The maximum length of a character-type field. + item_type: The member type for arrays. + """ + precision: int + scale: int + fields: str + length: int + item_type: str + + @classmethod + def from_dict(cls, type_options): + if type_options is None: + return + # All keys are optional, but we want to validate the keys we + # actually return. + all_keys = dict( + precision=type_options.get("precision"), + scale=type_options.get("scale"), + fields=type_options.get("fields"), + length=type_options.get("length"), + item_type=type_options.get("item_type"), + ) + reduced_keys = {k: v for k, v in all_keys.items() if v is not None} + if reduced_keys != {}: + return cls(**reduced_keys) + + +class ColumnDefault(TypedDict): + """ + A dictionary describing the default value for a column. + + Attributes: + value: An SQL expression giving the default value. + is_dynamic: Whether the `value` is possibly dynamic. + """ + value: str + is_dynamic: bool + + @classmethod + def from_dict(cls, col_default): + if col_default is not None: + return cls( + value=col_default["value"], + is_dynamic=col_default["is_dynamic"], + ) + + +class CreatableColumnInfo(TypedDict): + """ + Information needed to add a new column. + + No keys are required. + + Attributes: + name: The name of the column. + type: The type of the column on the database. + type_options: The options applied to the column type. + nullable: Whether or not the column is nullable. + default: The default value. + description: The description of the column. 
+ """ + name: Optional[str] + type: Optional[str] + type_options: Optional[TypeOptions] + nullable: Optional[bool] + default: Optional[ColumnDefault] + description: Optional[str] + + +class SettableColumnInfo(TypedDict): + """ + Information about a column, restricted to settable fields. + + When possible, Passing `null` for a key will clear the underlying + setting. E.g., + + - `default = null` clears the column default setting. + - `type_options = null` clears the type options for the column. + - `description = null` clears the column description. + + Setting any of `name`, `type`, or `nullable` is a noop. + + + Only the `id` key is required. + + Attributes: + id: The `attnum` of the column in the table. + name: The name of the column. + type: The type of the column on the database. + type_options: The options applied to the column type. + nullable: Whether or not the column is nullable. + default: The default value. + description: The description of the column. + """ + id: int + name: Optional[str] + type: Optional[str] + type_options: Optional[TypeOptions] + nullable: Optional[bool] + default: Optional[ColumnDefault] + description: Optional[str] + + +class PreviewableColumnInfo(TypedDict): + """ + Information needed to preview a column. + + Attributes: + id: The `attnum` of the column in the table. + type: The new type to be applied to a column. + type_options: The options to be applied to the column type. + """ + id: int + type: Optional[str] + type_options: Optional[TypeOptions] + + +class ColumnInfo(TypedDict): + """ + Information about a column. Extends the settable fields. + + Attributes: + id: The `attnum` of the column in the table. + name: The name of the column. + type: The type of the column on the database. + type_options: The options applied to the column type. + nullable: Whether or not the column is nullable. + primary_key: Whether the column is in the primary key. + default: The default value and whether it's dynamic. + has_dependents: Whether the column has dependent objects. + description: The description of the column. + current_role_priv: The privileges available to the user for the column. + valid_target_types: A list of all types to which the column can + be cast. + """ + id: int + name: str + type: str + type_options: TypeOptions + nullable: bool + primary_key: bool + default: ColumnDefault + has_dependents: bool + description: str + current_role_priv: list[Literal['SELECT', 'INSERT', 'UPDATE', 'REFERENCES']] + valid_target_types: list[str] + + @classmethod + def from_dict(cls, col_info): + return cls( + id=col_info["id"], + name=col_info["name"], + type=col_info["type"], + type_options=TypeOptions.from_dict(col_info.get("type_options")), + nullable=col_info["nullable"], + primary_key=col_info["primary_key"], + default=ColumnDefault.from_dict(col_info.get("default")), + has_dependents=col_info["has_dependents"], + description=col_info.get("description"), + current_role_priv=col_info["current_role_priv"], + valid_target_types=col_info.get("valid_target_types") + ) + + +@rpc_method(name="columns.list") +@http_basic_auth_login_required +@handle_rpc_exceptions +def list_(*, table_oid: int, database_id: int, **kwargs) -> list[ColumnInfo]: + """ + List information about columns for a table. Exposed as `list`. + + Args: + table_oid: Identity of the table in the user's database. + database_id: The Django id of the database containing the table. + + Returns: + A list of column details. 
+ """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + raw_column_info = get_column_info_for_table(table_oid, conn) + return [ColumnInfo.from_dict(col) for col in raw_column_info] + + +@rpc_method(name="columns.add") +@http_basic_auth_login_required +@handle_rpc_exceptions +def add( + *, + column_data_list: list[CreatableColumnInfo], + table_oid: int, + database_id: int, + **kwargs +) -> list[int]: + """ + Add columns to a table. + + There are defaults for both the name and type of a column, and so + passing `[{}]` for `column_data_list` would add a single column of + type `CHARACTER VARYING`, with an auto-generated name. + + Args: + column_data_list: A list describing desired columns to add. + table_oid: Identity of the table to which we'll add columns. + database_id: The Django id of the database containing the table. + + Returns: + An array of the attnums of the new columns. + """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + return add_columns_to_table(table_oid, column_data_list, conn) + + +@rpc_method(name="columns.patch") +@http_basic_auth_login_required +@handle_rpc_exceptions +def patch( + *, + column_data_list: list[SettableColumnInfo], + table_oid: int, + database_id: int, + **kwargs +) -> int: + """ + Alter details of preexisting columns in a table. + + Does not support altering the type or type options of array columns. + + Args: + column_data_list: A list describing desired column alterations. + table_oid: Identity of the table whose columns we'll modify. + database_id: The Django id of the database containing the table. + + Returns: + The number of columns altered. + """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + return alter_columns_in_table(table_oid, column_data_list, conn) + + +@rpc_method(name="columns.delete") +@http_basic_auth_login_required +@handle_rpc_exceptions +def delete( + *, column_attnums: list[int], table_oid: int, database_id: int, **kwargs +) -> int: + """ + Delete columns from a table. + + Args: + column_attnums: A list of attnums of columns to delete. + table_oid: Identity of the table in the user's database. + database_id: The Django id of the database containing the table. + + Returns: + The number of columns dropped. + """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + return drop_columns_from_table(table_oid, column_attnums, conn) + + +@rpc_method(name="columns.list_with_metadata") +@http_basic_auth_login_required +@handle_rpc_exceptions +def list_with_metadata(*, table_oid: int, database_id: int, **kwargs) -> list: + """ + List information about columns for a table, along with the metadata associated with each column. + Args: + table_oid: Identity of the table in the user's database. + database_id: The Django id of the database containing the table. + Returns: + A list of column details. 
+ """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + column_info = get_column_info_for_table(table_oid, conn) + column_metadata = get_columns_meta_data(table_oid, database_id) + metadata_map = { + c.attnum: ColumnMetaDataBlob.from_model(c) for c in column_metadata + } + return [col | {"metadata": metadata_map.get(col["id"])} for col in column_info] diff --git a/mathesar/rpc/columns/metadata.py b/mathesar/rpc/columns/metadata.py new file mode 100644 index 0000000000..4b3876de05 --- /dev/null +++ b/mathesar/rpc/columns/metadata.py @@ -0,0 +1,171 @@ +""" +Classes and functions exposed to the RPC endpoint for managing column metadata. +""" +from typing import Literal, Optional, TypedDict + +from modernrpc.core import rpc_method +from modernrpc.auth.basic import http_basic_auth_login_required + +from mathesar.rpc.exceptions.handlers import handle_rpc_exceptions +from mathesar.utils.columns import get_columns_meta_data, set_columns_meta_data + + +class ColumnMetaDataRecord(TypedDict): + """ + Metadata for a column in a table. + + Only the `database`, `table_oid`, and `attnum` keys are required. + + Attributes: + database_id: The Django id of the database containing the table. + table_oid: The OID of the table containing the column. + attnum: The attnum of the column in the table. + bool_input: How the input for a boolean column should be shown. + bool_true: A string to display for `true` values. + bool_false: A string to display for `false` values. + num_min_frac_digits: Minimum digits shown after the decimal point. + num_max_frac_digits: Maximum digits shown after the decimal point. + num_grouping: Specifies how grouping separators are displayed for numeric values. + num_format: Specifies the locale-specific format for displaying numeric values. + mon_currency_symbol: The currency symbol shown for money value. + mon_currency_location: Where the currency symbol should be shown. + time_format: A string representing the format of time values. + date_format: A string representing the format of date values. + duration_min: The smallest unit for displaying durations. + duration_max: The largest unit for displaying durations. + """ + database_id: int + table_oid: int + attnum: int + bool_input: Optional[Literal["dropdown", "checkbox"]] + bool_true: Optional[str] + bool_false: Optional[str] + num_min_frac_digits: Optional[int] + num_max_frac_digits: Optional[int] + num_grouping: Optional[str] + num_format: Optional[str] + mon_currency_symbol: Optional[str] + mon_currency_location: Optional[Literal["after-minus", "end-with-space"]] + time_format: Optional[str] + date_format: Optional[str] + duration_min: Optional[str] + duration_max: Optional[str] + + @classmethod + def from_model(cls, model): + return cls( + database_id=model.database.id, + table_oid=model.table_oid, + attnum=model.attnum, + bool_input=model.bool_input, + bool_true=model.bool_true, + bool_false=model.bool_false, + num_min_frac_digits=model.num_min_frac_digits, + num_max_frac_digits=model.num_max_frac_digits, + num_grouping=model.num_grouping, + num_format=model.num_format, + mon_currency_symbol=model.mon_currency_symbol, + mon_currency_location=model.mon_currency_location, + time_format=model.time_format, + date_format=model.date_format, + duration_min=model.duration_min, + duration_max=model.duration_max, + ) + + +class ColumnMetaDataBlob(TypedDict): + """ + The metadata fields which can be set for a column in a table. + + Attributes: + attnum: The attnum of the column in the table. 
+ bool_input: How the input for a boolean column should be shown. + bool_true: A string to display for `true` values. + bool_false: A string to display for `false` values. + num_min_frac_digits: Minimum digits shown after the decimal point. + num_max_frac_digits: Maximum digits shown after the decimal point. + num_grouping: Specifies how grouping separators are displayed for numeric values. + num_format: Specifies the locale-specific format for displaying numeric values. + mon_currency_symbol: The currency symbol shown for money values. + mon_currency_location: Where the currency symbol should be shown. + time_format: A string representing the format of time values. + date_format: A string representing the format of date values. + duration_min: The smallest unit for displaying durations. + duration_max: The largest unit for displaying durations. + """ + attnum: int + bool_input: Optional[Literal["dropdown", "checkbox"]] + bool_true: Optional[str] + bool_false: Optional[str] + num_min_frac_digits: Optional[int] + num_max_frac_digits: Optional[int] + num_grouping: Optional[str] + num_format: Optional[str] + mon_currency_symbol: Optional[str] + mon_currency_location: Optional[Literal["after-minus", "end-with-space"]] + time_format: Optional[str] + date_format: Optional[str] + duration_min: Optional[str] + duration_max: Optional[str] + + @classmethod + def from_model(cls, model): + return cls( + attnum=model.attnum, + bool_input=model.bool_input, + bool_true=model.bool_true, + bool_false=model.bool_false, + num_min_frac_digits=model.num_min_frac_digits, + num_max_frac_digits=model.num_max_frac_digits, + num_grouping=model.num_grouping, + num_format=model.num_format, + mon_currency_symbol=model.mon_currency_symbol, + mon_currency_location=model.mon_currency_location, + time_format=model.time_format, + date_format=model.date_format, + duration_min=model.duration_min, + duration_max=model.duration_max, + ) + + +@rpc_method(name="columns.metadata.list") +@http_basic_auth_login_required +@handle_rpc_exceptions +def list_(*, table_oid: int, database_id: int, **kwargs) -> list[ColumnMetaDataRecord]: + """ + List metadata associated with columns for a table. Exposed as `list`. + + Args: + table_oid: Identity of the table in the user's database. + database_id: The Django id of the database containing the table. + + Returns: + A list of column metadata objects. + """ + columns_meta_data = get_columns_meta_data(table_oid, database_id) + return [ + ColumnMetaDataRecord.from_model(model) for model in columns_meta_data + ] + + +@rpc_method(name="columns.metadata.set") +@http_basic_auth_login_required +@handle_rpc_exceptions +def set_( + *, + column_meta_data_list: list[ColumnMetaDataBlob], + table_oid: int, + database_id: int, + **kwargs +) -> None: + """ + Set metadata associated with columns of a table for a database. Exposed as `set`. + + Args: + column_meta_data_list: A list describing desired metadata alterations. + table_oid: Identity of the table whose metadata we'll modify. + database_id: The Django id of the database containing the table.
+ """ + set_columns_meta_data( + column_meta_data_list, table_oid, database_id + ) diff --git a/mathesar/rpc/connections.py b/mathesar/rpc/connections.py index 7972992a47..c0486b1498 100644 --- a/mathesar/rpc/connections.py +++ b/mathesar/rpc/connections.py @@ -6,16 +6,16 @@ from modernrpc.core import rpc_method from modernrpc.auth.basic import http_basic_auth_superuser_required -from mathesar.utils import connections +from mathesar.utils import connections, permissions from mathesar.rpc.exceptions.handlers import handle_rpc_exceptions -class DBModelReturn(TypedDict): +class ConnectionReturn(TypedDict): """ - Information about a database model. + Information about a connection model. Attributes: - id (int): The Django id of the Database object added. + id (int): The Django id of the Connection object added. nickname (str): Used to identify the added connection. database (str): The name of the database on the server. username (str): The username of the role for the connection. @@ -30,14 +30,14 @@ class DBModelReturn(TypedDict): port: int @classmethod - def from_db_model(cls, db_model): + def from_model(cls, connection): return cls( - id=db_model.id, - nickname=db_model.name, - database=db_model.db_name, - username=db_model.username, - host=db_model.host, - port=db_model.port + id=connection.id, + nickname=connection.name, + database=connection.db_name, + username=connection.username, + host=connection.host, + port=connection.port ) @@ -51,7 +51,7 @@ def add_from_known_connection( create_db: bool = False, connection_id: int = None, sample_data: list[str] = [], -) -> DBModelReturn: +) -> ConnectionReturn: """ Add a new connection from an already existing one. @@ -80,10 +80,10 @@ def add_from_known_connection( 'connection_type': connection_type, 'connection_id': connection_id } - db_model = connections.copy_connection_from_preexisting( + connection_model = connections.copy_connection_from_preexisting( connection, nickname, database, create_db, sample_data ) - return DBModelReturn.from_db_model(db_model) + return ConnectionReturn.from_model(connection_model) @rpc_method(name='connections.add_from_scratch') @@ -98,7 +98,7 @@ def add_from_scratch( host: str, port: int, sample_data: list[str] = [], -) -> DBModelReturn: +) -> ConnectionReturn: """ Add a new connection to a PostgreSQL server from scratch. @@ -121,7 +121,26 @@ def add_from_scratch( Returns: Metadata about the Database associated with the connection. """ - db_model = connections.create_connection_from_scratch( + connection_model = connections.create_connection_from_scratch( user, password, host, port, nickname, database, sample_data ) - return DBModelReturn.from_db_model(db_model) + return ConnectionReturn.from_model(connection_model) + + +@rpc_method(name='connections.grant_access_to_user') +@http_basic_auth_superuser_required +@handle_rpc_exceptions +def grant_access_to_user(*, connection_id: int, user_id: int): + """ + Migrate a connection to new models and grant access to a user. + + This function is designed to be temporary, and should probably be + removed once we have completed the new users and permissions setup + for beta. You pass any conneciton id and user id. The function will + fill the required models as needed. + + Args: + connection_id: The Django id of an old-style connection. + user_id: The Django id of a user. 
+ """ + permissions.migrate_connection_for_user(connection_id, user_id) diff --git a/mathesar/rpc/constraints.py b/mathesar/rpc/constraints.py new file mode 100644 index 0000000000..dd5aef8fb4 --- /dev/null +++ b/mathesar/rpc/constraints.py @@ -0,0 +1,176 @@ +""" +Classes and functions exposed to the RPC endpoint for managing table constraints. +""" +from typing import Optional, TypedDict, Union + +from modernrpc.core import rpc_method, REQUEST_KEY +from modernrpc.auth.basic import http_basic_auth_login_required + +from db.constraints.operations.select import get_constraints_for_table +from db.constraints.operations.create import create_constraint +from db.constraints.operations.drop import drop_constraint_via_oid +from mathesar.rpc.exceptions.handlers import handle_rpc_exceptions +from mathesar.rpc.utils import connect + + +class ForeignKeyConstraint(TypedDict): + """ + Information about a foreign key constraint. + + Attributes: + type: The type of the constraint(`'f'` for foreign key constraint). + columns: List of columns to set a foreign key on. + fkey_relation_id: The OID of the referent table. + fkey_columns: List of referent column(s). + name: The name of the constraint. + deferrable: Whether to postpone constraint checking until the end of the transaction. + fkey_update_action: Specifies what action should be taken when the referenced key is updated. + Valid options include `'a'(no action)`(default behavior), `'r'(restrict)`, `'c'(cascade)`, `'n'(set null)`, `'d'(set default)` + fkey_delete_action: Specifies what action should be taken when the referenced key is deleted. + Valid options include `'a'(no action)`(default behavior), `'r'(restrict)`, `'c'(cascade)`, `'n'(set null)`, `'d'(set default)` + fkey_match_type: Specifies how the foreign key matching should be performed. + Valid options include `'f'(full match)`, `'s'(simple match)`(default behavior). + """ + type: str = 'f' + columns: list[int] + fkey_relation_id: int + fkey_columns: list[int] + name: Optional[str] + deferrable: Optional[bool] + fkey_update_action: Optional[str] + fkey_delete_action: Optional[str] + fkey_match_type: Optional[str] + + +class PrimaryKeyConstraint(TypedDict): + """ + Information about a primary key constraint. + + Attributes: + type: The type of the constraint(`'p'` for primary key constraint). + columns: List of columns to set a primary key on. + name: The name of the constraint. + deferrable: Whether to postpone constraint checking until the end of the transaction. + """ + type: str = 'p' + columns: list[int] + name: Optional[str] + deferrable: Optional[bool] + + +class UniqueConstraint(TypedDict): + """ + Information about a unique constraint. + + Attributes: + type: The type of the constraint(`'u'` for unique constraint). + columns: List of columns to set a unique constraint on. + name: The name of the constraint. + deferrable: Whether to postpone constraint checking until the end of the transaction. + """ + type: str = 'u' + columns: list[int] + name: Optional[str] + deferrable: Optional[bool] + + +CreatableConstraintInfo = list[Union[ForeignKeyConstraint, PrimaryKeyConstraint, UniqueConstraint]] +""" +Type alias for a list of createable constraints which can be unique, primary key, or foreign key constraints. +""" + + +class ConstraintInfo(TypedDict): + """ + Information about a constraint + + Attributes: + oid: The OID of the constraint. + name: The name of the constraint. + type: The type of the constraint. + columns: List of constrained columns. 
+ referent_table_oid: The OID of the referent table. + referent_columns: List of referent column(s). + """ + oid: int + name: str + type: str + columns: list[int] + referent_table_oid: Optional[int] + referent_columns: Optional[list[int]] + + @classmethod + def from_dict(cls, con_info): + return cls( + oid=con_info["oid"], + name=con_info["name"], + type=con_info["type"], + columns=con_info["columns"], + referent_table_oid=con_info["referent_table_oid"], + referent_columns=con_info["referent_columns"] + ) + + +@rpc_method(name="constraints.list") +@http_basic_auth_login_required +@handle_rpc_exceptions +def list_(*, table_oid: int, database_id: int, **kwargs) -> list[ConstraintInfo]: + """ + List information about constraints in a table. Exposed as `list`. + + Args: + table_oid: The oid of the table to list constraints for. + database_id: The Django id of the database containing the table. + + Returns: + A list of constraint details. + """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + con_info = get_constraints_for_table(table_oid, conn) + return [ConstraintInfo.from_dict(con) for con in con_info] + + +@rpc_method(name="constraints.add") +@http_basic_auth_login_required +@handle_rpc_exceptions +def add( + *, + table_oid: int, + constraint_def_list: CreatableConstraintInfo, + database_id: int, **kwargs +) -> list[int]: + """ + Add constraint(s) on a table in bulk. + + Args: + table_oid: Identity of the table to add constraints to. + constraint_def_list: A list describing the constraints to add. + database_id: The Django id of the database containing the table. + + Returns: + The oid(s) of all the constraints on the table. + """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + return create_constraint(table_oid, constraint_def_list, conn) + + +@rpc_method(name="constraints.delete") +@http_basic_auth_login_required +@handle_rpc_exceptions +def delete(*, table_oid: int, constraint_oid: int, database_id: int, **kwargs) -> str: + """ + Delete a constraint from a table. + + Args: + table_oid: Identity of the table to delete constraint for. + constraint_oid: The OID of the constraint to delete. + database_id: The Django id of the database containing the table. + + Returns: + The name of the dropped constraint. + """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + return drop_constraint_via_oid(table_oid, constraint_oid, conn) diff --git a/mathesar/rpc/data_modeling.py b/mathesar/rpc/data_modeling.py new file mode 100644 index 0000000000..82dd3d56ca --- /dev/null +++ b/mathesar/rpc/data_modeling.py @@ -0,0 +1,185 @@ +""" +Classes and functions exposed to the RPC endpoint for managing data models. +""" +from typing import TypedDict + +from modernrpc.core import rpc_method, REQUEST_KEY +from modernrpc.auth.basic import http_basic_auth_login_required + +from db.links.operations import create as links_create +from db.tables.operations import infer_types, split, move_columns as move_cols +from mathesar.rpc.exceptions.handlers import handle_rpc_exceptions +from mathesar.rpc.utils import connect + + +@rpc_method(name="data_modeling.add_foreign_key_column") +@http_basic_auth_login_required +@handle_rpc_exceptions +def add_foreign_key_column( + *, + column_name: str, + referrer_table_oid: int, + referent_table_oid: int, + database_id: int, + **kwargs +) -> None: + """ + Add a foreign key column to a table.
+ + The foreign key column will be newly created, and will reference the + `id` column of the referent table. + + Args: + column_name: The name of the column to create. + referrer_table_oid: The OID of the table getting the new column. + referent_table_oid: The OID of the table being referenced. + database_id: The Django id of the database containing the table. + """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + links_create.add_foreign_key_column( + conn, column_name, referrer_table_oid, referent_table_oid + ) + + +class MappingColumn(TypedDict): + """ + An object defining a foreign key column in a mapping table. + + Attributes: + column_name: The name of the foreign key column. + referent_table_oid: The OID of the table the column references. + """ + column_name: str + referent_table_oid: int + + +@rpc_method(name="data_modeling.add_mapping_table") +@http_basic_auth_login_required +@handle_rpc_exceptions +def add_mapping_table( + *, + table_name: str, + mapping_columns: list[MappingColumn], + schema_oid: int, + database_id: int, + **kwargs +) -> None: + """ + Add a mapping table to give a many-to-many link between referents. + + The foreign key columns in the mapping table will reference the `id` + column of the referent tables. + + Args: + table_name: The name for the new mapping table. + schema_oid: The OID of the schema for the mapping table. + mapping_columns: The foreign key columns to create in the + mapping table. + database_id: The Django id of the database containing the schema. + """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + links_create.add_mapping_table( + conn, schema_oid, table_name, mapping_columns + ) + + +@rpc_method(name="data_modeling.suggest_types") +@http_basic_auth_login_required +@handle_rpc_exceptions +def suggest_types(*, table_oid: int, database_id: int, **kwargs) -> dict: + """ + Infer the best type for each column in the table. + + Currently we only suggest different types for columns which originate + as type `text`. + + Args: + table_oid: The OID of the table whose columns we're inferring types for. + database_id: The Django id of the database containing the table. + + The response JSON will have attnum keys, and values will be the + result of `format_type` for the inferred type of each column, i.e., the + canonical string referring to the type. + """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + return infer_types.infer_table_column_data_types(conn, table_oid) + + +class SplitTableInfo(TypedDict): + """ + Information about a table, created from column extraction. + + Attributes: + extracted_table_oid: The OID of the table that is created from column extraction. + new_fkey_attnum: The attnum of the newly created foreign key column + on the original table, referring to the extracted_table. + """ + extracted_table_oid: int + new_fkey_attnum: int + + +@rpc_method(name="data_modeling.split_table") +@http_basic_auth_login_required +@handle_rpc_exceptions +def split_table( + *, + table_oid: int, + column_attnums: list, + extracted_table_name: str, + database_id: int, + relationship_fk_column_name: str = None, + **kwargs +) -> SplitTableInfo: + """ + Extract columns from a table to create a new table, linked by a foreign key. + + Args: + table_oid: The OID of the table whose columns we'll extract. + column_attnums: A list of the attnums of the columns to extract. + extracted_table_name: The name of the new table to be made from the extracted columns. + database_id: The Django id of the database containing the table.
+ relationship_fk_column_name: The name to give the new foreign key column in the remainder table (optional). + + Returns: + The SplitTableInfo object describing the details for the created table as a result of column extraction. + """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + return split.split_table( + conn, + table_oid, + column_attnums, + extracted_table_name, + relationship_fk_column_name + ) + + +@rpc_method(name="data_modeling.move_columns") +@http_basic_auth_login_required +@handle_rpc_exceptions +def move_columns( + *, + source_table_oid: int, + target_table_oid: int, + move_column_attnums: list[int], + database_id: int, + **kwargs +) -> None: + """ + Move columns from a source table to a table it references, linked by a foreign key. + + Args: + source_table_oid: The OID of the source table whose column(s) we'll move. + target_table_oid: The OID of the target table where the moved column(s) will be added. + move_column_attnums: The list of attnum(s) to move from source table to the target table. + database_id: The Django id of the database containing the table. + """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + move_cols.move_columns_to_referenced_table( + conn, + source_table_oid, + target_table_oid, + move_column_attnums + ) diff --git a/mathesar/rpc/databases/__init__.py b/mathesar/rpc/databases/__init__.py new file mode 100644 index 0000000000..4b40b38c84 --- /dev/null +++ b/mathesar/rpc/databases/__init__.py @@ -0,0 +1 @@ +from .base import * # noqa diff --git a/mathesar/rpc/databases/base.py b/mathesar/rpc/databases/base.py new file mode 100644 index 0000000000..fa68060e34 --- /dev/null +++ b/mathesar/rpc/databases/base.py @@ -0,0 +1,72 @@ +from typing import Literal, TypedDict + +from modernrpc.core import rpc_method, REQUEST_KEY +from modernrpc.auth.basic import http_basic_auth_login_required + +from mathesar.rpc.utils import connect +from db.databases.operations.select import get_database +from db.databases.operations.drop import drop_database +from mathesar.rpc.exceptions.handlers import handle_rpc_exceptions + + +class DatabaseInfo(TypedDict): + """ + Information about a database and the current user's privileges on it. + + Attributes: + oid: The `oid` of the database on the server. + name: The name of the database on the server. + owner_oid: The `oid` of the owner of the database. + current_role_priv: A list of privileges available to the user. + current_role_owns: Whether the user is an owner of the database. + """ + oid: int + name: str + owner_oid: int + current_role_priv: list[Literal['CONNECT', 'CREATE', 'TEMPORARY']] + current_role_owns: bool + + @classmethod + def from_dict(cls, d): + return cls( + oid=d["oid"], + name=d["name"], + owner_oid=d["owner_oid"], + current_role_priv=d["current_role_priv"], + current_role_owns=d["current_role_owns"] + ) + + +@rpc_method(name="databases.get") +@http_basic_auth_login_required +@handle_rpc_exceptions +def get(*, database_id: int, **kwargs) -> DatabaseInfo: + """ + Get information about a database. + + Args: + database_id: The Django id of the database. + + Returns: + Information about the database, and the current user's privileges.
+ """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + db_info = get_database(conn) + return DatabaseInfo.from_dict(db_info) + + +@rpc_method(name="databases.delete") +@http_basic_auth_login_required +@handle_rpc_exceptions +def delete(*, database_oid: int, database_id: int, **kwargs) -> None: + """ + Drop a database from the server. + + Args: + database_oid: The OID of the database to delete on the database. + database_id: The Django id of the database to connect to. + """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + drop_database(database_oid, conn) diff --git a/mathesar/rpc/databases/configured.py b/mathesar/rpc/databases/configured.py new file mode 100644 index 0000000000..d2eceed9bb --- /dev/null +++ b/mathesar/rpc/databases/configured.py @@ -0,0 +1,74 @@ +from typing import TypedDict + +from modernrpc.core import rpc_method, REQUEST_KEY +from modernrpc.auth.basic import http_basic_auth_login_required + +from mathesar.models.base import Database +from mathesar.rpc.exceptions.handlers import handle_rpc_exceptions + + +class ConfiguredDatabaseInfo(TypedDict): + """ + Information about a database. + + Attributes: + id: the Django ID of the database model instance. + name: The name of the database on the server. + server_id: the Django ID of the server model instance for the database. + """ + id: int + name: str + server_id: int + + @classmethod + def from_model(cls, model): + return cls( + id=model.id, + name=model.name, + server_id=model.server.id + ) + + +@rpc_method(name="databases.configured.list") +@http_basic_auth_login_required +@handle_rpc_exceptions +def list_(*, server_id: int = None, **kwargs) -> list[ConfiguredDatabaseInfo]: + """ + List information about databases for a server. Exposed as `list`. + + If called with no `server_id`, all databases for all servers are listed. + + Args: + server_id: The Django id of the server containing the databases. + + Returns: + A list of database details. + """ + user = kwargs.get(REQUEST_KEY).user + if user.is_superuser: + database_qs = Database.objects.filter( + server__id=server_id + ) if server_id is not None else Database.objects.all() + else: + database_qs = Database.objects.filter( + server__id=server_id, + userdatabaserolemap__user=user + ) if server_id is not None else Database.objects.filter( + userdatabaserolemap__user=user + ) + + return [ConfiguredDatabaseInfo.from_model(db_model) for db_model in database_qs] + + +@rpc_method(name="databases.configured.disconnect") +@http_basic_auth_login_required +@handle_rpc_exceptions +def disconnect(*, database_id: int, **kwargs) -> None: + """ + Disconnect a configured database. + + Args: + database_id: The Django id of the database. 
+ """ + database_qs = Database.objects.get(id=database_id) + database_qs.delete() diff --git a/mathesar/rpc/databases/privileges.py b/mathesar/rpc/databases/privileges.py new file mode 100644 index 0000000000..6dd5f88a5c --- /dev/null +++ b/mathesar/rpc/databases/privileges.py @@ -0,0 +1,108 @@ +from typing import Literal, TypedDict + +from modernrpc.core import rpc_method, REQUEST_KEY +from modernrpc.auth.basic import http_basic_auth_login_required + +from db.roles.operations.select import list_db_priv +from db.roles.operations.update import replace_database_privileges_for_roles +from db.roles.operations.ownership import transfer_database_ownership +from mathesar.rpc.databases.base import DatabaseInfo +from mathesar.rpc.utils import connect +from mathesar.rpc.exceptions.handlers import handle_rpc_exceptions + + +class DBPrivileges(TypedDict): + """ + Information about database privileges. + + Attributes: + role_oid: The `oid` of the role on the database server. + direct: A list of database privileges for the afforementioned role_oid. + """ + role_oid: int + direct: list[Literal['CONNECT', 'CREATE', 'TEMPORARY']] + + @classmethod + def from_dict(cls, d): + return cls( + role_oid=d["role_oid"], + direct=d["direct"] + ) + + +@rpc_method(name="databases.privileges.list_direct") +@http_basic_auth_login_required +@handle_rpc_exceptions +def list_direct(*, database_id: int, **kwargs) -> list[DBPrivileges]: + """ + List database privileges for non-inherited roles. + + Args: + database_id: The Django id of the database. + + Returns: + A list of database privileges. + """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + raw_db_priv = list_db_priv(conn) + return [DBPrivileges.from_dict(i) for i in raw_db_priv] + + +@rpc_method(name="databases.privileges.replace_for_roles") +@http_basic_auth_login_required +@handle_rpc_exceptions +def replace_for_roles( + *, privileges: list[DBPrivileges], database_id: int, **kwargs +) -> list[DBPrivileges]: + """ + Replace direct database privileges for roles. + + Possible privileges are `CONNECT`, `CREATE`, and `TEMPORARY`. + + Only roles which are included in a passed `DBPrivileges` object are + affected. + + WARNING: Any privilege included in the `direct` list for a role + is GRANTed, and any privilege not included is REVOKEd. + + Attributes: + privileges: The new privilege sets for roles. + database_id: The Django id of the database. + + Returns: + A list of all non-default privileges on the database after the + operation. + """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + raw_db_priv = replace_database_privileges_for_roles( + conn, [DBPrivileges.from_dict(i) for i in privileges] + ) + return [DBPrivileges.from_dict(i) for i in raw_db_priv] + + +@rpc_method(name="databases.privileges.transfer_ownership") +@http_basic_auth_login_required +@handle_rpc_exceptions +def transfer_ownership(*, new_owner_oid: int, database_id: int, **kwargs) -> DatabaseInfo: + """ + Transfers ownership of the current database to a new owner. + + Attributes: + new_owner_oid: The OID of the role whom we want to be the new owner of the current database. + database_id: The Django id of the database whose ownership is to be transferred. + + Note: To successfully transfer ownership of a database to a new owner the current user must: + - Be a Superuser/Owner of the current database. + - Be a `MEMBER` of the new owning role. i.e. The current role should be able to `SET ROLE` + to the new owning role. 
+
+
+@rpc_method(name="databases.privileges.transfer_ownership")
+@http_basic_auth_login_required
+@handle_rpc_exceptions
+def transfer_ownership(*, new_owner_oid: int, database_id: int, **kwargs) -> DatabaseInfo:
+    """
+    Transfers ownership of the current database to a new owner.
+
+    Args:
+        new_owner_oid: The OID of the role whom we want to be the new owner of the current database.
+        database_id: The Django id of the database whose ownership is to be transferred.
+
+    Note: To successfully transfer ownership of a database to a new owner the current user must:
+        - Be a Superuser/Owner of the current database.
+        - Be a `MEMBER` of the new owning role. i.e. The current role should be able to `SET ROLE`
+          to the new owning role.
+        - Have `CREATEDB` privilege.
+
+    Returns:
+        Information about the database, and the current user privileges.
+    """
+    user = kwargs.get(REQUEST_KEY).user
+    with connect(database_id, user) as conn:
+        db_info = transfer_database_ownership(new_owner_oid, conn)
+    return DatabaseInfo.from_dict(db_info)
diff --git a/mathesar/rpc/databases/setup.py b/mathesar/rpc/databases/setup.py
new file mode 100644
index 0000000000..d423d3438e
--- /dev/null
+++ b/mathesar/rpc/databases/setup.py
@@ -0,0 +1,102 @@
+"""
+RPC functions for setting up database connections.
+"""
+from typing import TypedDict
+
+from modernrpc.core import rpc_method, REQUEST_KEY
+from modernrpc.auth.basic import http_basic_auth_superuser_required
+
+from mathesar.utils import permissions
+from mathesar.rpc.exceptions.handlers import handle_rpc_exceptions
+from mathesar.rpc.servers.configured import ConfiguredServerInfo
+from mathesar.rpc.databases.configured import ConfiguredDatabaseInfo
+from mathesar.rpc.roles.configured import ConfiguredRoleInfo
+
+
+class DatabaseConnectionResult(TypedDict):
+    """
+    Info about the objects resulting from calling the setup functions.
+
+    These functions will get or create an instance of the Server,
+    Database, and ConfiguredRole models, as well as a UserDatabaseRoleMap entry.
+
+    Attributes:
+        server: Information on the Server model instance.
+        database: Information on the Database model instance.
+        configured_role: Information on the ConfiguredRole model instance.
+    """
+    server: ConfiguredServerInfo
+    database: ConfiguredDatabaseInfo
+    configured_role: ConfiguredRoleInfo
+
+    @classmethod
+    def from_model(cls, model):
+        return cls(
+            server=ConfiguredServerInfo.from_model(model.server),
+            database=ConfiguredDatabaseInfo.from_model(model.database),
+            configured_role=ConfiguredRoleInfo.from_model(model.configured_role),
+        )
+
+
+@rpc_method(name='databases.setup.create_new')
+@http_basic_auth_superuser_required
+@handle_rpc_exceptions
+def create_new(
+    *,
+    database: str,
+    sample_data: list[str] = [],
+    **kwargs
+) -> DatabaseConnectionResult:
+    """
+    Set up a new database on the internal server.
+
+    The calling user will get access to that database using the default
+    role stored in Django settings.
+
+    Args:
+        database: The name of the new database.
+        sample_data: A list of strings requesting that some example data
+            sets be installed on the underlying database. Valid list
+            members are 'library_management' and 'movie_collection'.
+    """
+    user = kwargs.get(REQUEST_KEY).user
+    result = permissions.set_up_new_database_for_user_on_internal_server(
+        database, user, sample_data=sample_data
+    )
+    return DatabaseConnectionResult.from_model(result)
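A usage sketch for `create_new`, reusing the `rpc` helper from the first example (the database name is a placeholder):

```python
# Set up a brand-new database on the internal server, pre-loaded with
# the library_management sample data set.
result = rpc(
    "databases.setup.create_new",
    database="inventory",
    sample_data=["library_management"],
)
print(result["database"]["id"], result["configured_role"]["name"])
```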
+
+
+@rpc_method(name='databases.setup.connect_existing')
+@http_basic_auth_superuser_required
+@handle_rpc_exceptions
+def connect_existing(
+    *,
+    host: str,
+    port: int,
+    database: str,
+    role: str,
+    password: str,
+    sample_data: list[str] = [],
+    **kwargs
+) -> DatabaseConnectionResult:
+    """
+    Connect Mathesar to an existing database on a server.
+
+    The calling user will get access to that database using the
+    credentials passed to this function.
+
+    Args:
+        host: The host of the database server.
+        port: The port of the database server.
+        database: The name of the database on the server.
+        role: The role on the server to use for the connection.
+        password: A password valid for the role.
+        sample_data: A list of strings requesting that some example data
+            sets be installed on the underlying database. Valid list
+            members are 'library_management' and 'movie_collection'.
+    """
+    user = kwargs.get(REQUEST_KEY).user
+    result = permissions.set_up_preexisting_database_for_user(
+        host, port, database, role, password, user, sample_data=sample_data
+    )
+    return DatabaseConnectionResult.from_model(result)
diff --git a/mathesar/rpc/exceptions/error_codes.py b/mathesar/rpc/exceptions/error_codes.py
index 52b4925196..af25368002 100644
--- a/mathesar/rpc/exceptions/error_codes.py
+++ b/mathesar/rpc/exceptions/error_codes.py
@@ -516,7 +516,7 @@ def get_error_code(err: Exception) -> int:
         "DictHasBadKeys": -28007,
         "DistinctColumnRequiredAPIException": -28008,
         "DoesNotExist": -28009,
-        "DuplicateUIQueryInSchemaAPIException": -28010,
+        "DuplicateExplorationInSchemaAPIException": -28010,
         "EditingPublicSchemaIsDisallowed": -28011,
         "GenericAPIException": -28012,
         "IncompatibleFractionDigitValuesAPIException": -28013,
diff --git a/mathesar/rpc/exceptions/handlers.py b/mathesar/rpc/exceptions/handlers.py
index 1a45d38d25..62203230f8 100644
--- a/mathesar/rpc/exceptions/handlers.py
+++ b/mathesar/rpc/exceptions/handlers.py
@@ -8,6 +8,8 @@
 
 def handle_rpc_exceptions(f):
     """Wrap a function to process any Exception raised."""
+    f.rpc_exceptions_handled = True
+
     @wraps(f)
     def safe_func(*args, **kwargs):
         try:
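The new `rpc_exceptions_handled` flag makes the wrapping testable: the attribute is set on the wrapped function before `@wraps(f)` copies its `__dict__` onto the wrapper, so it survives decoration. A minimal sketch of a test asserting that RPC functions are wrapped (the test itself is an illustration, not part of this diff):

```python
from mathesar.rpc import explorations

def test_exploration_methods_are_wrapped():
    # handle_rpc_exceptions sets this attribute; any function missing it
    # was registered without the exception-handling wrapper.
    for func in (explorations.list_, explorations.get, explorations.delete):
        assert getattr(func, "rpc_exceptions_handled", False)
```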
+ """ + id: int + database_id: int + name: str + base_table_oid: int + schema_oid: int + initial_columns: list + transformations: Optional[list] + display_options: Optional[list] + display_names: Optional[dict] + description: Optional[str] + + @classmethod + def from_model(cls, model): + return cls( + id=model.id, + database_id=model.database.id, + name=model.name, + base_table_oid=model.base_table_oid, + schema_oid=model.schema_oid, + initial_columns=model.initial_columns, + transformations=model.transformations, + display_options=model.display_options, + display_names=model.display_names, + description=model.description, + ) + + +class ExplorationDef(TypedDict): + """ + Definition about a runnable exploration. + + Attributes: + database_id: The Django id of the database containing the exploration. + name: The name of the exploration. + base_table_oid: The OID of the base table of the exploration on the database. + schema_oid: The OID of the schema containing the base table of the exploration. + initial_columns: A list describing the columns to be included in the exploration. + transformations: A list describing the transformations to be made on the included columns. + display_options: A list describing metadata for the columns in the explorations. + display_names: A map between the actual column names on the database and the alias to be displayed(if any). + description: The description of the exploration. + """ + database_id: int + name: str + base_table_oid: int + schema_oid: int + initial_columns: list + transformations: Optional[list] + display_options: Optional[list] + display_names: Optional[dict] + description: Optional[str] + + +class ExplorationResult(TypedDict): + """ + Result of an exploration run. + + Attributes: + query: A dict describing the exploration that ran. + records: A dict describing the total count of records along with the contents of those records. + output_columns: A tuple describing the names of the columns included in the exploration. + column_metadata: A dict describing the metadata applied to included columns. + limit: Specifies the max number of rows returned.(default 100) + offset: Specifies the number of rows skipped.(default 0) + filter: A dict describing filters applied to an exploration. + order_by: The ordering applied to the columns of an exploration. + search: Specifies a list of dicts containing column names and searched expression. + duplicate_only: A list of column names for which you want duplicate records. + """ + query: dict + records: dict + output_columns: tuple + column_metadata: dict + limit: Optional[int] + offset: Optional[int] + filter: Optional[dict] + order_by: Optional[list[dict]] + search: Optional[list[dict]] + duplicate_only: Optional[list] + + @classmethod + def from_dict(cls, e): + return cls( + query=e["query"], + records=e["records"], + output_columns=e["output_columns"], + column_metadata=e["column_metadata"], + limit=e.get("limit", None), + offset=e.get("offset", None), + filter=e.get("filter", None), + order_by=e.get("order_by", None), + search=e.get("search", None), + duplicate_only=e.get("duplicate_only", None), + ) + + +@rpc_method(name="explorations.list") +@http_basic_auth_login_required +@handle_rpc_exceptions +def list_(*, database_id: int, schema_oid: int = None, **kwargs) -> list[ExplorationInfo]: + """ + List information about explorations for a database. Exposed as `list`. + + Args: + database_id: The Django id of the database containing the explorations. 
+        schema_oid: The OID of the schema containing the base table(s) of the exploration(s) (optional).
+
+    Returns:
+        A list of exploration details.
+    """
+    explorations = list_explorations(database_id, schema_oid)
+    return [ExplorationInfo.from_model(exploration) for exploration in explorations]
+
+
+@rpc_method(name="explorations.get")
+@http_basic_auth_login_required
+@handle_rpc_exceptions
+def get(*, exploration_id: int, **kwargs) -> ExplorationInfo:
+    """
+    Get information about an exploration.
+
+    Args:
+        exploration_id: The Django id of the exploration.
+
+    Returns:
+        Exploration details for a given exploration_id.
+    """
+    exploration = get_exploration(exploration_id)
+    return ExplorationInfo.from_model(exploration)
+
+
+@rpc_method(name="explorations.delete")
+@http_basic_auth_login_required
+@handle_rpc_exceptions
+def delete(*, exploration_id: int, **kwargs) -> None:
+    """
+    Delete an exploration.
+
+    Args:
+        exploration_id: The Django id of the exploration to delete.
+    """
+    delete_exploration(exploration_id)
+
+
+@rpc_method(name="explorations.run")
+@http_basic_auth_login_required
+@handle_rpc_exceptions
+def run(*, exploration_def: ExplorationDef, limit: int = 100, offset: int = 0, **kwargs) -> ExplorationResult:
+    """
+    Run an exploration.
+
+    Args:
+        exploration_def: A dict describing an exploration to run.
+        limit: The max number of rows to return (default 100).
+        offset: The number of rows to skip (default 0).
+
+    Returns:
+        The result of the exploration run.
+    """
+    user = kwargs.get(REQUEST_KEY).user
+    with connect(exploration_def['database_id'], user) as conn:
+        exploration_result = run_exploration(exploration_def, conn, limit, offset)
+    return ExplorationResult.from_dict(exploration_result)
+
+
+@rpc_method(name='explorations.run_saved')
+@http_basic_auth_login_required
+@handle_rpc_exceptions
+def run_saved(*, exploration_id: int, limit: int = 100, offset: int = 0, **kwargs) -> ExplorationResult:
+    """
+    Run a saved exploration.
+
+    Args:
+        exploration_id: The Django id of the exploration to run.
+        limit: The max number of rows to return (default 100).
+        offset: The number of rows to skip (default 0).
+
+    Returns:
+        The result of the exploration run.
+    """
+    user = kwargs.get(REQUEST_KEY).user
+    exp_model = Explorations.objects.get(id=exploration_id)
+    with connect(exp_model.database.id, user) as conn:
+        exploration_result = run_saved_exploration(exp_model, limit, offset, conn)
+    return ExplorationResult.from_dict(exploration_result)
+
+
+@rpc_method(name='explorations.replace')
+@http_basic_auth_login_required
+@handle_rpc_exceptions
+def replace(*, new_exploration: ExplorationInfo) -> ExplorationInfo:
+    """
+    Replace a saved exploration.
+
+    Args:
+        new_exploration: A dict describing the exploration to replace, including the updated fields.
+
+    Returns:
+        The exploration details for the replaced exploration.
+    """
+    replaced_exp_model = replace_exploration(new_exploration)
+    return ExplorationInfo.from_model(replaced_exp_model)
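A paging sketch for `run_saved`, reusing the `rpc` helper from the first example. The exploration id is a placeholder, and the assumption that the `records` dict carries a `results` list alongside its count follows the docstring but is not spelled out in this diff:

```python
page_size = 50
offset = 0
while True:
    result = rpc(
        "explorations.run_saved",
        exploration_id=42,   # placeholder id
        limit=page_size,
        offset=offset,
    )
    rows = result["records"].get("results", [])  # assumed key
    if not rows:
        break
    offset += page_size
```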
+ """ + exp_model = create_exploration(exploration_def) + return ExplorationInfo.from_model(exp_model) diff --git a/mathesar/rpc/records.py b/mathesar/rpc/records.py new file mode 100644 index 0000000000..8558b67b6c --- /dev/null +++ b/mathesar/rpc/records.py @@ -0,0 +1,435 @@ +""" +Classes and functions exposed to the RPC endpoint for managing table records. +""" +from typing import Any, Literal, Optional, TypedDict, Union + +from modernrpc.core import rpc_method, REQUEST_KEY +from modernrpc.auth.basic import http_basic_auth_login_required + +from db.records.operations import delete as record_delete +from db.records.operations import insert as record_insert +from db.records.operations import select as record_select +from db.records.operations import update as record_update +from mathesar.rpc.exceptions.handlers import handle_rpc_exceptions +from mathesar.rpc.utils import connect + + +class OrderBy(TypedDict): + """ + An object defining an `ORDER BY` clause. + + Attributes: + attnum: The attnum of the column to order by. + direction: The direction to order by. + """ + attnum: int + direction: Literal["asc", "desc"] + + +class FilterAttnum(TypedDict): + """ + An object choosing a column for a filter. + + Attributes: + type: Must be `"attnum"` + value: The attnum of the column to filter by + """ + type: Literal["attnum"] + value: int + + +class FilterLiteral(TypedDict): + """ + An object defining a literal for an argument to a filter. + + Attributes: + type: must be `"literal"`. + value: The value of the literal. + """ + type: Literal["literal"] + value: Any + + +class Filter(TypedDict): + """ + An object defining a filter to be used in a `WHERE` clause. + + For valid `type` values, see the `msar.filter_templates` table + defined in `mathesar/db/sql/00_msar.sql`. + + Attributes: + type: a function or operator to be used in filtering. + args: The ordered arguments for the function or operator. + """ + type: str + args: list[Union['Filter', FilterAttnum, FilterLiteral]] + + +class SearchParam(TypedDict): + """ + Search definition for a single column. + + Attributes: + attnum: The attnum of the column in the table. + literal: The literal to search for in the column. + """ + attnum: int + literal: Any + + +class Grouping(TypedDict): + """ + Grouping definition. + + The table involved must have a single column primary key. + + Attributes: + columns: The columns to be grouped by. + preproc: The preprocessing funtions to apply (if any). + """ + columns: list[int] + preproc: list[str] + + +class Group(TypedDict): + """ + Group definition. + + Note that the `count` is over all rows in the group, whether returned + or not. However, `result_indices` is restricted to only the rows + returned. This is to avoid potential problems if there are many rows + in the group (e.g., the whole table), but we only return a few. + + Attributes: + id: The id of the group. Consistent for same input. + count: The number of items in the group. + results_eq: The value the results of the group equal. + result_indices: The 0-indexed positions of group members in the + results array. + """ + id: int + count: int + results_eq: list[dict] + result_indices: list[int] + + +class GroupingResponse(TypedDict): + """ + Grouping response object. Extends Grouping with actual groups. + + Attributes: + columns: The columns to be grouped by. + preproc: The preprocessing funtions to apply (if any). + groups: The groups applicable to the records being returned. 
+ """ + columns: list[int] + preproc: list[str] + groups: list[Group] + + +class RecordList(TypedDict): + """ + Records from a table, along with some meta data + + The form of the objects in the `results` array is determined by the + underlying records being listed. The keys of each object are the + attnums of the retrieved columns. The values are the value for the + given row, for the given column. + + Attributes: + count: The total number of records in the table. + results: An array of record objects. + grouping: Information for displaying grouped records. + linked_record_smmaries: Information for previewing foreign key + values, provides a map of foreign key to a text summary. + record_summaries: Information for previewing returned records. + """ + count: int + results: list[dict] + grouping: GroupingResponse + linked_record_summaries: dict[str, dict[str, str]] + record_summaries: dict[str, str] + query: str + + @classmethod + def from_dict(cls, d): + return cls( + count=d["count"], + results=d["results"], + grouping=d.get("grouping"), + linked_record_summaries=d.get("linked_record_summaries"), + record_summaries=d.get("record_summaries"), + query=d["query"], + ) + + +class RecordAdded(TypedDict): + """ + Record from a table, along with some meta data + + The form of the object in the `results` array is determined by the + underlying records being listed. The keys of each object are the + attnums of the retrieved columns. The values are the value for the + given row, for the given column. + + Attributes: + results: An array of a single record objects (the one added). + linked_record_summaries: Information for previewing foreign key + values, provides a map of foreign key to a text summary. + record_summaries: Information for previewing an added record. + """ + results: list[dict] + linked_record_summaries: dict[str, dict[str, str]] + record_summaries: dict[str, str] + + @classmethod + def from_dict(cls, d): + return cls( + results=d["results"], + linked_record_summaries=d.get("linked_record_summaries"), + record_summaries=d.get("record_summaries"), + ) + + +@rpc_method(name="records.list") +@http_basic_auth_login_required +@handle_rpc_exceptions +def list_( + *, + table_oid: int, + database_id: int, + limit: int = None, + offset: int = None, + order: list[OrderBy] = None, + filter: Filter = None, + grouping: Grouping = None, + return_record_summaries: bool = False, + **kwargs +) -> RecordList: + """ + List records from a table, and its row count. Exposed as `list`. + + Args: + table_oid: Identity of the table in the user's database. + database_id: The Django id of the database containing the table. + limit: The maximum number of rows we'll return. + offset: The number of rows to skip before returning records from + following rows. + order: An array of ordering definition objects. + filter: An array of filter definition objects. + grouping: An array of group definition objects. + return_record_summaries: Whether to return summaries of retrieved + records. + + Returns: + The requested records, along with some metadata. 
+ """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + record_info = record_select.list_records_from_table( + conn, + table_oid, + limit=limit, + offset=offset, + order=order, + filter=filter, + group=grouping, + return_record_summaries=return_record_summaries, + ) + return RecordList.from_dict(record_info) + + +@rpc_method(name="records.get") +@http_basic_auth_login_required +@handle_rpc_exceptions +def get( + *, + record_id: Any, + table_oid: int, + database_id: int, + return_record_summaries: bool = False, + **kwargs +) -> RecordList: + """ + Get single record from a table by its primary key. + + Args: + record_id: The primary key value of the record to be gotten. + table_oid: Identity of the table in the user's database. + database_id: The Django id of the database containing the table. + return_record_summaries: Whether to return summaries of the + retrieved record. + + Returns: + The requested record, along with some metadata. + """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + record_info = record_select.get_record_from_table( + conn, + record_id, + table_oid, + return_record_summaries=return_record_summaries + ) + return RecordList.from_dict(record_info) + + +@rpc_method(name="records.add") +@http_basic_auth_login_required +@handle_rpc_exceptions +def add( + *, + record_def: dict, + table_oid: int, + database_id: int, + return_record_summaries: bool = False, + **kwargs +) -> RecordAdded: + """ + Add a single record to a table. + + The form of the `record_def` is determined by the underlying table. + Keys should be attnums, and values should be the desired value for + that column in the created record. Missing keys will use default + values (if set on the DB), and explicit `null` values will set null + for that value regardless of default (with obvious exceptions where + that would violate some constraint) + + Args: + record_def: An object representing the record to be added. + table_oid: Identity of the table in the user's database. + database_id: The Django id of the database containing the table. + return_record_summaries: Whether to return summaries of the added + record. + + Returns: + The created record, along with some metadata. + """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + record_info = record_insert.add_record_to_table( + conn, + record_def, + table_oid, + return_record_summaries=return_record_summaries, + ) + return RecordAdded.from_dict(record_info) + + +@rpc_method(name="records.patch") +@http_basic_auth_login_required +@handle_rpc_exceptions +def patch( + *, + record_def: dict, + record_id: Any, + table_oid: int, + database_id: int, + return_record_summaries: bool = False, + **kwargs +) -> RecordAdded: + """ + Modify a record in a table. + + The form of the `record_def` is determined by the underlying table. + Keys should be attnums, and values should be the desired value for + that column in the modified record. Explicit `null` values will set + null for that value (with obvious exceptions where that would violate + some constraint). + + Args: + record_def: An object representing the record to be added. + record_id: The primary key value of the record to modify. + table_oid: Identity of the table in the user's database. + database_id: The Django id of the database containing the table. + return_record_summaries: Whether to return summaries of the + modified record. + + Returns: + The modified record, along with some metadata. 
+ """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + record_info = record_update.patch_record_in_table( + conn, + record_def, + record_id, + table_oid, + return_record_summaries=return_record_summaries, + ) + return RecordAdded.from_dict(record_info) + + +@rpc_method(name="records.delete") +@http_basic_auth_login_required +@handle_rpc_exceptions +def delete( + *, + record_ids: list[Any], + table_oid: int, + database_id: int, + **kwargs +) -> Optional[int]: + """ + Delete records from a table by primary key. + + Args: + record_ids: The primary key values of the records to be deleted. + table_oid: The identity of the table in the user's database. + database_id: The Django id of the database containing the table. + + Returns: + The number of records deleted. + """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + num_deleted = record_delete.delete_records_from_table( + conn, + record_ids, + table_oid, + ) + return num_deleted + + +@rpc_method(name="records.search") +@http_basic_auth_login_required +@handle_rpc_exceptions +def search( + *, + table_oid: int, + database_id: int, + search_params: list[SearchParam] = [], + limit: int = 10, + return_record_summaries: bool = False, + **kwargs +) -> RecordList: + """ + List records from a table according to `search_params`. + + + Literals will be searched for in a basic way in string-like columns, + but will have to match exactly in non-string-like columns. + + Records are assigned a score based on how many matches, and of what + quality, they have with the passed search parameters. + + Args: + table_oid: Identity of the table in the user's database. + database_id: The Django id of the database containing the table. + search_params: Results are ranked and filtered according to the + objects passed here. + limit: The maximum number of rows we'll return. + + Returns: + The requested records, along with some metadata. + """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + record_info = record_select.search_records_from_table( + conn, + table_oid, + search=search_params, + limit=limit, + return_record_summaries=return_record_summaries, + ) + return RecordList.from_dict(record_info) diff --git a/mathesar/rpc/roles/__init__.py b/mathesar/rpc/roles/__init__.py new file mode 100644 index 0000000000..44c8b96840 --- /dev/null +++ b/mathesar/rpc/roles/__init__.py @@ -0,0 +1 @@ +from .base import * # noqa diff --git a/mathesar/rpc/roles/base.py b/mathesar/rpc/roles/base.py new file mode 100644 index 0000000000..167359c529 --- /dev/null +++ b/mathesar/rpc/roles/base.py @@ -0,0 +1,192 @@ +""" +Classes and functions exposed to the RPC endpoint for managing table columns. +""" +from typing import Optional, TypedDict + +from modernrpc.core import rpc_method, REQUEST_KEY +from modernrpc.auth.basic import http_basic_auth_login_required + +from mathesar.rpc.exceptions.handlers import handle_rpc_exceptions +from mathesar.rpc.utils import connect +from db.roles.operations.select import list_roles, get_current_role_from_db +from db.roles.operations.create import create_role +from db.roles.operations.drop import drop_role +from db.roles.operations.membership import set_members_to_role + + +class RoleMember(TypedDict): + """ + Information about a member role of a directly inherited role. + + Attributes: + oid: The OID of the member role. + admin: Whether the member role has ADMIN option on the inherited role. 
+ """ + oid: int + admin: bool + + +class RoleInfo(TypedDict): + """ + Information about a role. + + Attributes: + oid: The OID of the role. + name: Name of the role. + super: Whether the role has SUPERUSER status. + inherits: Whether the role has INHERIT attribute. + create_role: Whether the role has CREATEROLE attribute. + create_db: Whether the role has CREATEDB attribute. + login: Whether the role has LOGIN attribute. + description: A description of the role + members: The member roles that directly inherit the role. + + Refer PostgreSQL documenation on: + - [pg_roles table](https://www.postgresql.org/docs/current/view-pg-roles.html). + - [Role attributes](https://www.postgresql.org/docs/current/role-attributes.html) + - [Role membership](https://www.postgresql.org/docs/current/role-membership.html) + """ + oid: int + name: str + super: bool + inherits: bool + create_role: bool + create_db: bool + login: bool + description: Optional[str] + members: Optional[list[RoleMember]] + + @classmethod + def from_dict(cls, d): + return cls( + oid=d["oid"], + name=d["name"], + super=d["super"], + inherits=d["inherits"], + create_role=d["create_role"], + create_db=d["create_db"], + login=d["login"], + description=d["description"], + members=d["members"] + ) + + +@rpc_method(name="roles.list") +@http_basic_auth_login_required +@handle_rpc_exceptions +def list_(*, database_id: int, **kwargs) -> list[RoleInfo]: + """ + List information about roles for a database server. Exposed as `list`. + Requires a database id inorder to connect to the server. + + Args: + database_id: The Django id of the database. + + Returns: + A list of roles present on the database server. + """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + roles = list_roles(conn) + return [RoleInfo.from_dict(role) for role in roles] + + +@rpc_method(name="roles.add") +@http_basic_auth_login_required +@handle_rpc_exceptions +def add( + *, + rolename: str, + database_id: int, + password: str = None, + login: bool = None, + **kwargs +) -> RoleInfo: + """ + Add a new login/non-login role on a database server. + + Args: + rolename: The name of the role to be created. + database_id: The Django id of the database. + password: The password for the rolename to set. + login: Whether the role to be created could login. + + Returns: + A dict describing the created role. + """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + role = create_role(rolename, password, login, conn) + return RoleInfo.from_dict(role) + + +@rpc_method(name="roles.delete") +@http_basic_auth_login_required +@handle_rpc_exceptions +def delete( + *, + role_oid: int, + database_id: int, + **kwargs +) -> None: + """ + Drop a role on a database server. + + Args: + role_oid: The OID of the role to drop on the database. + database_id: The Django id of the database. + """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + drop_role(role_oid, conn) + + +@rpc_method(name="roles.get_current_role") +@http_basic_auth_login_required +@handle_rpc_exceptions +def get_current_role(*, database_id: int, **kwargs) -> dict: + """ + Get information about the current role and all the parent role(s) whose + privileges are immediately available to current role without doing SET ROLE. + + Args: + database_id: The Django id of the database. + + Returns: + A dict describing the current role. 
+ """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + current_role = get_current_role_from_db(conn) + return { + "current_role": RoleInfo.from_dict(current_role["current_role"]), + "parent_roles": [RoleInfo.from_dict(role) for role in current_role["parent_roles"]] + } + + +@rpc_method(name="roles.set_members") +@http_basic_auth_login_required +@handle_rpc_exceptions +def set_members( + *, + parent_role_oid: int, + members: list, + database_id: int, + **kwargs +) -> RoleInfo: + """ + Grant/Revoke direct membership to/from roles. + + Args: + parent_role_oid: The OID of role whose membership will be granted/revoked to/from other roles. + members: An array of role OID(s) whom we want to grant direct membership of the parent role. + Only the OID(s) present in the array will be granted membership of parent role, + Membership will be revoked for existing members not present in this array. + + Returns: + A dict describing the updated information of the parent role. + """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + parent_role = set_members_to_role(parent_role_oid, members, conn) + return RoleInfo.from_dict(parent_role) diff --git a/mathesar/rpc/roles/configured.py b/mathesar/rpc/roles/configured.py new file mode 100644 index 0000000000..8c92444127 --- /dev/null +++ b/mathesar/rpc/roles/configured.py @@ -0,0 +1,112 @@ +from typing import TypedDict + +from modernrpc.core import rpc_method +from modernrpc.auth.basic import http_basic_auth_login_required, http_basic_auth_superuser_required + +from mathesar.models.base import ConfiguredRole, Server +from mathesar.rpc.exceptions.handlers import handle_rpc_exceptions + + +class ConfiguredRoleInfo(TypedDict): + """ + Information about a role configured in Mathesar. + + Attributes: + id: the Django ID of the ConfiguredRole model instance. + name: The name of the role. + server_id: The Django ID of the Server model instance for the role. + """ + id: int + name: str + server_id: int + + @classmethod + def from_model(cls, model): + return cls( + id=model.id, + name=model.name, + server_id=model.server.id + ) + + +@rpc_method(name="roles.configured.list") +@http_basic_auth_login_required +@handle_rpc_exceptions +def list_(*, server_id: int, **kwargs) -> list[ConfiguredRoleInfo]: + """ + List information about roles configured in Mathesar. Exposed as `list`. + + Args: + server_id: The Django id of the Server containing the configured roles. + + Returns: + A list of configured roles. + """ + configured_role_qs = ConfiguredRole.objects.filter(server__id=server_id) + + return [ConfiguredRoleInfo.from_model(db_model) for db_model in configured_role_qs] + + +@rpc_method(name='roles.configured.add') +@http_basic_auth_superuser_required +@handle_rpc_exceptions +def add( + *, + server_id: int, + name: str, + password: str, + **kwargs +) -> ConfiguredRoleInfo: + """ + Configure a role in Mathesar for a database server. + + Args: + server_id: The Django id of the Server to contain the configured role. + name: The name of the role. + password: The password for the role. + + Returns: + The newly configured role. 
+ """ + server = Server.objects.get(id=server_id) + configured_role = ConfiguredRole.objects.create( + server=server, + name=name, + password=password + ) + return ConfiguredRoleInfo.from_model(configured_role) + + +@rpc_method(name='roles.configured.delete') +@http_basic_auth_superuser_required +@handle_rpc_exceptions +def delete(*, configured_role_id: int, **kwargs): + """ + Delete a configured role for a server. + + Args: + configured_role_id: The Django id of the ConfiguredRole model instance. + """ + configured_role = ConfiguredRole.objects.get(id=configured_role_id) + configured_role.delete() + + +@rpc_method(name='roles.configured.set_password') +@http_basic_auth_superuser_required +@handle_rpc_exceptions +def set_password( + *, + configured_role_id: int, + password: str, + **kwargs +): + """ + Set the password of a configured role for a server. + + Args: + configured_role_id: The Django id of the ConfiguredRole model instance. + password: The password for the role. + """ + configured_role = ConfiguredRole.objects.get(id=configured_role_id) + configured_role.password = password + configured_role.save() diff --git a/mathesar/rpc/schemas/__init__.py b/mathesar/rpc/schemas/__init__.py new file mode 100644 index 0000000000..44c8b96840 --- /dev/null +++ b/mathesar/rpc/schemas/__init__.py @@ -0,0 +1 @@ +from .base import * # noqa diff --git a/mathesar/rpc/schemas/base.py b/mathesar/rpc/schemas/base.py new file mode 100644 index 0000000000..c3d91c4eba --- /dev/null +++ b/mathesar/rpc/schemas/base.py @@ -0,0 +1,156 @@ +""" +Classes and functions exposed to the RPC endpoint for managing schemas. +""" +from typing import Literal, Optional, TypedDict + +from modernrpc.core import rpc_method, REQUEST_KEY +from modernrpc.auth.basic import http_basic_auth_login_required + +from db.constants import INTERNAL_SCHEMAS +from db.schemas.operations.create import create_schema +from db.schemas.operations.select import list_schemas, get_schema +from db.schemas.operations.drop import drop_schema_via_oid +from db.schemas.operations.alter import patch_schema +from mathesar.rpc.exceptions.handlers import handle_rpc_exceptions +from mathesar.rpc.utils import connect + + +class SchemaInfo(TypedDict): + """ + Information about a schema + + Attributes: + oid: The OID of the schema + name: The name of the schema + description: A description of the schema + owner_oid: The OID of the owner of the schema + current_role_priv: All privileges available to the calling role + on the schema. + current_role_owns: Whether the current role is the owner of the + schema (even indirectly). + table_count: The number of tables in the schema + """ + oid: int + name: str + description: Optional[str] + owner_oid: int + current_role_priv: list[Literal['USAGE', 'CREATE']] + current_role_owns: bool + table_count: int + + +class SchemaPatch(TypedDict): + """ + Attributes: + name: The name of the schema + description: A description of the schema + """ + name: Optional[str] + description: Optional[str] + + +@rpc_method(name="schemas.add") +@http_basic_auth_login_required +@handle_rpc_exceptions +def add( + *, + name: str, + database_id: int, + owner_oid: int = None, + description: Optional[str] = None, + **kwargs, +) -> SchemaInfo: + """ + Add a schema + + Args: + name: The name of the schema to add. + database_id: The Django id of the database containing the schema. + owner_oid: The OID of the role who will own the new schema. + If owner_oid is None, the current role will be the owner of the new schema. 
diff --git a/mathesar/rpc/schemas/__init__.py b/mathesar/rpc/schemas/__init__.py
new file mode 100644
index 0000000000..44c8b96840
--- /dev/null
+++ b/mathesar/rpc/schemas/__init__.py
@@ -0,0 +1 @@
+from .base import * # noqa
diff --git a/mathesar/rpc/schemas/base.py b/mathesar/rpc/schemas/base.py
new file mode 100644
index 0000000000..c3d91c4eba
--- /dev/null
+++ b/mathesar/rpc/schemas/base.py
@@ -0,0 +1,156 @@
+"""
+Classes and functions exposed to the RPC endpoint for managing schemas.
+"""
+from typing import Literal, Optional, TypedDict
+
+from modernrpc.core import rpc_method, REQUEST_KEY
+from modernrpc.auth.basic import http_basic_auth_login_required
+
+from db.constants import INTERNAL_SCHEMAS
+from db.schemas.operations.create import create_schema
+from db.schemas.operations.select import list_schemas, get_schema
+from db.schemas.operations.drop import drop_schema_via_oid
+from db.schemas.operations.alter import patch_schema
+from mathesar.rpc.exceptions.handlers import handle_rpc_exceptions
+from mathesar.rpc.utils import connect
+
+
+class SchemaInfo(TypedDict):
+    """
+    Information about a schema.
+
+    Attributes:
+        oid: The OID of the schema.
+        name: The name of the schema.
+        description: A description of the schema.
+        owner_oid: The OID of the owner of the schema.
+        current_role_priv: All privileges available to the calling role
+            on the schema.
+        current_role_owns: Whether the current role is the owner of the
+            schema (even indirectly).
+        table_count: The number of tables in the schema.
+    """
+    oid: int
+    name: str
+    description: Optional[str]
+    owner_oid: int
+    current_role_priv: list[Literal['USAGE', 'CREATE']]
+    current_role_owns: bool
+    table_count: int
+
+
+class SchemaPatch(TypedDict):
+    """
+    Attributes:
+        name: The name of the schema.
+        description: A description of the schema.
+    """
+    name: Optional[str]
+    description: Optional[str]
+
+
+@rpc_method(name="schemas.add")
+@http_basic_auth_login_required
+@handle_rpc_exceptions
+def add(
+    *,
+    name: str,
+    database_id: int,
+    owner_oid: int = None,
+    description: Optional[str] = None,
+    **kwargs,
+) -> SchemaInfo:
+    """
+    Add a schema.
+
+    Args:
+        name: The name of the schema to add.
+        database_id: The Django id of the database containing the schema.
+        owner_oid: The OID of the role who will own the new schema.
+            If owner_oid is None, the current role will be the owner of the new schema.
+        description: A description of the schema.
+
+    Returns:
+        The SchemaInfo describing the user-defined schema in the database.
+    """
+    with connect(database_id, kwargs.get(REQUEST_KEY).user) as conn:
+        return create_schema(
+            schema_name=name,
+            conn=conn,
+            owner_oid=owner_oid,
+            description=description
+        )
+
+
+@rpc_method(name="schemas.list")
+@http_basic_auth_login_required
+@handle_rpc_exceptions
+def list_(*, database_id: int, **kwargs) -> list[SchemaInfo]:
+    """
+    List information about schemas in a database. Exposed as `list`.
+
+    Args:
+        database_id: The Django id of the database containing the schemas.
+
+    Returns:
+        A list of SchemaInfo objects.
+    """
+    user = kwargs.get(REQUEST_KEY).user
+    with connect(database_id, user) as conn:
+        schemas = list_schemas(conn)
+
+    return [s for s in schemas if s['name'] not in INTERNAL_SCHEMAS]
+
+
+@rpc_method(name="schemas.get")
+@http_basic_auth_login_required
+@handle_rpc_exceptions
+def get(*, schema_oid: int, database_id: int, **kwargs) -> SchemaInfo:
+    """
+    Get information about a schema in a database.
+
+    Args:
+        schema_oid: The OID of the schema to get.
+        database_id: The Django id of the database containing the schema.
+
+    Returns:
+        The SchemaInfo describing the user-defined schema in the database.
+    """
+    user = kwargs.get(REQUEST_KEY).user
+    with connect(database_id, user) as conn:
+        schema_info = get_schema(schema_oid, conn)
+    return schema_info
+
+
+@rpc_method(name="schemas.delete")
+@http_basic_auth_login_required
+@handle_rpc_exceptions
+def delete(*, schema_oid: int, database_id: int, **kwargs) -> None:
+    """
+    Delete a schema, given its OID.
+
+    Args:
+        schema_oid: The OID of the schema to delete.
+        database_id: The Django id of the database containing the schema.
+    """
+    with connect(database_id, kwargs.get(REQUEST_KEY).user) as conn:
+        drop_schema_via_oid(conn, schema_oid)
+
+
+@rpc_method(name="schemas.patch")
+@http_basic_auth_login_required
+@handle_rpc_exceptions
+def patch(*, schema_oid: int, database_id: int, patch: SchemaPatch, **kwargs) -> SchemaInfo:
+    """
+    Patch a schema, given its OID.
+
+    Args:
+        schema_oid: The OID of the schema to patch.
+        database_id: The Django id of the database containing the schema.
+        patch: A SchemaPatch object containing the fields to update.
+
+    Returns:
+        The SchemaInfo describing the user-defined schema in the database.
+    """
+    with connect(database_id, kwargs.get(REQUEST_KEY).user) as conn:
+        return patch_schema(schema_oid, conn, patch)
diff --git a/mathesar/rpc/schemas/privileges.py b/mathesar/rpc/schemas/privileges.py
new file mode 100644
index 0000000000..14a8974de8
--- /dev/null
+++ b/mathesar/rpc/schemas/privileges.py
@@ -0,0 +1,114 @@
+from typing import Literal, TypedDict
+
+from modernrpc.core import rpc_method, REQUEST_KEY
+from modernrpc.auth.basic import http_basic_auth_login_required
+
+from db.roles.operations.ownership import transfer_schema_ownership
+from db.roles.operations.select import list_schema_privileges
+from db.roles.operations.update import replace_schema_privileges_for_roles
+from mathesar.rpc.utils import connect
+from mathesar.rpc.exceptions.handlers import handle_rpc_exceptions
+from mathesar.rpc.schemas.base import SchemaInfo
+
+
+class SchemaPrivileges(TypedDict):
+    """
+    Information about schema privileges for a role.
+
+    Attributes:
+        role_oid: The `oid` of the role.
+        direct: A list of schema privileges for the aforementioned role_oid.
+ """ + role_oid: int + direct: list[Literal['USAGE', 'CREATE']] + + @classmethod + def from_dict(cls, d): + return cls( + role_oid=d["role_oid"], + direct=d["direct"] + ) + + +@rpc_method(name="schemas.privileges.list_direct") +@http_basic_auth_login_required +@handle_rpc_exceptions +def list_direct( + *, schema_oid: int, database_id: int, **kwargs +) -> list[SchemaPrivileges]: + """ + List direct schema privileges for roles. + + Args: + schema_oid: The OID of the schema whose privileges we'll list. + database_id: The Django id of the database containing the schema. + + Returns: + A list of schema privileges. + """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + raw_priv = list_schema_privileges(schema_oid, conn) + return [SchemaPrivileges.from_dict(i) for i in raw_priv] + + +@rpc_method(name="schemas.privileges.replace_for_roles") +@http_basic_auth_login_required +@handle_rpc_exceptions +def replace_for_roles( + *, + privileges: list[SchemaPrivileges], schema_oid: int, database_id: int, + **kwargs +) -> list[SchemaPrivileges]: + """ + Replace direct schema privileges for roles. + + Possible privileges are `USAGE` and `CREATE`. + + Only roles which are included in a passed `SchemaPrivileges` object + are affected. + + WARNING: Any privilege included in the `direct` list for a role + is GRANTed, and any privilege not included is REVOKEd. + + Args: + privileges: The new privilege sets for roles. + schema_oid: The OID of the affected schema. + database_id: The Django id of the database containing the schema. + + Returns: + A list of all non-default privileges on the schema after the + operation. + """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + raw_priv = replace_schema_privileges_for_roles( + conn, schema_oid, [SchemaPrivileges.from_dict(i) for i in privileges] + ) + return [SchemaPrivileges.from_dict(i) for i in raw_priv] + + +@rpc_method(name="schemas.privileges.transfer_ownership") +@http_basic_auth_login_required +@handle_rpc_exceptions +def transfer_ownership(*, schema_oid: int, new_owner_oid: int, database_id: int, **kwargs) -> SchemaInfo: + """ + Transfers ownership of a given schema to a new owner. + + Attributes: + schema_oid: The OID of the schema to transfer. + new_owner_oid: The OID of the role whom we want to be the new owner of the schema. + + Note: To successfully transfer ownership of a schema to a new owner the current user must: + - Be a Superuser/Owner of the schema. + - Be a `MEMBER` of the new owning role. i.e. The current role should be able to `SET ROLE` + to the new owning role. + - Have `CREATE` privilege for the database. + + Returns: + Information about the schema, and the current user privileges. 
+ """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + schema_info = transfer_schema_ownership(schema_oid, new_owner_oid, conn) + return SchemaInfo(schema_info) diff --git a/mathesar/rpc/servers/__init__.py b/mathesar/rpc/servers/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/mathesar/rpc/servers/configured.py b/mathesar/rpc/servers/configured.py new file mode 100644 index 0000000000..0d488966d6 --- /dev/null +++ b/mathesar/rpc/servers/configured.py @@ -0,0 +1,44 @@ +from typing import TypedDict + +from modernrpc.core import rpc_method +from modernrpc.auth.basic import http_basic_auth_login_required + +from mathesar.models.base import Server +from mathesar.rpc.exceptions.handlers import handle_rpc_exceptions + + +class ConfiguredServerInfo(TypedDict): + """ + Information about a database server. + + Attributes: + id: the Django ID of the server model instance. + host: The host of the database server. + port: the port of the database server. + """ + id: int + host: str + port: int + + @classmethod + def from_model(cls, model): + return cls( + id=model.id, + host=model.host, + port=model.port + ) + + +@rpc_method(name="servers.configured.list") +@http_basic_auth_login_required +@handle_rpc_exceptions +def list_() -> list[ConfiguredServerInfo]: + """ + List information about servers. Exposed as `list`. + + Returns: + A list of server details. + """ + server_qs = Server.objects.all() + + return [ConfiguredServerInfo.from_model(db_model) for db_model in server_qs] diff --git a/mathesar/rpc/tables/__init__.py b/mathesar/rpc/tables/__init__.py new file mode 100644 index 0000000000..44c8b96840 --- /dev/null +++ b/mathesar/rpc/tables/__init__.py @@ -0,0 +1 @@ +from .base import * # noqa diff --git a/mathesar/rpc/tables/base.py b/mathesar/rpc/tables/base.py new file mode 100644 index 0000000000..02cdeb4a6e --- /dev/null +++ b/mathesar/rpc/tables/base.py @@ -0,0 +1,406 @@ +""" +Classes and functions exposed to the RPC endpoint for managing tables in a database. +""" +from typing import Literal, Optional, TypedDict + +from modernrpc.core import rpc_method, REQUEST_KEY +from modernrpc.auth.basic import http_basic_auth_login_required + +from db.tables.operations.select import get_table_info, get_table, list_joinable_tables +from db.tables.operations.drop import drop_table_from_database +from db.tables.operations.create import create_table_on_database +from db.tables.operations.alter import alter_table_on_database +from db.tables.operations.import_ import import_csv, get_preview +from mathesar.rpc.columns import CreatableColumnInfo, SettableColumnInfo, PreviewableColumnInfo +from mathesar.rpc.constraints import CreatableConstraintInfo +from mathesar.rpc.exceptions.handlers import handle_rpc_exceptions +from mathesar.rpc.tables.metadata import TableMetaDataBlob +from mathesar.rpc.utils import connect +from mathesar.utils.tables import list_tables_meta_data, get_table_meta_data + + +class TableInfo(TypedDict): + """ + Information about a table. + + Attributes: + oid: The `oid` of the table in the schema. + name: The name of the table. + schema: The `oid` of the schema where the table lives. + description: The description of the table. + owner_oid: The OID of the direct owner of the table. + current_role_priv: The privileges available to the user on the table. + current_role_owns: Whether the current role owns the table. 
+ """ + oid: int + name: str + schema: int + description: Optional[str] + owner_oid: int + current_role_priv: list[ + Literal[ + 'SELECT', + 'INSERT', + 'UPDATE', + 'DELETE', + 'TRUNCATE', + 'REFERENCES', + 'TRIGGER' + ] + ] + current_role_owns: bool + + +class AddedTableInfo(TypedDict): + """ + Information about a newly created table. + + Attributes: + oid: The `oid` of the table in the schema. + name: The name of the table. + """ + oid: int + name: str + + +class SettableTableInfo(TypedDict): + """ + Information about a table, restricted to settable fields. + + When possible, Passing `null` for a key will clear the underlying + setting. E.g., + + - `description = null` clears the table description. + + Setting any of `name`, `columns` to `null` is a noop. + + Attributes: + name: The new name of the table. + description: The description of the table. + columns: A list describing desired column alterations. + """ + name: Optional[str] + description: Optional[str] + columns: Optional[list[SettableColumnInfo]] + + +class JoinableTableRecord(TypedDict): + """ + Information about a singular joinable table. + + Attributes: + base: The OID of the table from which the paths start + target: The OID of the table where the paths end. + join_path: A list describing joinable paths in the following form: + [ + [[L_oid0, L_attnum0], [R_oid0, R_attnum0]], + [[L_oid1, L_attnum1], [R_oid1, R_attnum1]], + [[L_oid2, L_attnum2], [R_oid2, R_attnum2]], + ... + ] + + Here, [L_oidN, L_attnumN] represents the left column of a join, and [R_oidN, R_attnumN] the right. + fkey_path: Same as `join_path` expressed in terms of foreign key constraints in the following form: + [ + [constraint_id0, reversed], + [constraint_id1, reversed], + ] + + In this form, `constraint_idN` is a foreign key constraint, and `reversed` is a boolean giving + whether to travel from referrer to referant (when False) or from referant to referrer (when True). + depth: Specifies how far to search for joinable tables. + multiple_results: Specifies whether the path included is reversed. + """ + base: int + target: int + join_path: list + fkey_path: list + depth: int + multiple_results: bool + + @classmethod + def from_dict(cls, joinables): + return cls( + base=joinables["base"], + target=joinables["target"], + join_path=joinables["join_path"], + fkey_path=joinables["fkey_path"], + depth=joinables["depth"], + multiple_results=joinables["multiple_results"] + ) + + +class JoinableTableInfo(TypedDict): + """ + Information about joinable table(s). + + Attributes: + joinable_tables: List of reachable joinable table(s) from a base table. + target_table_info: Additional info about target table(s) and its column(s). + """ + joinable_tables: list[JoinableTableRecord] + target_table_info: list + + @classmethod + def from_dict(cls, joinable_dict): + return cls( + joinable_tables=[JoinableTableRecord.from_dict(j) for j in joinable_dict["joinable_tables"]], + target_table_info=joinable_dict["target_table_info"] + ) + + +@rpc_method(name="tables.list") +@http_basic_auth_login_required +@handle_rpc_exceptions +def list_(*, schema_oid: int, database_id: int, **kwargs) -> list[TableInfo]: + """ + List information about tables for a schema. Exposed as `list`. + + Args: + schema_oid: Identity of the schema in the user's database. + database_id: The Django id of the database containing the table. + + Returns: + A list of table details. 
+ """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + raw_table_info = get_table_info(schema_oid, conn) + return [ + TableInfo(tab) for tab in raw_table_info + ] + + +@rpc_method(name="tables.get") +@http_basic_auth_login_required +@handle_rpc_exceptions +def get(*, table_oid: int, database_id: int, **kwargs) -> TableInfo: + """ + List information about a table for a schema. + + Args: + table_oid: Identity of the table in the user's database. + database_id: The Django id of the database containing the table. + + Returns: + Table details for a given table oid. + """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + raw_table_info = get_table(table_oid, conn) + return TableInfo(raw_table_info) + + +@rpc_method(name="tables.add") +@http_basic_auth_login_required +@handle_rpc_exceptions +def add( + *, + schema_oid: int, + database_id: int, + table_name: str = None, + column_data_list: list[CreatableColumnInfo] = [], + constraint_data_list: list[CreatableConstraintInfo] = [], + owner_oid: int = None, + comment: str = None, + **kwargs +) -> AddedTableInfo: + """ + Add a table with a default id column. + + Args: + schema_oid: Identity of the schema in the user's database. + database_id: The Django id of the database containing the table. + table_name: Name of the table to be created. + column_data_list: A list describing columns to be created for the new table, in order. + constraint_data_list: A list describing constraints to be created for the new table. + owner_oid: The OID of the role who will own the new table. + If owner_oid is None, the current role will be the owner of the new table. + comment: The comment for the new table. + + Returns: + The `oid` & `name` of the created table. + """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + created_table_oid = create_table_on_database( + table_name, schema_oid, conn, column_data_list, constraint_data_list, owner_oid, comment + ) + return created_table_oid + + +@rpc_method(name="tables.delete") +@http_basic_auth_login_required +@handle_rpc_exceptions +def delete( + *, table_oid: int, database_id: int, cascade: bool = False, **kwargs +) -> str: + """ + Delete a table from a schema. + + Args: + table_oid: Identity of the table in the user's database. + database_id: The Django id of the database containing the table. + cascade: Whether to drop the dependent objects. + + Returns: + The name of the dropped table. + """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + return drop_table_from_database(table_oid, conn, cascade) + + +@rpc_method(name="tables.patch") +@http_basic_auth_login_required +@handle_rpc_exceptions +def patch( + *, table_oid: str, table_data_dict: SettableTableInfo, database_id: int, **kwargs +) -> str: + """ + Alter details of a preexisting table in a database. + + Args: + table_oid: Identity of the table whose name, description or columns we'll modify. + table_data_dict: A list describing desired table alterations. + database_id: The Django id of the database containing the table. + + Returns: + The name of the altered table. 
+ """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + return alter_table_on_database(table_oid, table_data_dict, conn) + + +@rpc_method(name="tables.import") +@http_basic_auth_login_required +@handle_rpc_exceptions +def import_( + *, + data_file_id: int, + schema_oid: int, + database_id: int, + table_name: str = None, + comment: str = None, + **kwargs +) -> AddedTableInfo: + """ + Import a CSV/TSV into a table. + + Args: + data_file_id: The Django id of the DataFile containing desired CSV/TSV. + schema_oid: Identity of the schema in the user's database. + database_id: The Django id of the database containing the table. + table_name: Name of the table to be imported. + comment: The comment for the new table. + + Returns: + The `oid` and `name` of the created table. + """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + return import_csv(data_file_id, table_name, schema_oid, conn, comment) + + +@rpc_method(name="tables.get_import_preview") +@http_basic_auth_login_required +@handle_rpc_exceptions +def get_import_preview( + *, + table_oid: int, + columns: list[PreviewableColumnInfo], + database_id: int, + limit: int = 20, + **kwargs +) -> list[dict]: + """ + Preview an imported table. + + Args: + table_oid: Identity of the imported table in the user's database. + columns: List of settings describing the casts to be applied to the columns. + database_id: The Django id of the database containing the table. + limit: The upper limit for the number of records to return. + + Returns: + The records from the specified columns of the table. + """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + return get_preview(table_oid, columns, conn, limit) + + +@rpc_method(name="tables.list_joinable") +@http_basic_auth_login_required +@handle_rpc_exceptions +def list_joinable( + *, + table_oid: int, + database_id: int, + max_depth: int = 3, + **kwargs +) -> JoinableTableInfo: + """ + List details for joinable tables. + + Args: + table_oid: Identity of the table to get joinable tables for. + database_id: The Django id of the database containing the table. + max_depth: Specifies how far to search for joinable tables. + + Returns: + Joinable table details for a given table. + """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + joinable_dict = list_joinable_tables(table_oid, conn, max_depth) + return JoinableTableInfo.from_dict(joinable_dict) + + +@rpc_method(name="tables.list_with_metadata") +@http_basic_auth_login_required +@handle_rpc_exceptions +def list_with_metadata(*, schema_oid: int, database_id: int, **kwargs) -> list: + """ + List tables in a schema, along with the metadata associated with each table + + Args: + schema_oid: PostgreSQL OID of the schema containing the tables. + database_id: The Django id of the database containing the table. + + Returns: + A list of table details along with metadata. 
+ """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + tables = get_table_info(schema_oid, conn) + + metadata_records = list_tables_meta_data(database_id) + metadata_map = { + r.table_oid: TableMetaDataBlob.from_model(r) for r in metadata_records + } + + return [table | {"metadata": metadata_map.get(table["oid"])} for table in tables] + + +@rpc_method(name="tables.get_with_metadata") +@http_basic_auth_login_required +@handle_rpc_exceptions +def get_with_metadata(*, table_oid: int, database_id: int, **kwargs) -> dict: + """ + Get information about a table in a schema, along with the associated table metadata. + + Args: + table_oid: The OID of the table in the user's database. + database_id: The Django id of the database containing the table. + + Returns: + A dict describing table details along with its metadata. + """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + table = get_table(table_oid, conn) + + raw_metadata = get_table_meta_data(table_oid, database_id) + return TableInfo(table) | {"metadata": TableMetaDataBlob.from_model(raw_metadata)} diff --git a/mathesar/rpc/tables/metadata.py b/mathesar/rpc/tables/metadata.py new file mode 100644 index 0000000000..bec5e79b0b --- /dev/null +++ b/mathesar/rpc/tables/metadata.py @@ -0,0 +1,113 @@ +""" +Classes and functions exposed to the RPC endpoint for managing table metadata. +""" +from typing import Optional, TypedDict + +from modernrpc.core import rpc_method +from modernrpc.auth.basic import http_basic_auth_login_required + +from mathesar.rpc.exceptions.handlers import handle_rpc_exceptions +from mathesar.utils.tables import list_tables_meta_data, set_table_meta_data + + +class TableMetaDataRecord(TypedDict): + """ + Metadata for a table in a database. + + Only the `database` and `table_oid` keys are required. + + Attributes: + id: The Django id of the TableMetaData object. + database_id: The Django id of the database containing the table. + table_oid: The OID of the table in the database. + data_file_id: Specifies the DataFile model id used for the import. + import_verified: Specifies whether a file has been successfully imported into a table. + column_order: The order in which columns of a table are displayed. + record_summary_customized: Specifies whether the record summary has been customized. + record_summary_template: Record summary template for a referent column. + """ + id: int + database_id: int + table_oid: int + data_file_id: Optional[int] + import_verified: Optional[bool] + column_order: Optional[list[int]] + record_summary_customized: Optional[bool] + record_summary_template: Optional[str] + + @classmethod + def from_model(cls, model): + return cls( + id=model.id, + database_id=model.database.id, + table_oid=model.table_oid, + data_file_id=model.data_file_id, + import_verified=model.import_verified, + column_order=model.column_order, + record_summary_customized=model.record_summary_customized, + record_summary_template=model.record_summary_template, + ) + + +class TableMetaDataBlob(TypedDict): + """ + The metadata fields which can be set on a table + + Attributes: + data_file_id: Specifies the DataFile model id used for the import. + import_verified: Specifies whether a file has been successfully imported into a table. + column_order: The order in which columns of a table are displayed. + record_summary_customized: Specifies whether the record summary has been customized. + record_summary_template: Record summary template for a referent column. 
+ """ + data_file_id: Optional[int] + import_verified: Optional[bool] + column_order: Optional[list[int]] + record_summary_customized: Optional[bool] + record_summary_template: Optional[str] + + @classmethod + def from_model(cls, model): + return cls( + data_file_id=model.data_file_id, + import_verified=model.import_verified, + column_order=model.column_order, + record_summary_customized=model.record_summary_customized, + record_summary_template=model.record_summary_template, + ) + + +@rpc_method(name="tables.metadata.list") +@http_basic_auth_login_required +@handle_rpc_exceptions +def list_(*, database_id: int, **kwargs) -> list[TableMetaDataRecord]: + """ + List metadata associated with tables for a database. + + Args: + database_id: The Django id of the database containing the table. + + Returns: + Metadata object for a given table oid. + """ + table_meta_data = list_tables_meta_data(database_id) + return [ + TableMetaDataRecord.from_model(model) for model in table_meta_data + ] + + +@rpc_method(name="tables.metadata.set") +@http_basic_auth_login_required +@handle_rpc_exceptions +def set_( + *, table_oid: int, metadata: TableMetaDataBlob, database_id: int, **kwargs +) -> None: + """ + Set metadata for a table. + + Args: + table_oid: The PostgreSQL OID of the table. + metadata: A TableMetaDataBlob object describing desired table metadata to set. + database_id: The Django id of the database containing the table. + """ + set_table_meta_data(table_oid, metadata, database_id) diff --git a/mathesar/rpc/tables/privileges.py b/mathesar/rpc/tables/privileges.py new file mode 100644 index 0000000000..755d983e48 --- /dev/null +++ b/mathesar/rpc/tables/privileges.py @@ -0,0 +1,111 @@ +from typing import Literal, TypedDict + +from modernrpc.core import rpc_method, REQUEST_KEY +from modernrpc.auth.basic import http_basic_auth_login_required + +from db.roles.operations.ownership import transfer_table_ownership +from db.roles.operations.select import list_table_privileges +from db.roles.operations.update import replace_table_privileges_for_roles +from mathesar.rpc.utils import connect +from mathesar.rpc.exceptions.handlers import handle_rpc_exceptions +from mathesar.rpc.tables.base import TableInfo + + +class TablePrivileges(TypedDict): + """ + Information about table privileges for a role. + Attributes: + role_oid: The `oid` of the role. + direct: A list of table privileges for the afforementioned role_oid. + """ + role_oid: int + direct: list[Literal['INSERT', 'SELECT', 'UPDATE', 'DELETE', 'TRUNCATE', 'REFERENCES', 'TRIGGER']] + + @classmethod + def from_dict(cls, d): + return cls( + role_oid=d["role_oid"], + direct=d["direct"] + ) + + +@rpc_method(name="tables.privileges.list_direct") +@http_basic_auth_login_required +@handle_rpc_exceptions +def list_direct( + *, table_oid: int, database_id: int, **kwargs +) -> list[TablePrivileges]: + """ + List direct table privileges for roles. + Args: + table_oid: The OID of the table whose privileges we'll list. + database_id: The Django id of the database containing the table. + Returns: + A list of table privileges. 
+ """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + raw_priv = list_table_privileges(table_oid, conn) + return [TablePrivileges.from_dict(i) for i in raw_priv] + + +@rpc_method(name="tables.privileges.replace_for_roles") +@http_basic_auth_login_required +@handle_rpc_exceptions +def replace_for_roles( + *, + privileges: list[TablePrivileges], table_oid: int, database_id: int, + **kwargs +) -> list[TablePrivileges]: + """ + Replace direct table privileges for roles. + + Possible privileges are `INSERT`, `SELECT`, `UPDATE`, `DELETE`, `TRUNCATE`, `REFERENCES` and `TRIGGER`. + + Only roles which are included in a passed `TablePrivileges` object + are affected. + + WARNING: Any privilege included in the `direct` list for a role + is GRANTed, and any privilege not included is REVOKEd. + + Args: + privileges: The new privilege sets for roles. + table_oid: The OID of the affected table. + database_id: The Django id of the database containing the table. + + Returns: + A list of all non-default privileges on the table after the + operation. + """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + raw_priv = replace_table_privileges_for_roles( + conn, table_oid, [TablePrivileges.from_dict(i) for i in privileges] + ) + return [TablePrivileges.from_dict(i) for i in raw_priv] + + +@rpc_method(name="tables.privileges.transfer_ownership") +@http_basic_auth_login_required +@handle_rpc_exceptions +def transfer_ownership(*, table_oid: int, new_owner_oid: int, database_id: int, **kwargs) -> TableInfo: + """ + Transfers ownership of a given table to a new owner. + + Attributes: + table_oid: The OID of the table to transfer. + new_owner_oid: The OID of the role whom we want to be the new owner of the table. + + Note: To successfully transfer ownership of a table to a new owner the current user must: + - Be a Superuser/Owner of the table. + - Be a `MEMBER` of the new owning role. i.e. The current role should be able to `SET ROLE` + to the new owning role. + - Have `CREATE` privilege on the table's schema. + + Returns: + Information about the table, and the current user privileges. + """ + user = kwargs.get(REQUEST_KEY).user + with connect(database_id, user) as conn: + table_info = transfer_table_ownership(table_oid, new_owner_oid, conn) + return TableInfo(table_info) diff --git a/mathesar/rpc/types.py b/mathesar/rpc/types.py new file mode 100644 index 0000000000..016b59b7a7 --- /dev/null +++ b/mathesar/rpc/types.py @@ -0,0 +1,47 @@ +""" +Classes and functions exposed to the RPC endpoint for listing types in a database. +""" + +from typing import Optional, TypedDict + +from modernrpc.core import rpc_method +from modernrpc.auth.basic import http_basic_auth_login_required + +from mathesar.rpc.exceptions.handlers import handle_rpc_exceptions +from mathesar.database.types import UIType +from mathesar.api.display_options import DISPLAY_OPTIONS_BY_UI_TYPE + + +class TypeInfo(TypedDict): + """ + Information about a type. + + Attributes: + identifier: Specifies the type class that db_type(s) belongs to. + name: Specifies the UI name for a type class. + db_types: Specifies the name(s) of types present on the database. + display_options: Specifies metadata related to a type class. 
+ """ + identifier: str + name: str + db_types: list + display_options: Optional[dict] + + @classmethod + def from_dict(cls, type): + return cls( + identifier=type.id, + name=type.display_name, + db_types=[db_type.id for db_type in type.db_types], + display_options=DISPLAY_OPTIONS_BY_UI_TYPE.get(type, None) + ) + + +@rpc_method(name="types.list") +@http_basic_auth_login_required +@handle_rpc_exceptions +def list_() -> list[TypeInfo]: + """ + List information about types available on the database. Exposed as `list`. + """ + return [TypeInfo.from_dict(type) for type in UIType] diff --git a/mathesar/rpc/utils.py b/mathesar/rpc/utils.py new file mode 100644 index 0000000000..bffa807a73 --- /dev/null +++ b/mathesar/rpc/utils.py @@ -0,0 +1,15 @@ +from mathesar.models.base import UserDatabaseRoleMap + + +def connect(database_id, user): + """ + Get a psycopg database connection. + + Args: + database_id: The Django id of the Database used for connecting. + user: A user model instance who'll connect to the database. + """ + user_database_role = UserDatabaseRoleMap.objects.get( + user=user, database__id=database_id + ) + return user_database_role.connection diff --git a/mathesar/signals.py b/mathesar/signals.py index 83b673e93b..bcec182ab1 100644 --- a/mathesar/signals.py +++ b/mathesar/signals.py @@ -1,7 +1,7 @@ from django.db.models.signals import post_save from django.dispatch import receiver -from mathesar.models.base import ( +from mathesar.models.deprecated import ( Column, Table, _set_default_preview_template, _create_table_settings, ) diff --git a/mathesar/start.py b/mathesar/start.py deleted file mode 100644 index 6ebc452983..0000000000 --- a/mathesar/start.py +++ /dev/null @@ -1,7 +0,0 @@ -import sys -import gunicorn.app.wsgiapp as wsgi -from mathesar.install import main as run_install -# This is just a simple way to supply args to gunicorn -sys.argv = [".", "config.wsgi", "--bind=0.0.0.0:8000"] -run_install(skip_static_collection=True) -wsgi.run() diff --git a/mathesar/state/django.py b/mathesar/state/django.py index a812a9ca93..ebb5cfbcfb 100644 --- a/mathesar/state/django.py +++ b/mathesar/state/django.py @@ -11,8 +11,8 @@ from db.constraints.operations.select import get_constraints_with_oids from db.schemas.operations.select import get_mathesar_schemas_with_oids from db.tables.operations.select import get_table_oids_from_schemas -# We import the entire models.base module to avoid a circular import error -from mathesar.models import base as models +# We import the entire models_deprecated.deprecated module to avoid a circular import error +from mathesar.models import deprecated as models_deprecated from mathesar.api.serializers.shared_serializers import DisplayOptionsMappingSerializer, \ DISPLAY_OPTIONS_SERIALIZER_MAPPING_KEY from mathesar.database.base import create_mathesar_engine @@ -32,28 +32,28 @@ def clear_dj_cache(): def reflect_db_objects(metadata, db_name=None): - databases = models.Database.current_objects.all() + databases = models_deprecated.Connection.current_objects.all() if db_name is not None: databases = databases.filter(name=db_name) sync_databases_status(databases) for database in databases: if database.deleted is False: reflect_schemas_from_database(database) - schemas = models.Schema.current_objects.filter(database=database).prefetch_related( + schemas = models_deprecated.Schema.current_objects.filter(database=database).prefetch_related( Prefetch('database', queryset=databases) ) reflect_tables_from_schemas(schemas, metadata=metadata) - tables = 
-            tables = models.Table.current_objects.filter(schema__in=schemas).prefetch_related(
+            tables = models_deprecated.Table.current_objects.filter(schema__in=schemas).prefetch_related(
                 Prefetch('schema', queryset=schemas)
             )
             reflect_columns_from_tables(tables, metadata=metadata)
             reflect_constraints_from_database(database)
         else:
-            models.Schema.current_objects.filter(database=database).delete()
+            models_deprecated.Schema.current_objects.filter(database=database).delete()
 
 
 def sync_databases_status(databases):
-    """Update status and check health for current Database Model instances."""
+    """Update status and check health for current Connection Model instances."""
     for db in databases:
         try:
             db._sa_engine.connect()
@@ -65,7 +65,7 @@ def sync_databases_status(databases):
 
 def _set_db_is_deleted(db, deleted):
     """
-    Assures that a Django Database model's `deleted` field is equal to the `deleted`
+    Assures that a Django Connection model's `deleted` field is equal to the `deleted`
     parameter, updating if necessary. Takes care to `save()` only when an update has
     been performed, to save on the noteworthy performance cost.
     """
@@ -83,10 +83,10 @@ def reflect_schemas_from_database(database):
     schemas = []
     for oid in db_schema_oids:
-        schema = models.Schema(oid=oid, database=database)
+        schema = models_deprecated.Schema(oid=oid, database=database)
         schemas.append(schema)
-    models.Schema.current_objects.bulk_create(schemas, ignore_conflicts=True)
-    for schema in models.Schema.current_objects.all().select_related('database'):
+    models_deprecated.Schema.current_objects.bulk_create(schemas, ignore_conflicts=True)
+    for schema in models_deprecated.Schema.current_objects.all().select_related('database'):
         if schema.database == database and schema.oid not in db_schema_oids:
             # Deleting Schemas are a rare occasion, not worth deleting in bulk
             schema.delete()
@@ -105,17 +105,17 @@ def reflect_tables_from_schemas(schemas, metadata):
     tables = []
     for oid, schema_oid in db_table_oids:
         schema = next(schema for schema in schemas if schema.oid == schema_oid)
-        table = models.Table(oid=oid, schema=schema)
+        table = models_deprecated.Table(oid=oid, schema=schema)
         tables.append(table)
-    models.Table.current_objects.bulk_create(tables, ignore_conflicts=True)
+    models_deprecated.Table.current_objects.bulk_create(tables, ignore_conflicts=True)
     # Calling signals manually because bulk create does not emit any signals
-    models._create_table_settings(models.Table.current_objects.filter(settings__isnull=True))
+    models_deprecated._create_table_settings(models_deprecated.Table.current_objects.filter(settings__isnull=True))
     deleted_tables = []
-    for table in models.Table.current_objects.filter(schema__in=schemas).select_related('schema'):
+    for table in models_deprecated.Table.current_objects.filter(schema__in=schemas).select_related('schema'):
         if (table.oid, table.schema.oid) not in db_table_oids:
             deleted_tables.append(table.id)
-    models.Table.current_objects.filter(id__in=deleted_tables).delete()
+    models_deprecated.Table.current_objects.filter(id__in=deleted_tables).delete()
@@ -130,14 +130,14 @@ def reflect_columns_from_tables(tables, metadata):
     _delete_stale_columns(attnum_tuples, tables)
     # Manually trigger preview templates computation signal
     for table in tables:
-        models._set_default_preview_template(table)
+        models_deprecated._set_default_preview_template(table)
     _invalidate_columns_with_incorrect_display_options(tables)
 
 
 def _invalidate_columns_with_incorrect_display_options(tables):
     columns_with_invalid_display_option = []
-    columns = models.Column.current_objects.filter(table__in=tables)
+    columns = models_deprecated.Column.current_objects.filter(table__in=tables)
     for column in columns:
         if column.display_options:
             # If the type of column has changed, existing display options won't be valid anymore.
@@ -148,16 +148,16 @@ def _invalidate_columns_with_incorrect_display_options(tables):
             if not serializer.is_valid(raise_exception=False):
                 columns_with_invalid_display_option.append(column.id)
     if len(columns_with_invalid_display_option) > 0:
-        models.Column.current_objects.filter(id__in=columns_with_invalid_display_option).update(display_options=None)
+        models_deprecated.Column.current_objects.filter(id__in=columns_with_invalid_display_option).update(display_options=None)
 
 
 def _create_reflected_columns(attnum_tuples, tables):
     columns = []
     for attnum, table_oid in attnum_tuples:
         table = next(table for table in tables if table.oid == table_oid)
-        column = models.Column(attnum=attnum, table=table, display_options=None)
+        column = models_deprecated.Column(attnum=attnum, table=table, display_options=None)
         columns.append(column)
-    models.Column.current_objects.bulk_create(columns, ignore_conflicts=True)
+    models_deprecated.Column.current_objects.bulk_create(columns, ignore_conflicts=True)
 
 
 def _delete_stale_columns(attnum_tuples, tables):
@@ -176,7 +176,7 @@ def _delete_stale_columns(attnum_tuples, tables):
         operator.or_, stale_columns_conditions
     )
-    models.Column.objects.filter(stale_columns_query).delete()
+    models_deprecated.Column.objects.filter(stale_columns_query).delete()
 
 
 # TODO pass in a cached engine instead of creating a new one
@@ -190,14 +190,14 @@ def reflect_constraints_from_database(database):
         map_of_table_oid_to_constraint_oids[table_oid].append(constraint_oid)
 
     table_oids = map_of_table_oid_to_constraint_oids.keys()
-    tables = models.Table.current_objects.filter(oid__in=table_oids)
+    tables = models_deprecated.Table.current_objects.filter(oid__in=table_oids)
     constraint_objs_to_create = []
     for table in tables:
         constraint_oids = map_of_table_oid_to_constraint_oids.get(table.oid, [])
         for constraint_oid in constraint_oids:
-            constraint_obj = models.Constraint(oid=constraint_oid, table=table)
+            constraint_obj = models_deprecated.Constraint(oid=constraint_oid, table=table)
             constraint_objs_to_create.append(constraint_obj)
-    models.Constraint.current_objects.bulk_create(constraint_objs_to_create, ignore_conflicts=True)
+    models_deprecated.Constraint.current_objects.bulk_create(constraint_objs_to_create, ignore_conflicts=True)
     _delete_stale_dj_constraints(db_constraints, database)
     engine.dispose()
@@ -212,7 +212,7 @@ def _delete_stale_dj_constraints(known_db_constraints, database):
         for known_db_constraint in known_db_constraints
     )
-    stale_dj_constraints = models.Constraint.current_objects.filter(
+    stale_dj_constraints = models_deprecated.Constraint.current_objects.filter(
         ~Q(oid__in=known_db_constraint_oids),
         table__schema__database=database,
     )
@@ -224,7 +224,7 @@ def reflect_new_table_constraints(table):
     engine = create_mathesar_engine(table.schema.database)
     db_constraints = get_constraints_with_oids(engine, table_oid=table.oid)
     constraints = [
-        models.Constraint.current_objects.get_or_create(
+        models_deprecated.Constraint.current_objects.get_or_create(
             oid=db_constraint['oid'],
             table=table
         )
diff --git a/mathesar/templates/mathesar/login_base.html b/mathesar/templates/mathesar/login_base.html
index 3f24e64c22..8d324afddf 100644
--- a/mathesar/templates/mathesar/login_base.html
+++ b/mathesar/templates/mathesar/login_base.html
@@ -112,41 +112,6 @@ Mathesar Logo
-      {% if live_demo_mode %}
-        ⚠️ {% translate "Unsupported Screen Size" %}
-        {% translate "Mathesar is a spreadsheet-like application with a rich UI that does not yet function well on screens this small. Improved support for mobile devices is on our" %}
-        {% translate "roadmap" %}.
-        {% translate "You can still use this demo site, but some features may not work correctly. We encourage you to try Mathesar on a device with a larger screen." %}
-        {% translate "Live Demo Mode" %}
-        {% translate "Mathesar's live demo is available to anyone to try out." %}
-        {% if live_demo_username and live_demo_password %}
-          {% translate "Use the following credentials to login" %}:
-          • {% translate "Username" %}: {{live_demo_username}}
-          • {% translate "Password" %}: {{live_demo_password}}
-        {% endif %}
-        {% translate "Keep in mind that the data in the live demo is reset regularly." %}
-      {% endif %}