diff --git a/README.md b/README.md
index ee580b4..e51a649 100644
--- a/README.md
+++ b/README.md
@@ -3,6 +3,7 @@
 A Streamlit connection component to connect Streamlit to Supabase Storage and Database.
 
 ## :thinking: Why use this?
+- [X] A new `query()` method available to run cached select queries on the database. **Save time and money** on your API requests
 - [X] **Same method names as the Supabase Python API**
 - [X] It is built on top of [`storage-py`](https://github.com/supabase-community/storage-py) and **exposes more methods** than currently supported by the Supabase Python API. For example, `update()`, `create_signed_upload_url()`, and `upload_to_signed_url()`
 - [X] **Consistent logging syntax.** All statements follow the syntax `client.method("bucket_id", **options)`
@@ -41,7 +42,7 @@ A Streamlit connection component to connect Streamlit to Supabase Storage and Da
     mime = mimetypes.guess_type(file_name)[0]
     data = open(file_name, "rb")
 
-    st.download_button("Download file ⏬", data=data, file_name=file_name, mime=mime)
+    st.download_button("Download file", data=data, file_name=file_name, mime=mime)
 
 ```
 
@@ -61,7 +62,7 @@ A Streamlit connection component to connect Streamlit to Supabase Storage and Da
 
     if st.button("Request download"):
         file_name, mime, data = st_supabase_client.download(bucket_id, source_path)
-        st.download_button("Download file ⏬", data=data, file_name=file_name, mime=mime)
+        st.download_button("Download file", data=data, file_name=file_name, mime=mime)
 
 ```
 
@@ -158,8 +159,28 @@ pip install st-supabase-connection
 #### List existing buckets
 ```python
 >>> st_supabase.list_buckets()
-[SyncBucket(id='bucket1', name='bucket1', owner='', public=False, created_at=datetime.datetime(2023, 7, 31, 19, 56, 21, 518438, tzinfo=tzutc()), updated_at=datetime.datetime(2023, 7, 31, 19, 56, 21, 518438, tzinfo=tzutc()), file_size_limit=None, allowed_mime_types=None),
- SyncBucket(id='bucket2', name='bucket2', owner='', public=True, created_at=datetime.datetime(2023, 7, 31, 19, 56, 28, 203536, tzinfo=tzutc()), updated_at=datetime.datetime(2023, 7, 31, 19, 56, 28, 203536, tzinfo=tzutc()), file_size_limit=100, allowed_mime_types=["image/jpg","image/png"])]
+[
+    SyncBucket(
+        id="bucket1",
+        name="bucket1",
+        owner="",
+        public=False,
+        created_at=datetime.datetime(2023, 7, 31, 19, 56, 21, 518438, tzinfo=tzutc()),
+        updated_at=datetime.datetime(2023, 7, 31, 19, 56, 21, 518438, tzinfo=tzutc()),
+        file_size_limit=None,
+        allowed_mime_types=None,
+    ),
+    SyncBucket(
+        id="bucket2",
+        name="bucket2",
+        owner="",
+        public=True,
+        created_at=datetime.datetime(2023, 7, 31, 19, 56, 28, 203536, tzinfo=tzutc()),
+        updated_at=datetime.datetime(2023, 7, 31, 19, 56, 28, 203536, tzinfo=tzutc()),
+        file_size_limit=100,
+        allowed_mime_types=["image/jpg", "image/png"],
+    ),
+]
 ```
 #### Create a bucket
 ```python
@@ -186,49 +207,77 @@ pip install st-supabase-connection
 #### List objects in a bucket
 ```python
 >>> st_supabase_client.list_objects("new_bucket", path="folder1")
-[{'name': 'new_test.png',
-  'id': 'e506920e-2834-440e-85f1-1d5476927582',
-  'updated_at': '2023-08-02T19:53:22.53986+00:00',
-  'created_at': '2023-08-02T19:52:20.404391+00:00',
-  'last_accessed_at': '2023-08-02T19:53:21.833+00:00',
-  'metadata': {'eTag': '"814a0034f5549e957ee61360d87457e5"',
-   'size': 473831,
-   'mimetype': 'image/png',
-   'cacheControl': 'max-age=3600',
-   'lastModified': '2023-08-02T19:53:23.000Z',
-   'contentLength': 473831,
-   'httpStatusCode': 200}}]
+[
+    {
+        "name": "new_test.png",
+        "id": "e506920e-2834-440e-85f1-1d5476927582",
+        "updated_at": "2023-08-02T19:53:22.53986+00:00",
+        "created_at": "2023-08-02T19:52:20.404391+00:00",
+        "last_accessed_at": "2023-08-02T19:53:21.833+00:00",
+        "metadata": {
+            "eTag": '"814a0034f5549e957ee61360d87457e5"',
+            "size": 473831,
+            "mimetype": "image/png",
+            "cacheControl": "max-age=3600",
+            "lastModified": "2023-08-02T19:53:23.000Z",
+            "contentLength": 473831,
+            "httpStatusCode": 200,
+        },
+    }
+]
 ```
 
 #### Delete a bucket
 ```python
 >>> st_supabase_client.delete_bucket("new_bucket")
+{'message': 'Successfully deleted'}
 ```
 
-### :file_cabinet: Database
-> [!NOTE]
-> The connector's database methods behave exactly the same way as the Supabase Python API's database methods. Check the [Supabase Python API reference](https://supabase.com/docs/reference/python/select) for more examples.
+### :file_cabinet: Database operations
 
 #### Simple query
 ```python
->>> st_supabase_client.table('countries').select("*").execute()
-APIResponse(data=[{'id': 1, 'name': 'Afghanistan'},
-   {'id': 2, 'name': 'Albania'},
-   {'id': 3, 'name': 'Algeria'}],
-   count=None)
+>>> st_supabase.query("*", from_="countries", ttl=None).execute()
+APIResponse(
+    data=[
+        {"id": 1, "name": "Afghanistan"},
+        {"id": 2, "name": "Albania"},
+        {"id": 3, "name": "Algeria"},
+    ],
+    count=None,
+)
 ```
 
 #### Query with join
 ```python
->>> st_supabase_client.table('users').select('name, teams(name)').execute()
-APIResponse(data=[
-    {'name': 'Kiran', 'teams': [{'name': 'Green'}, {'name': 'Blue'}]},
-    {'name': 'Evan', 'teams': [{'name': 'Blue'}]}
-    ],
-    count=None)
+>>> st_supabase.query("name, teams(name)", from_="users", count="exact", ttl=None).execute()
+APIResponse(
+    data=[
+        {"name": "Kiran", "teams": [{"name": "Green"}, {"name": "Blue"}]},
+        {"name": "Evan", "teams": [{"name": "Blue"}]},
+    ],
+    count=None,
+)
 ```
 
 #### Filter through foreign tables
 ```python
->>> st_supabase_client.table('cities').select('name, countries(*)').eq('countries.name', 'Estonia').execute()
-APIResponse(data=[{'name': 'Bali', 'countries': None},
-   {'name': 'Munich', 'countries': None}],
-   count=None)
+>>> st_supabase.query("name, countries(*)", count="exact", from_="cities", ttl=0).eq(
+        "countries.name", "Curaçao"
+    ).execute()
+
+APIResponse(
+    data=[
+        {
+            "name": "Kralendijk",
+            "countries": {
+                "id": 2,
+                "name": "Curaçao",
+                "iso2": "CW",
+                "iso3": "CUW",
+                "local_name": None,
+                "continent": None,
+            },
+        },
+        {"name": "Willemstad", "countries": None},
+    ],
+    count=2,
+)
 ```
 
 #### Insert rows
@@ -236,15 +285,38 @@ APIResponse(data=[{'name': 'Bali', 'countries': None},
 >>> st_supabase_client.table("countries").insert(
         [{"name": "Wakanda", "iso2": "WK"}, {"name": "Wadiya", "iso2": "WD"}], count="None"
     ).execute()
-APIResponse(data=[{'id': 250, 'name': 'Wakanda', 'iso2': 'WK', 'iso3': None, 'local_name': None, 'continent': None}, {'id': 251, 'name': 'Wadiya', 'iso2': 'WD', 'iso3': None, 'local_name': None, 'continent': None}], count=None)
+APIResponse(
+    data=[
+        {
+            "id": 250,
+            "name": "Wakanda",
+            "iso2": "WK",
+            "iso3": None,
+            "local_name": None,
+            "continent": None,
+        },
+        {
+            "id": 251,
+            "name": "Wadiya",
+            "iso2": "WD",
+            "iso3": None,
+            "local_name": None,
+            "continent": None,
+        },
+    ],
+    count=None,
+)
 ```
+> [!NOTE]
+> Check the [Supabase Python API reference](https://supabase.com/docs/reference/python/select) for more examples.
+
 
 ## :star: Explore all options in Streamlit
 [![Open in Streamlit](https://static.streamlit.io/badges/streamlit_badge_black_white.svg)](https://st-supabase-connection.streamlit.app/)
 
 ## :bow: Acknowledgements
 This connector builds upon the awesome work done by the open-source community in general and the [Supabase Community](https://github.com/supabase-community) in particular. I cannot be more thankful to all the authors whose work I have used either directly or indirectly.
 
-## 🤗 Want to support my work?
+## :hugs: Want to support my work?
diff --git a/demo/app.py b/demo/app.py
index de6498f..6300e6c 100644
--- a/demo/app.py
+++ b/demo/app.py
@@ -658,6 +658,13 @@ elif request_builder == "select":
     request_builder_query_label = "Enter the columns to fetch as comma-separated strings"
     placeholder = value = "*"
 
+    ttl = rcol_placeholder.text_input(
+        "Enter cache expiry duration",
+        value=0,
+        placeholder=None,
+        help="Set as `0` to always fetch the latest results, and leave blank to cache indefinitely.",
+    )
+    ttl = None if ttl == "" else ttl
 elif request_builder == "delete":
     request_builder_query_label = "Delete query"
     placeholder = value = "Delete does not take a request builder query"
@@ -709,9 +716,15 @@
 operators = operators.replace(".__init__()", "").replace(".execute()", "")
 
 if operators:
-    constructed_db_query = f"""st_supabase.table("{table}").{request_builder}({request_builder_query}){operators}.execute()"""
+    if request_builder == "select":
+        constructed_db_query = f"""st_supabase.query({request_builder_query}, from_="{table}", {ttl=}){operators}.execute()"""
+    else:
+        constructed_db_query = f"""st_supabase.table("{table}").{request_builder}({request_builder_query}){operators}.execute()"""
 else:
-    constructed_db_query = f"""st_supabase.table("{table}").{request_builder}({request_builder_query}).execute()"""
+    if request_builder == "select":
+        constructed_db_query = f"""st_supabase.query({request_builder_query}, from_="{table}", {ttl=}).execute()"""
+    else:
+        constructed_db_query = f"""st_supabase.table("{table}").{request_builder}({request_builder_query}).execute()"""
 
 st.write("**Constructed statement**")
 st.code(constructed_db_query)
diff --git a/src/st_supabase_connection/__init__.py b/src/st_supabase_connection/__init__.py
index ddc1d2b..a02eb95 100644
--- a/src/st_supabase_connection/__init__.py
+++ b/src/st_supabase_connection/__init__.py
@@ -1,8 +1,11 @@
 import os
 import urllib
+from datetime import timedelta
 from io import BytesIO
 from typing import Literal, Optional, Tuple, Union
 
+from postgrest import SyncSelectRequestBuilder, types
+from streamlit import cache_resource
 from streamlit.connections import ExperimentalBaseConnection
 from supabase import Client, create_client
 
@@ -13,13 +16,11 @@
 #     def _get_bucket():
 #         return self.client.storage.get_bucket
 #     return _get_bucket
 
-# REF : https://discuss.streamlit.io/t/connections-hackathon/47574/24?u=siddhantsadangi
-# REF : https://github.com/streamlit/files-connection/blob/main/st_files_connection/connection.py#L136
 # TODO: Add cache to benefits in readme if implemented
 # TODO: Add optional headers to storage requests
 # TODO: support additional postgrest-py methods (https://github.com/supabase-community/postgrest-py/blob/master/postgrest/_sync/request_builder.py#L177C13-L177C13)
 
-__version__ = "0.0.2"
+__version__ = "0.0.4"
 
 
@@ -37,36 +38,6 @@ class SupabaseConnection(ExperimentalBaseConnection[Client]):
     Methods
     -------
     table :
         Perform a table operation
-    get_bucket :
-        Retrieve a bucket
-    list_buckets :
-        List all buckets
-    delete_bucket :
-        Delete a bucket
-    empty_bucket :
-        Empty a bucket
-    create_bucket :
-        Create a bucket
-    upload :
-        Upload files to a bucket
-    download :
-        Download files from a bucket
-    update_bucket :
-        Update bucket properties
-    move :
-        Move objects within a bucket
-    remove :
-        Removes objects from a bucket
-    list_objects :
-        List all objects in a bucket path
-    create_signed_urls :
-        Create a signed URL for a file in a bucket
-    get_public_url :
-        Retrieve the public URL for a file in a public bucket
-    create_signed_upload_url :
-        Create a signed URL to upload a file to a path in a bucket
-    upload_to_signed_url :
-        Upload a file to a bucket using a token from `create_signed_upload_url`
     """
 
@@ -105,6 +76,36 @@ def _connect(self, **kwargs) -> None:
         self.delete_bucket = self.client.storage.delete_bucket
         self.empty_bucket = self.client.storage.empty_bucket
 
+    def query(
+        self,
+        *columns: str,
+        from_: str,
+        count: Optional[types.CountMethod] = None,
+        ttl: Optional[Union[int, str, timedelta]] = None,
+    ) -> SyncSelectRequestBuilder:
+        """
+        Run a SELECT query.
+
+        Parameters
+        ----------
+        *columns : str
+            The names of the columns to fetch.
+        from_ : str
+            The table to run the query on.
+        count : str
+            The method to use to get the count of rows returned. Defaults to `None`.
+        ttl : float, timedelta, str, or None
+            The maximum time to keep an entry in the cache. Defaults to `None` (cache never expires).
+        """
+
+        @cache_resource(
+            ttl=ttl
+        )  # The return object is not serializable. This behaviour was retained to let users chain operations to the query
+        def _query(_self, *columns, from_, count):
+            return _self.client.table(from_).select(*columns, count=count)
+
+        return _query(self, *columns, from_=from_, count=count)
+
     def create_bucket(
         self,
         id: str,
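
Below is a minimal usage sketch of the `query()` method introduced in this patch, illustrating how the cached request builder it returns is meant to be chained. It is not part of the diff: the `st.experimental_connection(...)` initialization and the `countries` table are assumptions for illustration only, while chaining postgrest operators such as `.eq()` before `.execute()` follows the README examples above.

```python
import streamlit as st

from st_supabase_connection import SupabaseConnection

# Assumed initialization -- in practice the Supabase URL and key come from Streamlit secrets.
st_supabase = st.experimental_connection("supabase_connection", type=SupabaseConnection)

# query() returns a postgrest request builder cached with st.cache_resource
# (keyed on the columns, table, and count arguments). ttl=None caches it
# indefinitely, ttl=0 rebuilds it on every rerun; here it is kept for 600 seconds.
# "countries" is a hypothetical table used only for illustration.
response = (
    st_supabase.query("id, name", from_="countries", ttl=600)
    .eq("name", "Albania")
    .execute()
)

st.dataframe(response.data)
```

Because the cached object is the request builder rather than the response, `cache_resource` is used instead of `cache_data`; the inline comment in the diff notes that the builder is not serializable and is kept cache-able precisely so operators can still be chained onto it.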