Caching API Response (#12)
* v1.3.0: Added `execute_query()` to cache database API responses, removed `query()`

* v1.3.0: Updated README

* testing output display

* Testing output display

* v1.3.0: Fixed issue with operators

* v1.3.0: Maybe a fix for APIResponse

* v1.3.0: Handled APIResponse

* v1.3.0: Using cache resource instead of cache data

* v1.3.0: Reverted test app

* v2.0.0: Bumped version, Improved type hints
SiddhantSadangi authored May 12, 2024
1 parent 1d7f124 commit 801ad77
Showing 4 changed files with 123 additions and 82 deletions.
33 changes: 21 additions & 12 deletions README.md
@@ -158,7 +158,7 @@ pip install st-supabase-connection

1. Import
```python
from st_supabase_connection import SupabaseConnection
from st_supabase_connection import SupabaseConnection, execute_query
```
2. Initialize
```python
@@ -170,7 +170,7 @@ pip install st-supabase-connection
key="YOUR_SUPABASE_KEY", # not needed if provided as a streamlit secret
)
```
3. Use in your app to query tables and files. Happy Streamlit-ing! :balloon:
3. Use in your app to query tables and files, and add authentication. Happy Streamlit-ing! :balloon:
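   A minimal end-to-end sketch (the connection name and the `countries` table are illustrative, following the examples below):
   ```python
   import streamlit as st

   from st_supabase_connection import SupabaseConnection, execute_query

   st_supabase_client = st.connection(
       name="supabase_connection",
       type=SupabaseConnection,
   )

   # Cached SELECT -- see the `ttl` notes below for how caching is controlled
   response = execute_query(st_supabase_client.table("countries").select("*"), ttl="1h")

   st.dataframe(response.data)
   ```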

## :ok_hand: Supported methods
<details close>
@@ -197,7 +197,7 @@ pip install st-supabase-connection
<details close>
<summary> Database </summary>
<ul>
<li> <code>query()</code> - Runs a cached SELECT query </li>
<li> <code>execute_query()</code> - Executes the passed query with caching enabled. </li>
<li> All methods supported by <a href="https://postgrest-py.readthedocs.io/en/latest/api/request_builders.html">postgrest-py</a>. </li>
</ul>
</details>
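Migrating from v1.x: the removed `query()` helper maps onto `execute_query()` roughly as follows (a sketch mirroring the database examples below):

```python
# v1.x (removed in v2.0.0):
# st_supabase_client.query("*", table="countries", ttl=0).execute()

# v2.0.0:
execute_query(st_supabase_client.table("countries").select("*"), ttl=0)
```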

@@ -300,7 +300,7 @@ SyncBucket(id='new_bucket', name='new_bucket', owner='', public=True, created_at
### :file_cabinet: Database operations
#### Simple query
```python
>>> st_supabase_client.query("*", table="countries", ttl=0).execute()
>>> execute_query(st_supabase_client.table("countries").select("*"), ttl=0)
APIResponse(
data=[
{"id": 1, "name": "Afghanistan"},
@@ -312,20 +312,25 @@
```
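The `ttl` argument used throughout these examples controls caching as follows (an illustrative summary; `builder` stands for any postgrest request builder):

```python
execute_query(builder, ttl=0)     # always fetch the latest results (recommended for DML)
execute_query(builder, ttl="1h")  # cache results for one hour
execute_query(builder, ttl=None)  # cache indefinitely
```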
#### Query with join
```python
>>> st_supabase_client.query("name, teams(name)", table="users", count="exact", ttl="1h").execute()
>>> execute_query(
st_supabase_client.table("users").select("name, teams(name)", count="exact"),
ttl="1h",
)

APIResponse(
data=[
{"name": "Kiran", "teams": [{"name": "Green"}, {"name": "Blue"}]},
{"name": "Evan", "teams": [{"name": "Blue"}]},
],
count=None,
count=2,
)
```
#### Filter through foreign tables
```python
>>> st_supabase_client.query("name, countries(*)", count="exact", table="cities", ttl=None).eq(
"countries.name", "Curaçao"
).execute()
>>> execute_query(
st_supabase_client.table("cities").select("name, countries(*)", count="exact").eq("countries.name", "Curaçao"),
ttl=None,
)

APIResponse(
data=[
@@ -348,9 +353,13 @@

#### Insert rows
```python
>>> st_supabase_client.table("countries").insert(
[{"name": "Wakanda", "iso2": "WK"}, {"name": "Wadiya", "iso2": "WD"}], count="None"
).execute()
>>> execute_query(
st_supabase_client.table("countries").insert(
[{"name": "Wakanda", "iso2": "WK"}, {"name": "Wadiya", "iso2": "WD"}], count="None"
),
ttl=0,
)

APIResponse(
data=[
{
96 changes: 57 additions & 39 deletions demo/app.py
@@ -4,7 +4,7 @@
import streamlit as st
from st_social_media_links import SocialMediaIcons

from st_supabase_connection import SupabaseConnection, __version__
from st_supabase_connection import SupabaseConnection, __version__, execute_query

VERSION = __version__

@@ -53,7 +53,7 @@
)

if st.button(
"Clear the cache to fetch latest data🧹",
"Clear cache to fetch latest data🧹",
use_container_width=True,
type="primary",
):
@@ -298,11 +298,11 @@
)
ttl = None if ttl == "" else ttl
constructed_storage_query = f"""st_supabase.{operation}("{bucket_id}", {ttl=})"""
st.session_state["storage_disabled"] = False if bucket_id else True
st.session_state["storage_disabled"] = bool(not bucket_id)

elif operation in ["delete_bucket", "empty_bucket"]:
constructed_storage_query = f"""st_supabase.{operation}("{bucket_id}")"""
st.session_state["storage_disabled"] = False if bucket_id else True
st.session_state["storage_disabled"] = bool(not bucket_id)

elif operation == "create_bucket":
col1, col2, col3, col4 = st.columns(4)
@@ -335,7 +335,7 @@
)

constructed_storage_query = f"""st_supabase.create_bucket('{bucket_id}',{name=},{file_size_limit=},allowed_mime_types={allowed_mime_types},{public=})"""
st.session_state["storage_disabled"] = False if bucket_id else True
st.session_state["storage_disabled"] = bool(not bucket_id)

elif operation == "update_bucket":
if bucket_id:
@@ -412,7 +412,7 @@
constructed_storage_query = f"""
st_supabase.{operation}("{bucket_id}", {source=}, {file=}, destination_path="{destination_path}", {overwrite=})
"""
st.session_state["storage_disabled"] = False if all([bucket_id, file]) else True
st.session_state["storage_disabled"] = bool(not all([bucket_id, file]))
elif operation == "list_buckets":
ttl = st.text_input(
"Results cache duration",
@@ -441,7 +441,7 @@
constructed_storage_query = (
f"""st_supabase.{operation}("{bucket_id}", {source_path=}, {ttl=})"""
)
st.session_state["storage_disabled"] = False if all([bucket_id, source_path]) else True
st.session_state["storage_disabled"] = bool(not all([bucket_id, source_path]))

elif operation == "move":
from_path = st.text_input(
@@ -457,9 +457,7 @@
f"""st_supabase.{operation}("{bucket_id}", {from_path=}, {to_path=})"""
)

st.session_state["storage_disabled"] = (
False if all([bucket_id, from_path, to_path]) else True
)
st.session_state["storage_disabled"] = bool(not all([bucket_id, from_path, to_path]))
elif operation == "remove":
paths = st.text_input(
"Enter the paths of the objects in the bucket to remove",
@@ -468,7 +466,7 @@
)
constructed_storage_query = f"""st_supabase.{operation}("{bucket_id}", paths={paths})"""

st.session_state["storage_disabled"] = False if all([bucket_id, paths]) else True
st.session_state["storage_disabled"] = bool(not all([bucket_id, paths]))
elif operation == "list_objects":
lcol, rcol = st.columns([3, 1])
path = lcol.text_input(
@@ -797,7 +795,7 @@
placeholder="countries",
)

lcol, mcol, rcol = st.columns(3)
lcol, mcol, rcol = st.columns([2, 2, 3])
request_builder = lcol.selectbox(
"Select the query type",
options=["select", "insert", "upsert", "update", "delete"],
@@ -820,41 +818,66 @@
placeholder = (
value
) = """[{"name":"Wakanda","iso2":"WK"},{"name":"Wadiya","iso2":"WD"}]"""
upsert = rcol_placeholder.checkbox(
rcol1, rcol2 = rcol_placeholder.columns(2)
ttl = rcol1.text_input(
"Cache duration",
value=0,
placeholder=0,
help="Set as `0` to always fetch the latest results (recommended for DML), or leave blank to cache indefinitely.",
)
upsert = rcol2.checkbox(
label="Upsert",
help="Whether the query should be an upsert",
)
elif request_builder == "select":
request_builder_query_label = "Enter the columns to fetch as comma-separated strings"
placeholder = value = "*"
ttl = rcol_placeholder.text_input(
"Result cache duration",
value=0,
value=None,
placeholder=None,
help="Set as `0` to always fetch the latest results, and leave blank to cache indefinitely.",
help="Set as `0` to always fetch the latest results, or leave blank to cache indefinitely.",
)
placeholder = value = "*"
elif request_builder == "delete":
request_builder_query_label = "Delete query"
placeholder = value = "Delete does not take a request builder query"
ttl = rcol_placeholder.text_input(
"Results Cache duration",
value=0,
placeholder=0,
help="Set as `0` to always fetch the latest results (recommended for DML), or leave blank to cache indefinitely.",
)
elif request_builder == "upsert":
request_builder_query_label = "Enter the rows to upsert as json (for single row) or array of jsons (for multiple rows)"
placeholder = value = """{"name":"Wakanda","iso2":"WK", "continent":"Africa"}"""
ignore_duplicates = rcol_placeholder.checkbox(
rcol1, rcol2 = rcol_placeholder.columns(2)
ttl = rcol1.text_input(
"Cache duration",
value=0,
placeholder=0,
help="Set as `0` to always fetch the latest results (recommended for DML), or leave blank to cache indefinitely.",
)
ignore_duplicates = rcol2.checkbox(
label="Ignore duplicates",
help="Whether duplicate rows should be ignored",
)
elif request_builder == "update":
request_builder_query_label = "Enter the rows to update as json (for single row) or array of jsons (for multiple rows)"
placeholder = value = """{"iso3":"N/A","continent":"N/A"}"""
ttl = rcol_placeholder.text_input(
"Result cache duration",
value=0,
placeholder=0,
help="Set as `0` to always fetch the latest results (recommended for DML), or leave blank to cache indefinitely.",
)

request_builder_query = st.text_input(
label=request_builder_query_label,
placeholder=placeholder,
value=value,
help="[RequestBuilder API reference](https://postgrest-py.readthedocs.io/en/latest/api/request_builders.html#postgrest.AsyncRequestBuilder)",
disabled=request_builder == "delete",
)

if request_builder == "upsert" and not ignore_duplicates:
on_conflict = st.text_input(
label="Enter the columns to be considered UNIQUE in case of conflicts as comma-separated values",
@@ -876,7 +899,7 @@

if request_builder not in ["insert", "update", "upsert"]:
operators = st.text_input(
label="Chain any operators and filters you want 🔗",
label="Chain any modifiers and filters you want 🔗",
value=""".eq("continent","Asia").order("name",desc=True).limit(5)""",
placeholder=""".eq("continent","Asia").order("name",desc=True).limit(5)""",
help="List of all available [operators](https://postgrest-py.readthedocs.io/en/latest/api/request_builders.html#postgrest.AsyncSelectRequestBuilder) and [filters](https://postgrest-py.readthedocs.io/en/latest/api/filters.html#postgrest.AsyncFilterRequestBuilder)",
@@ -887,18 +910,16 @@
ttl = None if ttl == "" else ttl

if operators:
if request_builder == "select":
constructed_db_query = f"""st_supabase.query({request_builder_query}, {table=}, {ttl=}){operators}.execute()"""
else:
constructed_db_query = f"""st_supabase.table("{table}").{request_builder}({request_builder_query}){operators}.execute()"""
constructed_db_query = (
    f"""execute_query(st_supabase.table("{table}").select({request_builder_query}){operators}, {ttl=})"""
    if request_builder == "select"
    else f"""execute_query(st_supabase.table("{table}").{request_builder}({request_builder_query}){operators}, {ttl=})"""
)
elif request_builder == "select":
constructed_db_query = f"""execute_query(st_supabase.table("{table}").select({request_builder_query}), {ttl=})"""
else:
if request_builder == "select":
constructed_db_query = (
f"""st_supabase.query({request_builder_query}, {table=}, {ttl=}).execute()"""
)
else:
constructed_db_query = f"""st_supabase.table("{table}").{request_builder}({request_builder_query}).execute()"""
st.write("**Constructed code**")
constructed_db_query = f"""execute_query(st_supabase.table("{table}").{request_builder}({request_builder_query}), {ttl=})"""
st.write("**Constructed query**")
st.code(constructed_db_query)

lcol, rcol = st.columns([2, 1])
@@ -909,7 +930,7 @@
)

if rcol.button(
"Run query 🏃",
"Execute query 🏃",
use_container_width=True,
type="primary",
disabled=st.session_state["project"] == "demo"
@@ -921,16 +942,16 @@
key="run_db_query",
):
try:
data, count = eval(constructed_db_query)
response = eval(constructed_db_query)

if count_method:
st.write(
f"**{count[-1]}** rows {request_builder}ed. `count` does not take `limit` into account."
f"**{response.count}** rows {request_builder}ed. `count` does not take `limit` into account."
)
if view == "Dataframe":
st.dataframe(data[-1], use_container_width=True)
st.dataframe(response.data, use_container_width=True)
else:
st.write(data[-1])
st.write(response.data)
except ValueError:
if count_method == "planned":
st.error(
@@ -1076,13 +1097,10 @@
else:
raise Exception("No logged-in user session. Log in or sign up first.")

elif auth_operation == "sign_out":
auth_success_message = None

if auth_success_message:
st.success(auth_success_message)

if response != None:
if response is not None:
with st.expander("JSON response"):
st.write(response.dict())

1 change: 1 addition & 0 deletions demo/requirements.txt
@@ -1,2 +1,3 @@
st-social-media-links
st_supabase_connection
streamlit<1.34 # TODO: Update app to remove components.v1 usage
