Build related fixes
akariv committed Nov 11, 2024
1 parent 9b3baec · commit 1ec9d40
Showing 9 changed files with 30 additions and 20 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/ci.yaml
@@ -17,7 +17,7 @@ jobs:
else
CACHE_FROM_ARG=""
fi &&\
docker build $CACHE_FROM_ARG --build-arg VERSION=${GITHUB_SHA} -t app -f docker/Dockerfile . &&\
docker build $CACHE_FROM_ARG --build-arg VERSION=${GITHUB_SHA} -t app -f docker/api-server/Dockerfile . &&\
docker tag app "${DOCKER_APP_IMAGE_NAME}:${GITHUB_SHA}" &&\
docker push "${DOCKER_APP_IMAGE_NAME}:${GITHUB_SHA}" &&\
if [ "${GITHUB_REF}" == "refs/heads/main" ]; then
4 changes: 3 additions & 1 deletion compose.dev.yaml
@@ -12,10 +12,11 @@ services:
dockerfile: docker/api-server/Dockerfile
context: .
ports:
- "8000:80"
- "8000:8000"
volumes:
- ./odds.config.yaml:/srv/odds.config.yaml
- ./.caches/:/srv/.caches/
- certs:/docker/certs
command: utils/api_server.sh
db:
image: postgres:16
@@ -82,6 +83,7 @@ services:
until curl -s --cacert config/certs/ca/ca.crt https://es01:9200 | grep -q "missing authentication credentials"; do sleep 30; done;
echo "Setting kibana_system password";
until curl -s -X POST --cacert config/certs/ca/ca.crt -u "elastic:${ELASTIC_PASSWORD}" -H "Content-Type: application/json" https://es01:9200/_security/user/kibana_system/_password -d "{\"password\":\"${KIBANA_PASSWORD}\"}" | grep -q "^{}"; do sleep 10; done;
sleep 10;
echo "All done!";
'
healthcheck:
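
Earlier in this file, the api-server port mapping changes from "8000:80" to "8000:8000", i.e. the dev container is now expected to listen on port 8000 directly. The utils/api_server.sh script it runs is not part of this diff; a hypothetical Python equivalent of what it presumably boils down to (the module path odds.api.server:app is inferred from odds/api/server.py further down):

    # Hypothetical sketch only; utils/api_server.sh itself is not shown in this commit.
    # The relevant point is binding to port 8000 inside the container, which is what
    # makes the "8000:8000" mapping line up.
    import uvicorn

    if __name__ == "__main__":
        uvicorn.run("odds.api.server:app", host="0.0.0.0", port=8000)
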
13 changes: 12 additions & 1 deletion docker/api-server/Dockerfile
@@ -1,4 +1,14 @@
# Pulled July 19, 2023
FROM node:lts-bookworm-slim AS frontend

WORKDIR /app

COPY ui/package*json /app/
RUN apt-get update
RUN apt-get install -y ca-certificates
RUN npm install
COPY ui/ /app/
RUN npm run -- ng build

FROM --platform=linux/amd64 python:3.12
RUN apt-get update && apt-get install -y libleveldb-dev ca-certificates && apt-get clean
RUN pip install --upgrade pip
@@ -10,6 +20,7 @@ COPY setup.py ./
RUN pip install -e .
COPY utils ./utils
ENV PYTHONUNBUFFERED=1
COPY --from=frontend /app/dist/ask/browser /srv/ui
RUN mkdir /temp-cache/ && chmod 777 /temp-cache/
ENV RESOURCE_PROCESSOR_CACHE_DIR=/temp-cache
ENTRYPOINT ["/bin/bash", "-c"]
4 changes: 4 additions & 0 deletions odds/api/server.py
@@ -1,5 +1,6 @@
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from fastapi.staticfiles import StaticFiles
from typing import List, Dict, Any, Optional

from .answer import answer_question
@@ -23,6 +24,9 @@
allow_headers=["*"],
)

# Serve static files from the 'ui' directory on the '/' endpoint
app.mount("/", StaticFiles(directory="ui", html=True), name="static")

@app.get("/datasets")
async def search_datasets_handler(query: str) -> List[Dict[str, Any]]:
return await search_datasets(query, None)
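
The new mount serves the compiled Angular UI that the Dockerfile above copies into /srv/ui, so the same FastAPI process now hosts both the API and the frontend, which is also why the UI can switch to the relative "/answer" URL at the end of this commit. A minimal sketch of the pattern, with placeholder names where this diff does not show them; note that Starlette matches routes in registration order, so the placement of a catch-all "/" mount relative to the API routes matters:

    # Minimal sketch, assuming the compiled UI sits in ./ui relative to the working
    # directory; not the project's actual server module.
    from fastapi import FastAPI
    from fastapi.staticfiles import StaticFiles

    app = FastAPI()

    @app.get("/answer")
    async def answer(q: str) -> dict:
        return {"answer": f"echo: {q}"}  # placeholder handler

    # html=True makes "/" serve ui/index.html; the mount catches paths not handled above.
    app.mount("/", StaticFiles(directory="ui", html=True), name="static")
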
2 changes: 1 addition & 1 deletion odds/common/vectordb/__init__.py
@@ -1,7 +1,7 @@
from .indexer import Indexer
from ..select import select
from ..embedder import embedder
from .chromadb.chromadb_indexer import ChromaDBIndexer
# from .chromadb.chromadb_indexer import ChromaDBIndexer
from .es.es_indexer import ESIndexer

indexer: Indexer = select('Indexer', locals())(embedder.vector_size())
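
Commenting out the ChromaDBIndexer import (and importing ESIndexer instead) changes which classes the select() call can see in locals(). The select helper itself is not shown in this diff; a rough sketch of the pattern it appears to implement, picking an implementation class by name from the caller's namespace, with the configuration lookup purely assumed:

    # Rough sketch of a select()-style helper; the real one in odds.common.select is not
    # part of this diff, and presumably reads its choice from configuration.
    def select(kind: str, namespace: dict) -> type:
        configured = {"Indexer": "ESIndexer"}   # assumption: driven by odds.config.yaml
        return namespace[configured[kind]]      # returns the class; the caller instantiates it

    # Mirrors the usage above:
    #   indexer = select("Indexer", locals())(embedder.vector_size())
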
5 changes: 3 additions & 2 deletions requirements.txt
@@ -1,4 +1,3 @@
chromadb
httpx
aiofiles
sqlalchemy
@@ -14,4 +13,6 @@ plyvel
fastapi
uvicorn
aiosqlite
elasticsearch[async]
elasticsearch[async]
openai
pyyaml
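
Dropping chromadb matches the ChromaDBIndexer import being commented out above, while openai and pyyaml become explicit dependencies. How pyyaml is used is not visible in this commit; presumably it reads the odds.config.yaml file that compose.dev.yaml mounts into /srv. A generic sketch, with the key name purely hypothetical:

    # Generic sketch, not the project's actual config loader. The file name matches the
    # volume mount in compose.dev.yaml; the "Indexer" key is a made-up example.
    import yaml

    with open("odds.config.yaml") as f:
        config = yaml.safe_load(f)

    print(config.get("Indexer"))
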
12 changes: 2 additions & 10 deletions test_assistant.py
@@ -113,16 +113,8 @@

async def search_datasets(query: str):
print('SEARCH DATASETS:', query)
query_terms = query.split(',')
query_terms = [term.strip() for term in query_terms]
query_terms = [term for term in query_terms if term]
print('QUERY TERMS:', query_terms)
embeddings = await asyncio.gather(*[embedder.embed(name) for name in query_terms])
dataset_ids = await asyncio.gather(*[indexer.findDatasets(embedding) for embedding in embeddings])
dataset_ids = [x for y in dataset_ids for x in y]
dataset_ids = [x[0] for x in Counter(dataset_ids).most_common(10)]
print('DATASET IDS:', dataset_ids)
datasets = await asyncio.gather(*[store.getDataset(id) for id in dataset_ids])
embedding = await embedder.embed(query)
datasets = await indexer.findDatasets(embedding)
catalogs = [catalog_repo.get_catalog(dataset.catalogId) for dataset in datasets]
response = [
dict(
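
The search helper is simplified: instead of splitting the query into comma-separated terms, embedding each term, and merging the per-term id lists with a Counter, it now embeds the whole query once and lets indexer.findDatasets return the datasets themselves (the results are used directly as objects with a catalogId attribute, so the extra store.getDataset round-trip goes away). Inferred from the call sites in this commit, the indexer method presumably now looks roughly like this (an assumption; the Indexer base class is not part of the diff):

    # Presumed shape of the updated API, based only on how it is called in this commit.
    # tester2.py also passes the raw query text, so it is shown as an optional argument.
    from typing import List, Optional

    class Indexer:
        async def findDatasets(self, embedding: List[float], query: Optional[str] = None) -> List["Dataset"]:
            """Return matching Dataset objects directly, instead of dataset ids."""
            raise NotImplementedError
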
6 changes: 3 additions & 3 deletions tester2.py
@@ -6,10 +6,10 @@
async def main():
query = sys.argv[1]
b = await embedder.embed(query)
ids = await indexer.findDatasets(b)
datasets = [await metadata_store.getDataset(id) for id in ids]
datasets = await indexer.findDatasets(b, query=query)
# datasets = [await metadata_store.getDataset(id) for id in ids]
for dataset in datasets:
print(' - ' + dataset.better_title)
print(f' - {dataset.id}: {dataset.better_title}')


if __name__ == '__main__':
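
Here findDatasets also receives the raw query text alongside the embedding, which hints that the ESIndexer can blend vector similarity with a plain text match (its implementation is not in this diff). A hedged sketch of what such a hybrid Elasticsearch query could look like with the async client from requirements.txt; the index name, field names, and sizes are all assumptions:

    # Sketch only: index name, field names, and result sizes are assumptions, not taken
    # from the repository.
    from elasticsearch import AsyncElasticsearch

    async def find_datasets(es: AsyncElasticsearch, embedding: list[float], query: str | None = None):
        kwargs: dict = {
            "knn": {
                "field": "embedding",
                "query_vector": embedding,
                "k": 10,
                "num_candidates": 50,
            },
            "size": 10,
        }
        if query:
            # Blend lexical relevance with the vector hits (scores are combined by ES).
            kwargs["query"] = {"match": {"better_title": query}}
        result = await es.search(index="datasets", **kwargs)
        return [hit["_source"] for hit in result["hits"]["hits"]]
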
2 changes: 1 addition & 1 deletion ui/projects/ask/src/app/home/home.component.ts
@@ -40,7 +40,7 @@ export class HomeComponent {
if (this.question) {
this.loading = true;
const encoded = encodeURIComponent(this.question);
this.http.get('http://localhost:8000/answer', { params: {q: encoded }})
this.http.get('/answer', { params: {q: encoded }})
.pipe(
catchError((error) => {
this.answer = 'Error: ' + error.message;
