Skip to content

Commit

Permalink
πŸ‘·β€β™‚οΈβ„οΈ ↝ Merge pull request #3 from DeSci-md/actions
Browse files Browse the repository at this point in the history
πŸŒŒπŸ‘·β€β™‚οΈ ↝ Adding Docker, Flake & Actions config
  • Loading branch information
Gizmotronn authored Feb 17, 2023
2 parents d34a118 + 5672e46 commit c72a382
Show file tree
Hide file tree
Showing 19 changed files with 508 additions and 1 deletion.
Binary file added .github/.DS_Store
Binary file not shown.
23 changes: 23 additions & 0 deletions .github/workflows/nodemon.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
# Template usage of the actions/setup-node action with every optional input
# listed; all values are left blank, so the action's own defaults apply.
# NOTE(review): indentation was flattened by the page scrape; the real file
# presumably indents `uses:`/`with:` under the step and the inputs under `with:`.
# NOTE(review): 'actions/[email protected]' looks like an email-obfuscation artifact
# of the scrape; the real ref is presumably actions/setup-node@v3.x -- confirm.
- name: Setup Node.js environment
uses: actions/[email protected]
with:
# Set always-auth in npmrc.
always-auth: # optional, default is false
# Version Spec of the version to use. Examples: 12.x, 10.15.1, >=10.15.0.
node-version: # optional
# File containing the version Spec of the version to use. Examples: .nvmrc, .node-version, .tool-versions.
node-version-file: # optional
# Target architecture for Node to use. Examples: x86, x64. Will use system architecture by default.
architecture: # optional
# Set this option if you want the action to check for the latest available version that satisfies the version spec.
check-latest: # optional
# Optional registry to set up for auth. Will set the registry in a project level .npmrc and .yarnrc file, and set up auth to read in from env.NODE_AUTH_TOKEN.
registry-url: # optional
# Optional scope for authenticating against scoped registries. Will fall back to the repository owner when using the GitHub Packages registry (https://npm.pkg.github.com/).
scope: # optional
# Used to pull node distributions from node-versions. Since there's a default, this is typically not supplied by the user. When running this action on github.com, the default value is sufficient. When running on GHES, you can pass a personal access token for github.com if you are experiencing rate limiting.
token: # optional, default is ${{ github.server_url == 'https://github.com' && github.token || '' }}
# Used to specify a package manager for caching in the default directory. Supported values: npm, yarn, pnpm.
cache: # optional
# Used to specify the path to a dependency file: package-lock.json, yarn.lock, etc. Supports wildcards or a list of file names for caching multiple dependencies.
cache-dependency-path: # optional
36 changes: 36 additions & 0 deletions .github/workflows/python-pipeline.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
# CI pipeline for the text-extraction code: lint with flake8, test with pytest.
# (Indentation restored -- the scraped copy was flattened and not valid YAML.)
name: Python Container for Text Extraction

on:
  push:
    branches: [ "main" ]
  pull_request:
    branches: [ "main" ]

permissions:
  contents: read

jobs:
  build:

    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v3
    - name: Set up Python 3.10
      uses: actions/setup-python@v3
      with:
        python-version: "3.10"
    - name: Install dependencies
      run: |
        python -m pip install --upgrade pip
        pip install flake8 pytest
        if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
    - name: Lint with flake8
      run: |
        # stop the build if there are Python syntax errors or undefined names
        flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
        # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
        flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
    - name: Test with pytest
      run: |
        pytest
3 changes: 3 additions & 0 deletions .vscode/settings.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
{
"jira-plugin.workingProject": ""
}
102 changes: 102 additions & 0 deletions Ansible/classify.ipynb

Large diffs are not rendered by default.

44 changes: 44 additions & 0 deletions Ansible/habanero.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
"""
Script for extracting machine readable text from a web publication/paper which
needs to be downloaded
Adapted from the process described by Wang et. al. 2022 10.1038/s41524-021-00687-2
Crossref API info: https://api.crossref.org/swagger-ui/index.html
habanero package info: https://pypi.org/project/habanero/
"""
import time # for using time.sleep() if needing to pause data requests
from habanero import Crossref
from habanero import cn


cr = Crossref()

# Setting info for mailto for contact email and user-agent for description of use case
# For attempts to get into the 'polite pool' for paper requests through the API
cr.mailto = '[email protected]'
cr.ua_string = 'Python script for retrieving paper info from query for research.'

# query section, request results based on a search
# cursor='*' alloy 'deep paging' (according to function documentation in crossref.py)
# cursor_max sets the max number of records to retrieve, by default it's 20 I think
# seems to do max results returned from deep paging in sets of 20, e.g. a request of 15 still gives 20
n = 2 # multiple of 20 for deep paging
request = cr.works(query = "Automated pipeline for superalloy data by text mining", cursor='*', cursor_max=n*20, progress_bar=True) # test query searching for papers based on a string

allowed_types = ['proceedings-article', 'book-chapter', 'dissertation', 'journal-article'] # specifying document types of parse
# print the title and the type of item from the results
for i in range(n):
for j in range(len(request[i]['message']['items'])):
if request[i]['message']['items'][j]['type'] not in allowed_types: # skipping if not a type wanted
continue
title = request[i]['message']['items'][j]['title'][0]
type = request[i]['message']['items'][j]['type']
print(f'{title}, {type}') # print the title and type of each results
time.sleep(0.25) # sleep between prints so it's not too fast


request_doi = cr.works(ids = '10.1038/s41524-021-00687-2') # search for a specific paper using a DOI number
print(request_doi['message']['title'][0])
citation = cn.content_negotiation(ids = '10.1038/s41524-021-00687-2', format = 'text') # get citation for the DOI
print(citation)
File renamed without changes.
21 changes: 21 additions & 0 deletions DOCKERFILE
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
# Flask server image. The full (non-slim) bullseye base is required: the
# slim variant broke `pipenv install` for thirdweb-sdk (see README).
FROM python:3.9.16-bullseye

# Key=value form -- the legacy `ENV key value` syntax is deprecated.
ENV PROJECT_DIR=/app

WORKDIR ${PROJECT_DIR}
RUN pip install pipenv

# Copy the Pipfile first so Docker layer caching skips the (slow)
# dependency install when only source files change.
COPY Pipfile .
RUN pipenv install

# Copy the application source last.
COPY . .

CMD ["pipenv", "run", "flask", "run", "--host=0.0.0.0"]
15 changes: 15 additions & 0 deletions Makefile
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
# Convenient shortcuts for local development

# These targets are commands, not files; declare them phony so a file with
# the same name can never shadow them.
.PHONY: build run logs stop

# Build an image from the current working copy.
build:
	docker build --tag sytizen-server .

# Start the cluster, rebuilding if necessary.
# Default port 5000 is in use on Ventura Macs; 7355 chosen for TESS.
run:
	docker compose up --build --detach

# Tail the logs for both containers.
logs:
	docker compose logs --follow

# Stop and delete the cluster.
stop:
	docker compose down
20 changes: 20 additions & 0 deletions Pipfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
[[source]]
url = "https://pypi.org/simple"
verify_ssl = true
name = "pypi"

[packages]
moralis = "*"
flask = "*"
flask_cors = "*"
python-dotenv = "*"
matplotlib = "*"
lightkurve = "*"
nbformat = "*"
gunicorn = "*"
sqlalchemy = "*"
psycopg2-binary = "*"
flask-sqlalchemy = "*"
thirdweb-sdk = "*"

[dev-packages]
46 changes: 45 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -29,4 +29,48 @@ pip install habanero
```

# Web Interface/Client
Current progress is being tracked by [signal-k/client](http://github.com/Signal-K/client/pull/19) and will be added as a git submodule once our generator & metadata smart contracts are completed
Current progress is being tracked by [signal-k/client](http://github.com/Signal-K/client/pull/19) and will be added as a git submodule once our generator & metadata smart contracts are completed

## Generative API
### Containerisation for local development

In root dir, `Dockerfile` runs the Flask Server in a container.

The `docker-compose.yml` defines a cluster with the Server and a local PostgreSQL container

For convenience, a Makefile supports the following simple operations:

* `make build` builds an image from the current working copy
* `make run` starts the cluster, rebuilding if necessary
* `make logs` tails the logs for both containers
* `make stop` stops and deletes the cluster

For rapid iteration, I use:
`make stop run logs`

#### Prerequisites

You will need to have a Docker environment available... Docker Desktop or an equivalent

#### Previous Issues

##### ThirdWeb

The build step (`make build`) fails whilst running `pipenv install` during the build of the Docker image.

`thirdweb-sdk` caused errors on `pipenv install`. The output was long and ugly; but a resolution has been found.
The problem was the use of the `slim` base image. Switching from `python:3.9.9-slim-bullseye` to `python:3.9.9-bullseye` avoided the problem.

##### Ventura - Flask default port 5000

Flask runs by default on port 5000. However, on macOS Ventura, there is a system service "AirPlay Receiver" listening on this port.

In this case, `localhost:5000` does not reach the Flask app, although `127.0.0.1:5000` does.

The easiest solution is to turn off the Airplay Receiver service; an alternative is to run Flask on a different port... perhaps 7355 for TESS?

#### Current Issue

The server responds to `http://localhost:5000` with a classic "Hello World"

Several of the blueprints in `app.py` are commented out since they have dependencies on ThirdWeb
2 changes: 2 additions & 0 deletions Server/.flaskenv
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
FLASK_APP=app.py
FLASK_DEBUG=1
28 changes: 28 additions & 0 deletions Server/app.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
from flask import Flask, request, make_response, jsonify, Blueprint
from datetime import datetime, timedelta

"""# Jupyter interaction
import io, os, sys, types # https://jupyter-notebook.readthedocs.io/en/stable/examples/Notebook/Importing%20Notebooks.html
from IPython import get_ipython
from nbformat import read
from IPython.core.interactiveshell import Interactiveshell"""

# Flask blueprints/routes
from auth.moralisHandler import moralis_handler
from contracts.planetDrop import planet_drop
from database.connection import database_connection
# from database.unity-integration import unity_database_connection
# from ansible.tic_classify import tic_classify
# from ansible.classify import lightkurve_handler

# Create the Flask application and mount each feature area's blueprint
# under its own URL prefix.
app = Flask(__name__)
app.register_blueprint(moralis_handler, url_prefix='/moralis-auth')  # auth routes (Moralis)
app.register_blueprint(planet_drop, url_prefix='/planets')           # planet NFT drop routes
app.register_blueprint(database_connection, url_prefix='/database')  # PostgreSQL-backed routes
# Disabled blueprints -- per the README these depend on ThirdWeb / notebook tooling:
# app.register_blueprint(unity-database_connection, url_prefix='/database-unity')
# app.register_blueprint(tic_classify, url_prefix='/lightkurve')
# app.register_blueprint(lightkurve__handler, url_prefix='/lightkurve-handle')

@app.route('/')
def index():
    """Root route: a simple liveness check confirming the server is up."""
    greeting = "Hello World"
    return greeting
38 changes: 38 additions & 0 deletions Server/database/connection.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
from flask import Blueprint, request
from dotenv import load_dotenv
import psycopg2
import os

database_connection = Blueprint('database_connection', __name__)

# Load DATABASE_URL from the environment (.env supported via python-dotenv).
load_dotenv()
url = os.getenv("DATABASE_URL")
if not url:
    # Fail fast with a clear message instead of a cryptic psycopg2 error.
    raise RuntimeError("DATABASE_URL environment variable is not set")
# NOTE: never print/log the URL itself -- it typically embeds credentials
# (the original printed it on every import).
connection = psycopg2.connect(dsn=url, user='postgres')

# PostgreSQL queries
CREATE_USERS_TABLE = (
    'CREATE TABLE IF NOT EXISTS users (id SERIAL PRIMARY KEY, address TEXT);'
)
CREATE_PLANETS_TABLE = (
    """CREATE TABLE IF NOT EXISTS planets (user_id INTEGER, temperature REAL, date TIMESTAMP, FOREIGN KEY(user_id) REFERENCES users(id) ON DELETE CASCADE);"""
)
INSERT_USER_RETURN_ID = 'INSERT INTO users (address) VALUES (%s) RETURNING id;'
INSERT_PLANET = (
    'INSERT INTO planets (user_id, temperature, date) VALUES (%s, %s, %s);'
)

# User Management
@database_connection.post('/api/user')
def addUser():
data = request.get_json()
address = data['address']

# Connect to the database
with connection:
with connection.cursor() as cursor:
cursor.execute(CREATE_USERS_TABLE)
cursor.execute(INSERT_USER_RETURN_ID, (address,))
user_id = cursor.fetchone()[0]

return {'id': user_id, 'message': f"User {address} created"}, 201
27 changes: 27 additions & 0 deletions Server/database/retrieveData.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Networking;

// Fetches the Unity database listing from the local Flask server and logs
// the response (or the network error).
public class GetMe : MonoBehaviour {

    // Entry point: kick off the GET request as a coroutine.
    public void GetData()
    {
        StartCoroutine(GetRequest("http://127.0.0.1:5000/unity-database_connection/list"));
    }

    IEnumerator GetRequest(string uri)
    {
        // 'using' disposes the request (and its download handler) even if the
        // coroutine is interrupted -- the original leaked the UnityWebRequest.
        using (UnityWebRequest uwr = UnityWebRequest.Get(uri))
        {
            yield return uwr.SendWebRequest();

            // NOTE(review): isNetworkError is deprecated in newer Unity versions;
            // prefer uwr.result == UnityWebRequest.Result.ConnectionError there -- confirm
            // the project's Unity version before switching.
            if (uwr.isNetworkError)
            {
                Debug.Log("Error While Sending: " + uwr.error);
            }
            else
            {
                Debug.Log("Received: " + uwr.downloadHandler.text);
            }
        }
    }
}
69 changes: 69 additions & 0 deletions contracts/serverDropConnection.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
from flask import Blueprint, request
from thirdweb import ThirdwebSDK

planet_drop = Blueprint('planet_drop', __name__)

# Get NFT balance for Planet Edition Drop (https://thirdweb.com/goerli/0xdf35Bb26d9AAD05EeC5183c6288f13c0136A7b43/code)
@planet_drop.route('/balance')
def get_balance():
    """Return this wallet's balance of token 0 in the Planet Edition Drop.

    Contract: https://thirdweb.com/goerli/0xdf35Bb26d9AAD05EeC5183c6288f13c0136A7b43/code
    """
    # Connect to the Planet Edition Drop contract on the Goerli testnet.
    sdk = ThirdwebSDK("goerli")
    edition_drop = sdk.get_edition_drop("0xdf35Bb26d9AAD05EeC5183c6288f13c0136A7b43")

    wallet = "0xCdc5929e1158F7f0B320e3B942528E6998D8b25c"
    return str(edition_drop.balance_of(wallet, 0))

@planet_drop.route('/get_planet')
def get_planet():
    """Return the metadata URI for token id 0 of the planet-candidate contract."""
    sdk = ThirdwebSDK('goerli')
    planet_contract = sdk.get_contract("0x766215a318E2AD1EbdC4D92cF2A3b70CBedeac31")
    # Token id 0 corresponds to TIC 55525572.
    uri = planet_contract.call("uri", 0)
    return str(uri)

@planet_drop.route('/mint_planet', methods=["GET", "POST"])
def create_planet():
    """Placeholder endpoint for minting a planet NFT from a discovery.

    The IPFS gateway metadata for token 0 (TIC 55525572) carries name,
    description, image, and traits (mass_earth, orbital_period, eccentricity,
    detection_method, orbital_radius, radius_jupiter, distance_earth, ...).
    Multiple instances of the same ID (with identical traits) will be minted,
    one per person, since each planet instance appears and is manipulated
    differently by users.

    Returns 501 until the lazyMint call is wired up -- the original implicitly
    returned None, which makes Flask raise a 500 at runtime.
    """
    network = 'goerli'
    sdk = ThirdwebSDK(network)
    contract = sdk.get_contract("0x766215a318E2AD1EbdC4D92cF2A3b70CBedeac31")
    # TODO: data = contract.call("lazyMint", _amount, _baseURIForTokens, _data) (POST data)
    # Interaction flow -> https://www.notion.so/skinetics/Sample-Planets-Contract-4c3bdcbca4b9450382f9cc4e72e081f7
    return {'message': 'mint_planet is not implemented yet'}, 501

"""
# Flask/api routes
@app.route('/planet')
def planet():
return jsonify({'planet' : 'planet'})
@app.post('/select_planet')
def select_planet():
data = request.get_json()
planetId = data['planetId']
planetName = data['planetName']
planetTic = data['planetTic']
sector_data = lk.search_lightcurve(planetTic, author = 'SPOC', sector = 23)
#lc = sector_data.download()
#lc.plot()
return sector_data
# Show planet data on frontend
@app.post('/show_planet') # Can we do some calculation for nft revealing using this (i.e. mint nft after classification)
def show_tic():
lc = sector_data.plot()
return lc
@app.post('/mint-planet')
def mint_planet():
data = request.get_json()
_receiver = data['profileAddress']
_tokenId = data['tokenId']
_quantity = 1
data = contract.call("claim", _receiver, _tokenId, _quantity)
app.run(host='0.0.0.0', port=8080)
"""
Loading

0 comments on commit c72a382

Please sign in to comment.