diff --git a/.github/workflows/black.yml b/.github/workflows/black.yml index 42e9fba1..d8ffb339 100644 --- a/.github/workflows/black.yml +++ b/.github/workflows/black.yml @@ -1,6 +1,9 @@ name: Black Formatter -on: [pull_request] +on: + pull_request: + paths: + - "**.py" jobs: lint: diff --git a/.github/workflows/docker-test.yml b/.github/workflows/docker-test.yml index 3b14c7ef..976174bf 100644 --- a/.github/workflows/docker-test.yml +++ b/.github/workflows/docker-test.yml @@ -1,12 +1,27 @@ name: Docker Build Test -on: [pull_request] +on: + pull_request: + paths: + - "**.py" + - "Dockerfile" + - "run.sh" + - "requirements.txt" jobs: build: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 + - name: "Build Docker Image" run: | - docker build . + docker build -t labconnect-backend . + + - name: "Run Docker Container" + run: | + docker run -d --name labconnect-backend-container labconnect-backend + sleep 60 + docker logs labconnect-backend-container + docker stop labconnect-backend-container + docker rm labconnect-backend-container diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index ed957d6e..91267bca 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -1,6 +1,10 @@ name: Pytest -on: [pull_request] +on: + pull_request: + paths: + - "**.py" + - "requirements.txt" jobs: build: diff --git a/.gitignore b/.gitignore index 4debafbe..748c3b62 100644 --- a/.gitignore +++ b/.gitignore @@ -5,4 +5,4 @@ env/ *.DS_Store *.db .coverage -*.vscode \ No newline at end of file +*.vscode diff --git a/Dockerfile b/Dockerfile index abb120f3..627f70c1 100644 --- a/Dockerfile +++ b/Dockerfile @@ -2,19 +2,22 @@ FROM --platform=linux/amd64 python:3.12.4-alpine3.20 WORKDIR /app +RUN apk add --no-cache postgresql-dev gcc python3-dev musl-dev + COPY requirements.txt . RUN pip install --no-cache-dir -r requirements.txt -COPY labconnect /app/labconnect -COPY run.py . +COPY labconnect . +COPY app.py . COPY db_init.py . COPY config.py . 
COPY run.sh . +COPY migrations . RUN chmod +x run.sh HEALTHCHECK --interval=10s --timeout=5s --start-period=5s --retries=3 \ - CMD wget --no-verbose --tries=1 --spider http://localhost:8000 || exit 1 + CMD wget --no-verbose --tries=1 --spider http://0.0.0.0:9000 || exit 1 -EXPOSE 8000 +EXPOSE 9000 -CMD ["./run.sh"] +CMD ["/app/run.sh"] diff --git a/Makefile b/Makefile index 4812889a..e0d3b71d 100644 --- a/Makefile +++ b/Makefile @@ -5,7 +5,7 @@ clean: pystarter clean run: - gunicorn run:app -w 6 --preload --max-requests-jitter 300 --bind 0.0.0.0:8000 + gunicorn app:app -w 6 --preload --max-requests-jitter 300 --bind 0.0.0.0:9000 develop: python3 run.py @@ -17,9 +17,4 @@ drop: python3 db_init.py clear create: - python3 db_init.py create - -docker-build: - docker build -t labconnect-backend . - docker tag labconnect-backend enchanter77/labconnect-backend - docker push enchanter77/labconnect-backend \ No newline at end of file + python3 db_init.py create \ No newline at end of file diff --git a/README.md b/README.md index 99cd4e10..a7d3e86e 100644 --- a/README.md +++ b/README.md @@ -103,10 +103,16 @@ ``` ## Deployment -* TBD, planning to use an RPI VM +Create PRs to release branch, upon merge a new docker container will be created and pushed. This will be planned for weekly pushes on Tuesdays. ## Production - * Run gunicorn +Use the docker container in the packages tab. 
You can set these environment variables: +- SECRET_KEY +- JWT_SECRET_KEY +- FRONTEND_URL +- DB + + * Run gunicorn to test how the service runs in production ```sh $ make run ``` @@ -125,20 +131,23 @@ Running list of contributors to the LabConnect project: ### Rensselaer Center for Open Source Development Team -- **Siddhi W** [Frontend] +- **Siddhi W** [Frontend / Backend] - **Mrunal A** [Frontend / Backend] -- **Abid T** [Frontend / Backend] -- **Sarah** [Backend] -- **Ramzey** [Backend] -- **Will** [Frontend] -- **Nelson** [Backend] +- **Sarah W** [Backend] +- **Ramzey Y** [Backend] +- **Will B** [Frontend] +- **Sidarth E** [Frontend] + ### Past Rensselaer Center for Open Source Development Team +- **Abid T** [Frontend / Backend] +- **Nelson** [Backend] - **Duy L** [Database Systems] - **Yash K** [Frontend] - **Sam B** [Scraping / Integration] + ### Special Thanks We extend our special thanks support and opportunity provided by the RCOS community. diff --git a/run.py b/app.py similarity index 78% rename from run.py rename to app.py index d03f9cc0..dba2c97c 100644 --- a/run.py +++ b/app.py @@ -3,4 +3,4 @@ app = create_app() if __name__ == "__main__": - app.run(port=8000) + app.run(port=9000) diff --git a/config.py b/config.py index f6ebbbb7..d9c0a679 100644 --- a/config.py +++ b/config.py @@ -25,7 +25,8 @@ class TestingConfig(Config): DEBUG = True # Using SQLLITE locally - SQLALCHEMY_DATABASE_URI = f"sqlite:///{os.path.join(basedir, 'database.db')}" + # SQLALCHEMY_DATABASE_URI = f"sqlite:///{os.path.join(basedir, 'database.db')}" + SQLALCHEMY_DATABASE_URI = "postgresql+psycopg2://postgres:root@localhost/labconnect" class ProductionConfig(Config): diff --git a/db_init.py b/db_init.py index 7c35ce42..13a59f3c 100644 --- a/db_init.py +++ b/db_init.py @@ -65,10 +65,15 @@ "School of Engineering", ), ( - "Areonautical Engineering", + "Aeronautical Engineering", "flying, need for speed", "School of Engineering", ), + ( + "Material Science", + "Creating the best 
materials", + "School of Engineering", + ), ) for row_tuple in rpi_departments_rows: @@ -78,6 +83,13 @@ db.session.add(row) db.session.commit() + class_years_rows = (2024, 2025, 2026, 2027, 2028, 2029, 2030, 2031) + + for row_item in class_years_rows: + row = ClassYears(class_year=row_item, active=True) + db.session.add(row) + db.session.commit() + lab_manager_rows = ( ("led", "Duy", "Le", "Computer Science"), ("turner", "Wes", "Turner", "Computer Science"), @@ -235,6 +247,7 @@ ("BIOL", "Biological Science"), ("MATH", "Mathematics"), ("COGS", "Cognitive Science"), + ("PHYS", "Physics"), ) for row_tuple in majors_rows: @@ -242,13 +255,6 @@ db.session.add(row) db.session.commit() - class_years_rows = (2024, 2025, 2026, 2027, 2028, 2029, 2030, 2031) - - for row_item in class_years_rows: - row = ClassYears(class_year=row_item, active=True) - db.session.add(row) - db.session.commit() - # https://www.geeksforgeeks.org/datetime-timezone-in-sqlalchemy/ # https://www.tutorialspoint.com/handling-timezone-in-python diff --git a/labconnect/__init__.py b/labconnect/__init__.py index b1c2be3f..b760d48a 100644 --- a/labconnect/__init__.py +++ b/labconnect/__init__.py @@ -5,6 +5,7 @@ # Import Flask modules from flask import Flask from flask_cors import CORS + from flask_jwt_extended import ( JWTManager, create_access_token, @@ -12,11 +13,12 @@ get_jwt_identity, ) from flask_sqlalchemy import SQLAlchemy - +from flask_migrate import Migrate from labconnect.helpers import OrJSONProvider # Create Database object db = SQLAlchemy() +migrate = Migrate() jwt = JWTManager() @@ -42,6 +44,7 @@ def initialize_extensions(app) -> None: # Since the application instance is now created, pass it to each Flask # extension instance to bind it to the Flask application instance (app) db.init_app(app) + migrate.init_app(app, db) jwt.init_app(app) app.json = OrJSONProvider(app) diff --git a/labconnect/main/opportunity_routes.py b/labconnect/main/opportunity_routes.py index 49723b28..5bb300c5 100644 --- 
a/labconnect/main/opportunity_routes.py +++ b/labconnect/main/opportunity_routes.py @@ -14,6 +14,7 @@ RecommendsClassYears, RecommendsMajors, RecommendsCourses, + User, ) from labconnect.helpers import LocationEnum @@ -21,6 +22,39 @@ from . import main_blueprint +@main_blueprint.route("/searchOpportunity/", methods=["GET"]) +def searchOpportunity(input: str): + # Perform a search + stmt = ( + db.select(Opportunities) + .where( + ( + Opportunities.search_vector.match(input) + ) # Full-text search using pre-generated tsvector + | ( + db.func.similarity(Opportunities.name, input) >= 0.1 + ) # Fuzzy search on the 'name' field + | ( + db.func.similarity(Opportunities.description, input) >= 0.1 + ) # Fuzzy search on the 'description' field + ) + .order_by( + db.func.similarity( + Opportunities.name, input + ).desc() # Order by similarity for fuzzy search results + ) + ) + + data = db.session.execute(stmt).scalars().all() + + results = [] + + for opportunity in data: + results.append(opportunity.to_dict()) + + return results + + @main_blueprint.get("/opportunity") def getOpportunity2(): @@ -121,7 +155,8 @@ def packageIndividualOpportunity(opportunityInfo): data["department"] = queryInfo[0][1].department_id for i, item in enumerate(queryInfo): - data["author"] += item[1].name + data["author"] += item[1].getName() + # data["author"] += "look at def packageIndividualOpportunity(opportunityInfo):" if i != len(queryInfo) - 1: data["author"] += ", " @@ -151,7 +186,7 @@ def packageOpportunityCard(opportunity): professorInfo = "" for i, item in enumerate(data): - professorInfo += item[1].name + professorInfo += item[1].getName() if i != len(data) - 1: professorInfo += ", " @@ -188,7 +223,8 @@ def getOpportunity(opp_id: int): return {"data": oppData} -@main_blueprint.get("/opportunity/filter") +# @main_blueprint.get("/opportunity/filter") +@main_blueprint.route("/opportunity/filter", methods=["GET", "POST"]) def filterOpportunities(): if not request.data: @@ -491,10 +527,11 @@ def 
getOpportunityByProfessor(rcs_id: str): def getProfessorOpportunityCards(rcs_id: str): if request.method == "GET": # query database for opportunity + user = db.first_or_404(db.select(User).where(User.email == rcs_id)) query = db.session.execute( db.select(Opportunities, Leads) - .where(Leads.lab_manager_id == rcs_id) + .where(Leads.lab_manager_id == user.lab_manager_id) .join(Opportunities, Leads.opportunity_id == Opportunities.id) ) diff --git a/labconnect/main/routes.py b/labconnect/main/routes.py index 80ca1184..9c2b4393 100644 --- a/labconnect/main/routes.py +++ b/labconnect/main/routes.py @@ -61,18 +61,25 @@ def profile(): return result -@main_blueprint.route("/department") -def department(): - - if not request.data: - abort(400) +@main_blueprint.get("/departments") +def departmentCards(): + data = db.session.execute( + db.select(RPIDepartments.name, RPIDepartments.school_id) + ).all() + results = [ + { + "title": department.name, + "school": department.school_id, + "image": "https://cdn-icons-png.flaticon.com/512/5310/5310672.png", + } + for department in data + ] - json_request_data = request.get_json() + return results - if not json_request_data: - abort(400) - department = json_request_data.get("department", None) +@main_blueprint.get("/departments/") +def departmentDetails(department: str): if not department: abort(400) @@ -83,37 +90,26 @@ def department(): result = department_data.to_dict() - prof_data = db.session.execute( - db.select( - LabManager.id, User.preferred_name, User.last_name, User.id.label("rcs_id") - ) - .where(LabManager.department_id == department) - .join(User, LabManager.id == User.lab_manager_id) - ).scalars() - - query = ( - db.select(Opportunities) - .where(Opportunities.active == True) - .limit(20) - .join(Leads, Opportunities.id == Leads.opportunity_id) - .join(LabManager, Leads.lab_manager_id == LabManager.id) - .distinct() - ) + prof_data = department_data.lab_managers professors = [] where_conditions = [] for prof in prof_data: 
- professors.append(prof.to_dict()) + professors.append( + { + "name": prof.getName(), + "rcs_id": prof.getEmail(), + "image": "https://www.svgrepo.com/show/206842/professor.svg", + } + ) where_conditions.append(LabManager.id == prof.id) result["professors"] = professors - query = query.where(db.or_(*where_conditions)) - data = db.session.execute(query).scalars() - opportunities = [opportunity.to_dict() for opportunity in data] - - result["opportunities"] = opportunities + result["image"] = ( + "https://t4.ftcdn.net/jpg/02/77/10/87/360_F_277108701_1JAbS8jg7Gw42dU6nz7sF72bWiCm3VMv.jpg" + ) return result @@ -184,22 +180,32 @@ def getLabManagers(): return result -@main_blueprint.route("/getProfessorProfile/", methods=["GET"]) -def getProfessorProfile(id: int): +@main_blueprint.get("/getProfessorProfile/") +def getProfessorProfile(email: int): # test code until database code is added + query = db.session.execute(db.select(User).where(User.email == email)) + data = query.all() + user = data[0][0] + lm = user.getLabManager() - # TODO: Use JOIN query - lab_manager = db.first_or_404(db.select(LabManager).where(LabManager.id == id)) - user = db.first_or_404(db.select(User).where(User.lab_manager_id == id)) + result = {} - dictionary = lab_manager.to_dict() | user.to_dict() - dictionary["image"] = ( - "https://cdn.dribbble.com/users/2033319/screenshots/12591684/media/0557608c87ed8c5a80bd5faa48c3cd71.png" - ) + dictionary = user.to_dict() + + dictionary["image"] = "https://www.svgrepo.com/show/206842/professor.svg" + dictionary["department"] = lm.department_id + dictionary["email"] = user.email dictionary["role"] = "admin" dictionary["description"] = ( - "I am the evil professor Doofenshmirtz. 
I am a professor at RPI and I am looking for students to help me with my evil schemes" + "This is the description from the backend but we need to add more fields for LabManager" + ) + + # clean data + dictionary["name"] = ( + dictionary.pop("first_name") + " " + dictionary.pop("last_name") ) + dictionary.pop("class_year") + return dictionary @@ -251,6 +257,20 @@ def getProfessorCookies(id: str): return dictionary +@main_blueprint.get("/getStaff/") +def getStaff(department: str): + query = db.session.execute( + db.select(LabManager).filter(LabManager.department_id == department) + ) + data = query.all() + dictionary = {} + for item in data: + dictionary[item[0].rcs_id] = item[0].to_dict() + dictionary[item[0].rcs_id].pop("rcs_id") + + return dictionary + + @main_blueprint.post("/changeActiveStatus") def changeActiveStatus() -> dict[str, bool]: data = request.get_json() @@ -299,7 +319,7 @@ def schools() -> list[Any]: return result -@main_blueprint.get("/departments") +@main_blueprint.get("/departmentsList") def departments() -> list[Any]: data = db.session.execute( diff --git a/labconnect/models.py b/labconnect/models.py index d9e463ba..28407e26 100644 --- a/labconnect/models.py +++ b/labconnect/models.py @@ -1,4 +1,5 @@ -from sqlalchemy import Enum +from sqlalchemy import Enum, Index, func, event +from sqlalchemy.dialects.postgresql import TSVECTOR from labconnect import db from labconnect.helpers import CustomSerializerMixin, LocationEnum, SemesterEnum @@ -52,6 +53,15 @@ class User(db.Model, CustomSerializerMixin): majors = db.relationship("UserMajors", back_populates="user") courses = db.relationship("UserCourses", back_populates="user") + def getLabManager(self): + return self.lab_manager + + def __repr__(self): + return f"" + + def __str__(self): + return f" {self.first_name} {self.last_name}" + class ManagementPermissions(db.Model): __tablename__ = "management_permissions" @@ -80,6 +90,15 @@ class LabManager(db.Model, CustomSerializerMixin): "Leads", 
back_populates="lab_manager", passive_deletes=True ) + def getUser(self): + return User.query.filter_by(lab_manager_id=self.id).all() + + def getName(self): + return self.user[0].first_name + " " + self.user[0].last_name + + def getEmail(self): + return self.user[0].email + # rpi_schools( name, description ), key: name class RPISchools(db.Model, CustomSerializerMixin): @@ -168,6 +187,21 @@ class Opportunities(db.Model, CustomSerializerMixin): "UserSavedOpportunities", back_populates="opportunity", passive_deletes=True ) + # Search Vector + search_vector = db.Column(TSVECTOR) + + __table_args__ = ( + Index("ix_opportunity_search_vector", search_vector, postgresql_using="gin"), + ) + + +@event.listens_for(Opportunities, "before_insert") +@event.listens_for(Opportunities, "before_update") +def update_search_vector(mapper, connection, target): + target.search_vector = func.to_tsvector( + "english", target.name + " " + target.description + ) + # courses( course_code, course_name ), key: course_code class Courses(db.Model, CustomSerializerMixin): diff --git a/migrations/README b/migrations/README new file mode 100644 index 00000000..17aec75f --- /dev/null +++ b/migrations/README @@ -0,0 +1,21 @@ +Single-database configuration for Flask. + +# To upgrade the database: + +1. Run $ flask db upgrade to apply current migration +2. Perform any changes to labconnect/models.py +3. Run $ flask db migrate -m "description of changes" +4. The migration will be automatically created in migrations/versions. +Review migration to ensure it is correct. +5. Run $ flask db upgrade. Your changes will now be reflected in the database + +# To downgrade the database: +This will revert the database back to the version before last upgrade + +1. In the latest migration script under migrations/versions, view the downgrade() method to see the changes +that will be made with this downgrade. +2. 
If these are the desired changes, run $ flask db downgrade + +# Official Documentation +https://flask-migrate.readthedocs.io/en/latest/ + diff --git a/migrations/alembic.ini b/migrations/alembic.ini new file mode 100644 index 00000000..ec9d45c2 --- /dev/null +++ b/migrations/alembic.ini @@ -0,0 +1,50 @@ +# A generic, single database configuration. + +[alembic] +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic,flask_migrate + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[logger_flask_migrate] +level = INFO +handlers = +qualname = flask_migrate + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/migrations/env.py b/migrations/env.py new file mode 100644 index 00000000..1a96fbfd --- /dev/null +++ b/migrations/env.py @@ -0,0 +1,108 @@ +import logging +from logging.config import fileConfig + +from flask import current_app + +from alembic import context + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. 
+fileConfig(config.config_file_name) +logger = logging.getLogger("alembic.env") + + +def get_engine(): + try: + # this works with Flask-SQLAlchemy<3 and Alchemical + return current_app.extensions["migrate"].db.get_engine() + except (TypeError, AttributeError): + # this works with Flask-SQLAlchemy>=3 + return current_app.extensions["migrate"].db.engine + + +def get_engine_url(): + try: + return get_engine().url.render_as_string(hide_password=False).replace("%", "%%") + except AttributeError: + return str(get_engine().url).replace("%", "%%") + + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +config.set_main_option("sqlalchemy.url", get_engine_url()) +target_db = current_app.extensions["migrate"].db + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def get_metadata(): + if hasattr(target_db, "metadatas"): + return target_db.metadatas[None] + return target_db.metadata + + +def run_migrations_offline(): + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure(url=url, target_metadata=get_metadata(), literal_binds=True) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online(): + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + + # this callback is used to prevent an auto-migration from being generated + # when there are no changes to the schema + # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html + def process_revision_directives(_unusedcontext, _unusedrevision, directives): + if getattr(config.cmd_opts, "autogenerate", False): + script = directives[0] + if script.upgrade_ops.is_empty(): + directives[:] = [] + logger.info("No changes in schema detected.") + + conf_args = current_app.extensions["migrate"].configure_args + if conf_args.get("process_revision_directives") is None: + conf_args["process_revision_directives"] = process_revision_directives + + connectable = get_engine() + + with connectable.connect() as connection: + context.configure( + connection=connection, target_metadata=get_metadata(), **conf_args + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/migrations/script.py.mako b/migrations/script.py.mako new file mode 100644 index 00000000..2c015630 --- /dev/null +++ b/migrations/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} diff --git a/migrations/versions/55928fddcb12_initial_migration.py b/migrations/versions/55928fddcb12_initial_migration.py new file mode 100644 index 00000000..c0e4ea6d --- /dev/null +++ b/migrations/versions/55928fddcb12_initial_migration.py @@ -0,0 +1,292 @@ +"""Initial migration. 
+ +Revision ID: 55928fddcb12 +Revises: +Create Date: 2024-09-24 16:26:33.004366 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "55928fddcb12" +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table( + "class_years", + sa.Column("class_year", sa.Integer(), nullable=False), + sa.Column("active", sa.Boolean(), nullable=True), + sa.PrimaryKeyConstraint("class_year"), + ) + op.create_table( + "courses", + sa.Column("code", sa.String(length=8), nullable=False), + sa.Column("name", sa.String(length=128), nullable=True), + sa.PrimaryKeyConstraint("code"), + ) + op.create_table( + "majors", + sa.Column("code", sa.String(length=4), nullable=False), + sa.Column("name", sa.String(length=64), nullable=True), + sa.PrimaryKeyConstraint("code"), + ) + op.create_table( + "opportunities", + sa.Column("id", sa.Integer(), autoincrement=True, nullable=False), + sa.Column("name", sa.String(length=64), nullable=True), + sa.Column("description", sa.String(length=2000), nullable=True), + sa.Column("recommended_experience", sa.String(length=500), nullable=True), + sa.Column("pay", sa.Float(), nullable=True), + sa.Column("one_credit", sa.Boolean(), nullable=True), + sa.Column("two_credits", sa.Boolean(), nullable=True), + sa.Column("three_credits", sa.Boolean(), nullable=True), + sa.Column("four_credits", sa.Boolean(), nullable=True), + sa.Column( + "semester", + sa.Enum("SPRING", "FALL", "SUMMER", name="semesterenum"), + nullable=True, + ), + sa.Column("year", sa.Integer(), nullable=True), + sa.Column("application_due", sa.Date(), nullable=True), + sa.Column("active", sa.Boolean(), nullable=False), + sa.Column("last_updated", sa.DateTime(), nullable=True), + sa.Column( + "location", + sa.Enum( + "TBD", + "AMOS_EATON", + "CARNEGIE", + "CBIS", + "CCI", + "CII", + "COGSWELL", + "DCC", + "EMPAC", + "GREENE", + "JEC", + 
"JROWL", + "LALLY", + "LINAC", + "MRC", + "PITTSBURGH", + "RICKETTS", + "SAGE", + "VCC", + "WALKER", + "WEST", + "WINSLOW", + "REMOTE", + name="locationenum", + ), + nullable=True, + ), + sa.PrimaryKeyConstraint("id"), + ) + op.create_table( + "rpi_schools", + sa.Column("name", sa.String(length=64), nullable=False), + sa.Column("description", sa.String(length=2000), nullable=True), + sa.PrimaryKeyConstraint("name"), + ) + op.create_table( + "recommends_class_years", + sa.Column("opportunity_id", sa.Integer(), nullable=False), + sa.Column("class_year", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint( + ["class_year"], ["class_years.class_year"], ondelete="CASCADE" + ), + sa.ForeignKeyConstraint( + ["opportunity_id"], ["opportunities.id"], ondelete="CASCADE" + ), + sa.PrimaryKeyConstraint("opportunity_id", "class_year"), + ) + op.create_table( + "recommends_courses", + sa.Column("opportunity_id", sa.Integer(), nullable=False), + sa.Column("course_code", sa.String(length=8), nullable=False), + sa.ForeignKeyConstraint(["course_code"], ["courses.code"], ondelete="CASCADE"), + sa.ForeignKeyConstraint( + ["opportunity_id"], ["opportunities.id"], ondelete="CASCADE" + ), + sa.PrimaryKeyConstraint("opportunity_id", "course_code"), + ) + op.create_table( + "recommends_majors", + sa.Column("opportunity_id", sa.Integer(), nullable=False), + sa.Column("major_code", sa.String(length=4), nullable=False), + sa.ForeignKeyConstraint(["major_code"], ["majors.code"], ondelete="CASCADE"), + sa.ForeignKeyConstraint( + ["opportunity_id"], ["opportunities.id"], ondelete="CASCADE" + ), + sa.PrimaryKeyConstraint("opportunity_id", "major_code"), + ) + op.create_table( + "rpi_departments", + sa.Column("name", sa.String(length=64), nullable=False), + sa.Column("description", sa.String(length=2000), nullable=True), + sa.Column("school_id", sa.String(length=64), nullable=True), + sa.ForeignKeyConstraint( + ["school_id"], + ["rpi_schools.name"], + ), + sa.PrimaryKeyConstraint("name"), + ) 
+ op.create_table( + "lab_manager", + sa.Column("id", sa.Integer(), autoincrement=True, nullable=False), + sa.Column("department_id", sa.String(length=64), nullable=True), + sa.ForeignKeyConstraint( + ["department_id"], + ["rpi_departments.name"], + ), + sa.PrimaryKeyConstraint("id"), + ) + op.create_table( + "leads", + sa.Column("lab_manager_id", sa.Integer(), nullable=False), + sa.Column("opportunity_id", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint( + ["lab_manager_id"], ["lab_manager.id"], ondelete="CASCADE" + ), + sa.ForeignKeyConstraint( + ["opportunity_id"], ["opportunities.id"], ondelete="CASCADE" + ), + sa.PrimaryKeyConstraint("lab_manager_id", "opportunity_id"), + ) + op.create_table( + "user", + sa.Column("id", sa.String(length=9), nullable=False), + sa.Column("email", sa.String(length=150), nullable=False), + sa.Column("first_name", sa.String(length=50), nullable=False), + sa.Column("last_name", sa.String(length=200), nullable=False), + sa.Column("preferred_name", sa.String(length=50), nullable=True), + sa.Column("phone_number", sa.String(length=15), nullable=True), + sa.Column("website", sa.String(length=512), nullable=True), + sa.Column("class_year", sa.Integer(), nullable=True), + sa.Column("lab_manager_id", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint( + ["class_year"], + ["class_years.class_year"], + ), + sa.ForeignKeyConstraint( + ["lab_manager_id"], + ["lab_manager.id"], + ), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("email"), + sa.UniqueConstraint("id"), + ) + op.create_table( + "management_permissions", + sa.Column("user_id", sa.String(length=9), nullable=False), + sa.Column("super_admin", sa.Boolean(), nullable=False), + sa.Column("admin", sa.Boolean(), nullable=False), + sa.Column("moderator", sa.Boolean(), nullable=False), + sa.ForeignKeyConstraint( + ["user_id"], + ["user.id"], + ), + sa.PrimaryKeyConstraint("user_id"), + ) + op.create_table( + "participates", + sa.Column("user_id", sa.String(length=9), 
nullable=False), + sa.Column("opportunity_id", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint( + ["opportunity_id"], + ["opportunities.id"], + ), + sa.ForeignKeyConstraint( + ["user_id"], + ["user.id"], + ), + sa.PrimaryKeyConstraint("user_id", "opportunity_id"), + ) + op.create_table( + "user_courses", + sa.Column("user_id", sa.String(length=9), nullable=False), + sa.Column("course_code", sa.String(length=8), nullable=False), + sa.Column("in_progress", sa.Boolean(), nullable=False), + sa.ForeignKeyConstraint( + ["course_code"], + ["courses.code"], + ), + sa.ForeignKeyConstraint( + ["user_id"], + ["user.id"], + ), + sa.PrimaryKeyConstraint("user_id", "course_code"), + ) + op.create_table( + "user_departments", + sa.Column("user_id", sa.String(length=9), nullable=False), + sa.Column("department_id", sa.String(length=64), nullable=False), + sa.ForeignKeyConstraint( + ["department_id"], + ["rpi_departments.name"], + ), + sa.ForeignKeyConstraint( + ["user_id"], + ["user.id"], + ), + sa.PrimaryKeyConstraint("user_id", "department_id"), + ) + op.create_table( + "user_majors", + sa.Column("user_id", sa.String(length=9), nullable=False), + sa.Column("major_code", sa.String(length=4), nullable=False), + sa.ForeignKeyConstraint( + ["major_code"], + ["majors.code"], + ), + sa.ForeignKeyConstraint( + ["user_id"], + ["user.id"], + ), + sa.PrimaryKeyConstraint("user_id", "major_code"), + ) + op.create_table( + "user_saved_opportunities", + sa.Column("user_id", sa.String(length=9), nullable=False), + sa.Column("opportunity_id", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint( + ["opportunity_id"], + ["opportunities.id"], + ), + sa.ForeignKeyConstraint( + ["user_id"], + ["user.id"], + ), + sa.PrimaryKeyConstraint("user_id", "opportunity_id"), + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_table("user_saved_opportunities") + op.drop_table("user_majors") + op.drop_table("user_departments") + op.drop_table("user_courses") + op.drop_table("participates") + op.drop_table("management_permissions") + op.drop_table("user") + op.drop_table("leads") + op.drop_table("lab_manager") + op.drop_table("rpi_departments") + op.drop_table("recommends_majors") + op.drop_table("recommends_courses") + op.drop_table("recommends_class_years") + op.drop_table("rpi_schools") + op.drop_table("opportunities") + op.drop_table("majors") + op.drop_table("courses") + op.drop_table("class_years") + # ### end Alembic commands ### diff --git a/requirements.txt b/requirements.txt index 2ddf0d43..0a7a5676 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,35 +1,32 @@ -annotated-types==0.7.0 -blinker==1.7.0 +alembic==1.13.3 +blinker==1.8.2 click==8.1.7 -coverage==7.5.4 -dnspython==2.6.1 -email_validator==2.1.1 +coverage==7.6.1 Flask==3.0.3 Flask-Cors==5.0.0 Flask-JWT-Extended==4.6.0 +Flask-Migrate==4.0.7 Flask-SQLAlchemy==3.1.1 gunicorn==23.0.0 -idna==3.7 iniconfig==2.0.0 isodate==0.6.1 itsdangerous==2.2.0 Jinja2==3.1.4 -lxml==5.2.2 +lxml==5.3.0 +Mako==1.3.5 MarkupSafe==2.1.5 -orjson==3.10.3 +orjson==3.10.7 packaging==24.1 pluggy==1.5.0 +psycopg2==2.9.9 psycopg2-binary==2.9.9 -pydantic==2.7.0 -pydantic_core==2.18.1 PyJWT==2.9.0 -PyStarter==1.5.1 -pytest==8.2.2 +pytest==8.3.3 pytest-cov==5.0.0 python3-saml==1.16.0 -pytz==2024.1 +pytz==2024.2 +setuptools==70.3.0 six==1.16.0 -spectree==1.2.9 SQLAlchemy==2.0.29 sqlalchemy-serializer==1.4.22 typing_extensions==4.12.2 diff --git a/run.sh b/run.sh old mode 100644 new mode 100755 index 256c37d5..2e8f4acf --- a/run.sh +++ b/run.sh @@ -1,5 +1,5 @@ -#! /bin/bash +#! /bin/sh # Eventually add alembic migrations here - -gunicorn run:app -w 6 --preload --max-requests-jitter 300 --bind 0.0.0.0:8000 \ No newline at end of file +flask db upgrade +gunicorn app:app -w 6 --preload --max-requests-jitter 300 --bind 0.0.0.0:9000