diff --git a/app/blueprints/webhook/routes.py b/app/blueprints/webhook/routes.py index ead3ae570..aa470de25 100644 --- a/app/blueprints/webhook/routes.py +++ b/app/blueprints/webhook/routes.py @@ -22,9 +22,7 @@ def deploy(): web_container = service.get_web_container() # Pull the latest code in the container - service.execute_container_command(web_container, 'git remote -v') - service.execute_container_command(web_container, 'git remote set-url origin https://github.com/diverso-lab/uvlhub') - service.execute_container_command(web_container, 'git pull origin main') + service.execute_container_command(web_container, '/app/scripts/git_update.sh') # Update dependencies in the container service.execute_container_command(web_container, 'pip install -r requirements.txt') @@ -34,5 +32,8 @@ def deploy(): # Log the deployment service.log_deployment(web_container) + + # Run the restart script in the background + service.restart_container(web_container) return 'Deployment successful', 200 diff --git a/app/blueprints/webhook/services.py b/app/blueprints/webhook/services.py index 94da41ffe..8b9981cce 100644 --- a/app/blueprints/webhook/services.py +++ b/app/blueprints/webhook/services.py @@ -56,3 +56,6 @@ def log_deployment(self, container): log_entry = f"Deployment successful at {datetime.now(timezone.utc).isoformat()}\n" log_file_path = "/app/deployments.log" self.execute_container_command(container, f"sh -c 'echo \"{log_entry}\" >> {log_file_path}'") + + def restart_container(self, container): + subprocess.Popen(["/bin/sh", "/app/scripts/restart_container.sh", container.id]) diff --git a/docker/images/Dockerfile.webhook b/docker/images/Dockerfile.webhook index a0448eda3..20ac32172 100644 --- a/docker/images/Dockerfile.webhook +++ b/docker/images/Dockerfile.webhook @@ -29,6 +29,12 @@ COPY requirements.txt . 
# Copy the wait-for-db.sh script and set execution permissions COPY --chmod=+x scripts/wait-for-db.sh ./scripts/ +# Copy the git_update.sh script and set execution permissions +COPY --chmod=+x scripts/git_update.sh ./scripts/ + +# Copy the restart_container.sh script and set execution permissions +COPY --chmod=+x scripts/restart_container.sh ./scripts/ + # Install any needed packages specified in requirements.txt and upgrade pip RUN pip install --no-cache-dir -r requirements.txt \ && pip install --no-cache-dir --upgrade pip \ diff --git a/migrations/versions/42141e11fb97_.py b/migrations/versions/42141e11fb97_.py new file mode 100644 index 000000000..dc1e85562 --- /dev/null +++ b/migrations/versions/42141e11fb97_.py @@ -0,0 +1,184 @@ +"""empty message + +Revision ID: 42141e11fb97 +Revises: 861935baacf0 +Create Date: 2024-06-25 09:47:08.175317 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '42141e11fb97' +down_revision = '861935baacf0' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('ds_metrics', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('number_of_models', sa.String(length=120), nullable=True), + sa.Column('number_of_features', sa.String(length=120), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('fm_metrics', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('solver', sa.Text(), nullable=True), + sa.Column('not_solver', sa.Text(), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('user', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('email', sa.String(length=256), nullable=False), + sa.Column('password', sa.String(length=256), nullable=False), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('email') + ) + op.create_table('webhook', + sa.Column('id', sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('zenodo', + sa.Column('id', sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('ds_meta_data', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('deposition_id', sa.Integer(), nullable=True), + sa.Column('title', sa.String(length=120), nullable=False), + sa.Column('description', sa.Text(), nullable=False), + sa.Column('publication_type', sa.Enum('NONE', 'ANNOTATION_COLLECTION', 'BOOK', 'BOOK_SECTION', 'CONFERENCE_PAPER', 'DATA_MANAGEMENT_PLAN', 'JOURNAL_ARTICLE', 'PATENT', 'PREPRINT', 'PROJECT_DELIVERABLE', 'PROJECT_MILESTONE', 'PROPOSAL', 'REPORT', 'SOFTWARE_DOCUMENTATION', 'TAXONOMIC_TREATMENT', 'TECHNICAL_NOTE', 'THESIS', 'WORKING_PAPER', 'OTHER', name='publicationtype'), nullable=False), + sa.Column('publication_doi', sa.String(length=120), nullable=True), + sa.Column('dataset_doi', sa.String(length=120), nullable=True), + sa.Column('tags', sa.String(length=120), nullable=True), + sa.Column('ds_metrics_id', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['ds_metrics_id'], 
['ds_metrics.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('fm_meta_data', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('uvl_filename', sa.String(length=120), nullable=False), + sa.Column('title', sa.String(length=120), nullable=False), + sa.Column('description', sa.Text(), nullable=False), + sa.Column('publication_type', sa.Enum('NONE', 'ANNOTATION_COLLECTION', 'BOOK', 'BOOK_SECTION', 'CONFERENCE_PAPER', 'DATA_MANAGEMENT_PLAN', 'JOURNAL_ARTICLE', 'PATENT', 'PREPRINT', 'PROJECT_DELIVERABLE', 'PROJECT_MILESTONE', 'PROPOSAL', 'REPORT', 'SOFTWARE_DOCUMENTATION', 'TAXONOMIC_TREATMENT', 'TECHNICAL_NOTE', 'THESIS', 'WORKING_PAPER', 'OTHER', name='publicationtype'), nullable=False), + sa.Column('publication_doi', sa.String(length=120), nullable=True), + sa.Column('tags', sa.String(length=120), nullable=True), + sa.Column('uvl_version', sa.String(length=120), nullable=True), + sa.Column('fm_metrics_id', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['fm_metrics_id'], ['fm_metrics.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('user_profile', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('orcid', sa.String(length=19), nullable=True), + sa.Column('affiliation', sa.String(length=100), nullable=True), + sa.Column('name', sa.String(length=100), nullable=False), + sa.Column('surname', sa.String(length=100), nullable=False), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('user_id') + ) + op.create_table('author', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=120), nullable=False), + sa.Column('affiliation', sa.String(length=120), nullable=True), + sa.Column('orcid', sa.String(length=120), nullable=True), + sa.Column('ds_meta_data_id', sa.Integer(), nullable=True), + sa.Column('fm_meta_data_id', sa.Integer(), nullable=True), + 
sa.ForeignKeyConstraint(['ds_meta_data_id'], ['ds_meta_data.id'], ), + sa.ForeignKeyConstraint(['fm_meta_data_id'], ['fm_meta_data.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('data_set', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('ds_meta_data_id', sa.Integer(), nullable=False), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(['ds_meta_data_id'], ['ds_meta_data.id'], ), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('ds_download_record', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=True), + sa.Column('dataset_id', sa.Integer(), nullable=True), + sa.Column('download_date', sa.DateTime(), nullable=False), + sa.Column('download_cookie', sa.String(length=36), nullable=False), + sa.ForeignKeyConstraint(['dataset_id'], ['data_set.id'], ), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('ds_view_record', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=True), + sa.Column('dataset_id', sa.Integer(), nullable=True), + sa.Column('view_date', sa.DateTime(), nullable=False), + sa.Column('view_cookie', sa.String(length=36), nullable=False), + sa.ForeignKeyConstraint(['dataset_id'], ['data_set.id'], ), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('feature_model', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('data_set_id', sa.Integer(), nullable=False), + sa.Column('fm_meta_data_id', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['data_set_id'], ['data_set.id'], ), + sa.ForeignKeyConstraint(['fm_meta_data_id'], ['fm_meta_data.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('file', + sa.Column('id', sa.Integer(), nullable=False), + 
sa.Column('name', sa.String(length=120), nullable=False), + sa.Column('checksum', sa.String(length=120), nullable=False), + sa.Column('size', sa.Integer(), nullable=False), + sa.Column('feature_model_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['feature_model_id'], ['feature_model.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('file_download_record', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=True), + sa.Column('file_id', sa.Integer(), nullable=True), + sa.Column('download_date', sa.DateTime(), nullable=False), + sa.Column('download_cookie', sa.String(length=36), nullable=False), + sa.ForeignKeyConstraint(['file_id'], ['file.id'], ), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('file_view_record', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=True), + sa.Column('file_id', sa.Integer(), nullable=False), + sa.Column('view_date', sa.DateTime(), nullable=True), + sa.Column('view_cookie', sa.String(length=36), nullable=True), + sa.ForeignKeyConstraint(['file_id'], ['file.id'], ), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), + sa.PrimaryKeyConstraint('id') + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_table('file_view_record') + op.drop_table('file_download_record') + op.drop_table('file') + op.drop_table('feature_model') + op.drop_table('ds_view_record') + op.drop_table('ds_download_record') + op.drop_table('data_set') + op.drop_table('author') + op.drop_table('user_profile') + op.drop_table('fm_meta_data') + op.drop_table('ds_meta_data') + op.drop_table('zenodo') + op.drop_table('webhook') + op.drop_table('user') + op.drop_table('fm_metrics') + op.drop_table('ds_metrics') + # ### end Alembic commands ### diff --git a/scripts/git_update.sh b/scripts/git_update.sh new file mode 100755 index 000000000..58e5a7a5c --- /dev/null +++ b/scripts/git_update.sh @@ -0,0 +1,20 @@ +#!/bin/sh + +# Get the current remote URL +REMOTE_URL=$(git remote get-url origin) + +# Check if the URL is SSH +if echo "$REMOTE_URL" | grep -q "git@"; then + # Convert the SSH URL to HTTPS (replace the scp-style ':' first, then the 'git@' prefix; the reverse order would corrupt the ':' in 'https:') + HTTPS_URL=$(echo "$REMOTE_URL" | sed 's/:/\//; s/git@/https:\/\//') + git remote set-url origin "$HTTPS_URL" + + # Pull from the main branch + git pull origin main + + # Restore the original SSH URL + git remote set-url origin "$REMOTE_URL" +else + # Pull from the main branch if the URL is already HTTPS + git pull origin main +fi diff --git a/scripts/restart_container.sh b/scripts/restart_container.sh new file mode 100755 index 000000000..d30b62979 --- /dev/null +++ b/scripts/restart_container.sh @@ -0,0 +1,5 @@ +#!/bin/sh + +sleep 5 + +docker restart "$1"