From 0493d4657e9abc86e964dd40754229de3f9493d5 Mon Sep 17 00:00:00 2001 From: Holger Stitz Date: Mon, 9 Sep 2019 15:01:32 +0200 Subject: [PATCH 01/34] Switch to python_3.7 branches --- phovea_product.json | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/phovea_product.json b/phovea_product.json index eaf4725..afc40d8 100644 --- a/phovea_product.json +++ b/phovea_product.json @@ -10,27 +10,27 @@ "type": "api", "label": "gapminder_server", "repo": "phovea/phovea_server", - "branch": "v2.0.0", + "branch": "python_3.7", "additional": [ { "name": "phovea_security_flask", "repo": "phovea/phovea_security_flask", - "branch": "v2.0.3" + "branch": "python_3.7" }, { "name": "phovea_data_redis", "repo": "phovea/phovea_data_redis", - "branch": "v2.0.0" + "branch": "python_3.7" }, { "name": "phovea_data_mongo", "repo": "phovea/phovea_data_mongo", - "branch": "v2.0.0" + "branch": "python_3.7" }, { "name": "phovea_clue", "repo": "phovea/phovea_clue", - "branch": "^v2.0.0" + "branch": "python_3.7" } ], "data": [ From a39ac911ec8a39dc6bb5e56cad228ca5cf14e1df Mon Sep 17 00:00:00 2001 From: Holger Stitz Date: Mon, 9 Sep 2019 15:02:50 +0200 Subject: [PATCH 02/34] Switch to develop branches --- phovea_product.json | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/phovea_product.json b/phovea_product.json index eaf4725..97dfbb4 100644 --- a/phovea_product.json +++ b/phovea_product.json @@ -10,27 +10,27 @@ "type": "api", "label": "gapminder_server", "repo": "phovea/phovea_server", - "branch": "v2.0.0", + "branch": "develop", "additional": [ { "name": "phovea_security_flask", "repo": "phovea/phovea_security_flask", - "branch": "v2.0.3" + "branch": "develop" }, { "name": "phovea_data_redis", "repo": "phovea/phovea_data_redis", - "branch": "v2.0.0" + "branch": "develop" }, { "name": "phovea_data_mongo", "repo": "phovea/phovea_data_mongo", - "branch": "v2.0.0" + "branch": "develop" }, { "name": "phovea_clue", "repo": "phovea/phovea_clue", - "branch": "^v2.0.0" + "branch": "develop" } ], "data": [ From 2351b60f6ae767bb218317d2e4533c63bf8d8374 Mon Sep 17 00:00:00 2001 From: dvvanessastoiber Date: Tue, 14 Jan 2020 08:32:09 +0100 Subject: [PATCH 03/34] use `circleci/python:3.7-buster-node-browsers` as Docker image --- .circleci/config.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index a3e8144..a974ba0 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -3,8 +3,7 @@ jobs: build: working_directory: ~/phovea docker: - - image: caleydo/phovea_circle_python:1.0 - - image: docker:17.05.0-ce-git + - image: circleci/python:3.7-buster-node-browsers # for node version see Dockerfile on https://hub.docker.com/r/circleci/python steps: - checkout - setup_remote_docker From 4bcb339045ea2a0bd1ce534554ab9884545b324c Mon Sep 17 00:00:00 2001 From: dvvanessastoiber Date: Tue, 14 Jan 2020 08:32:22 +0100 Subject: [PATCH 04/34] update awscli version --- .circleci/config.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index a974ba0..ce5869d 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -17,15 +17,15 @@ jobs: paths: - ./node_modules - restore_cache: - key: awscli-1.11.113 + key: awscli-1.16.312 - run: name: install-aws-cli command: | virtualenv ~/venv . 
~/venv/bin/activate - pip install awscli==1.11.113 + pip install awscli-1.16.312 - save_cache: - key: awscli-1.11.113 + key: awscli-1.16.312 paths: - ~/venv - run: @@ -66,7 +66,7 @@ jobs: export AWS_DEFAULT_REGION=eu-central-1 baseName=${CIRCLE_PROJECT_REPONAME%_product} # list repos filter to just the one of this product and delete untagged ones - aws ecr describe-repositories --output text | cut -f5 | grep "caleydo/${baseName}" | while read line; do aws ecr list-images --repository-name $line --filter tagStatus=UNTAGGED --query 'imageIds[*]' --output text | while read imageId; do aws ecr batch-delete-image --output text --repository-name $line --image-ids imageDigest=$imageId; done; done + aws ecr describe-repositories --output text | cut -f6 | grep "caleydo/${baseName}" | while read line; do aws ecr list-images --repository-name $line --filter tagStatus=UNTAGGED --query 'imageIds[*]' --output text | while read imageId; do aws ecr batch-delete-image --output text --repository-name $line --image-ids imageDigest=$imageId; done; done - deploy: name: restart aws #assumes the task definition is called - command: | From 692af305ca15af7953cd2425322d2db5f68ea4dc Mon Sep 17 00:00:00 2001 From: dvvanessastoiber Date: Tue, 14 Jan 2020 08:32:46 +0100 Subject: [PATCH 05/34] switch to develop branches in _phovea_product.json_ --- phovea_product.json | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/phovea_product.json b/phovea_product.json index afc40d8..97dfbb4 100644 --- a/phovea_product.json +++ b/phovea_product.json @@ -10,27 +10,27 @@ "type": "api", "label": "gapminder_server", "repo": "phovea/phovea_server", - "branch": "python_3.7", + "branch": "develop", "additional": [ { "name": "phovea_security_flask", "repo": "phovea/phovea_security_flask", - "branch": "python_3.7" + "branch": "develop" }, { "name": "phovea_data_redis", "repo": "phovea/phovea_data_redis", - "branch": "python_3.7" + "branch": "develop" }, { "name": "phovea_data_mongo", "repo": "phovea/phovea_data_mongo", - "branch": "python_3.7" + "branch": "develop" }, { "name": "phovea_clue", "repo": "phovea/phovea_clue", - "branch": "python_3.7" + "branch": "develop" } ], "data": [ From 4dc65389390bfcfb8bd50b43155f7588c2f3121b Mon Sep 17 00:00:00 2001 From: dvvanessastoiber Date: Tue, 14 Jan 2020 08:33:31 +0100 Subject: [PATCH 06/34] delete _.travis.yml_ --- .travis.yml | 36 ------------------------------------ 1 file changed, 36 deletions(-) delete mode 100644 .travis.yml diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index f9b94b0..0000000 --- a/.travis.yml +++ /dev/null @@ -1,36 +0,0 @@ -language: node_js - -node_js: -- 6 - -services: -- docker - -before_install: -- export DISPLAY=:99.0 -- sh -e /etc/init.d/xvfb start -- if [[ `npm -v` != 3* ]]; then npm i -g npm@3; fi - -before_script: -- pip install --user awscli -- export PATH=$PATH:$HOME/.local/bin - -# build and push -#- $(aws ecr get-login --region eu-central-1) -#script: node build.js --skipTests --skipSaveImage --pushTo=$AWS_ECR_PREFIX --pushLatest --pushDaily - -script: node build.js --skipSaveImage - -deploy: - provider: releases - api_key: - secure: 
TK9/P34Bi3WuppiDrBCwVcn41yCBwmILaU8hXTBzUPbT7TbeFIwsC6/4CtH85Z+ZrUve4S5pTmWRNf2dQDxWw3uYu7+bJuemV2J1LHG76mognj+TNEiYxfLQUt3Gql4W7C7FcI4Rlx5/uMN9wY1wro8TWUBMwT6jjSrUWIvK3GXoojd5bHvJx07XpjWl9wCon4D0ruZiFoM2mdeP23lbc2GckETi32oEKswnQXxkMACmxbPzoWbvkxH4aK8Bt2Rj2sl2TbPhVkN6DAkHGkGAvLI+2/aRfG27+oo3OKsaDjbuGABct8TfZccJ970CbQ8kbnCjYxstvqkg1JWjF0W67sX/flBZZOEUA5l0OLWo6HqMGMxm7/lEQhIdPMsRmvXL+HVOxkMrB2dda58QzxVwiZp+rRqUaeabPZp8Kl5xodGrVxsBvxe6zAbJ5jCtCSumG6+kLyKI00/kYlghqQNrgUw0ZsYJlQ34h3lo/24QpaeyDpQoCkGWQgtgqiXGpeKSu7bCnOqIqAy3nbT9Utwj7K8gIasTG5idosEAz/THMampNbGDuyxxc340sYGNMg9Bhm1g2ILWRdtV470p5hwBtIDTKi3/PAizEO26+Wh0zI47Sg3ao57avcbCsTmzbZUeA5J4bojmchhJCHX8su9cSCGh/2fJA/1eBIgEvOQ8LNE= - file: build/* - on: - tags: true - -notifications: - slack: - secure: E8/1UIdHSczUbN+6i6gd1d5LM4vmLdwLQ30tpyjvnM0wvfDce76oPxLJAy240WJ5ybXRZUtNrttpVpt4tEXCy8aLFCmxD7s77rVloH+q1J8R/ptTFWZGhFGEujk1awEmVbzcWxJkV9/JENQaeGBKxwv8/EQwWwEkAb7p/+AJb9owmH88b3wUZUGHBWtbMiyyaF4Rm1Wg1stJB8Z1Ga7PRF4cqufTgcDdsCPVv9gAY+VxOIGqX/Vfuc9UWpUH8vq8lHUE7Inn5QS78kuFfSgLWga3H6Mu/Gko1XNlWk0QWWQBUvEZ6ZC6Wuo68KzvUjJHDTnx8WyfHue2JNHIslcX+eJq2WHLeEgM24VeNkILCGo/H/60NGHiSjrIv/Y9h6bQ9FDjo6TUyE4nbdPYN1RN9FQ5UbI9Y4Gi753H9mqnHWlEywBOzHxdZCAuz9Wh03CCF/blsvJ+Obbyo6Jrfe+g44jyi9kQdBNQ78qG6v4EXws8FiYao6x3PpgIwFix42Cpr+soAh5FpA3C1zHSAyZZpXF65/lrDl5yPNofK7Wy0B9bw+0I6Z/u7ZKFNVZXvYPGYvtUVcsALGBdmYc61+LCta36Po0KZseWVAlJj6QnOJDYzv0wvV/zsuf9A5KpYFGiqV9Q7zmtiO5FYF5sBy+lE7O9tHVO4O18IRndhRQgxhs= - on_success: change - on_failure: always From 0eece0af435ec197f94f42c76e6d3c35360b553d Mon Sep 17 00:00:00 2001 From: dvvanessastoiber Date: Tue, 14 Jan 2020 08:36:43 +0100 Subject: [PATCH 07/34] update pip install command --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index ce5869d..c90eda6 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -23,7 +23,7 @@ jobs: command: | virtualenv ~/venv . 
~/venv/bin/activate
-            pip install awscli-1.16.312
+            pip install awscli==1.16.312
       - save_cache:
           key: awscli-1.16.312
           paths:
           - ~/venv

From e652755faccc146d23e59cf4e121cd46c8f91e31 Mon Sep 17 00:00:00 2001
From: dvvanessastoiber
Date: Tue, 14 Jan 2020 08:37:15 +0100
Subject: [PATCH 08/34] use `destination` instead of removed `prefix`

---
 .circleci/config.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index c90eda6..09f6f49 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -58,7 +58,7 @@ jobs:
             node build.js --skipSaveImage --noDefaultTags --pushExtra=${awsTag} --pushTo=922145058410.dkr.ecr.eu-central-1.amazonaws.com/caleydo
       - store_artifacts:
           path: build
-          prefix: build
+          destination: build
       - deploy:
           name: cleanup untagged aws repo
           command: |

From 31308cc97beb51e7af2a7b316ac4557abaa4d646 Mon Sep 17 00:00:00 2001
From: dvvanessastoiber
Date: Tue, 14 Jan 2020 08:42:49 +0100
Subject: [PATCH 09/34] run `yo phovea:update`

---
 .circleci/config.yml |  63 +++-
 Jenkinsfile          |   6 +-
 README.md            |  10 +-
 build.js             | 749 +++++++++++++++++++++++++++++++++----------
 package.json         |  14 +-
 5 files changed, 650 insertions(+), 192 deletions(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 09f6f49..2cc66a5 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -7,29 +7,42 @@ jobs:
     steps:
       - checkout
       - setup_remote_docker
+      - run:
+          name: Show Node.js and npm version
+          command: |
+            node -v
+            npm -v
+      - run:
+          name: Show Python and pip version
+          command: |
+            python --version
+            pip --version
       - restore_cache:
-          key: dependency-cache2-{{ checksum "package.json" }}
+          key: dependency-cache-{{ checksum "package.json" }}
       - run:
-          name: install-npm-wee
+          name: Install npm dependencies
           command: npm install
       - save_cache:
-          key: dependency-cache2-{{ checksum "package.json" }}
+          key: dependency-cache-{{ checksum "package.json" }}
           paths:
           - ./node_modules
+      - run:
+          name: Show installed npm dependencies
+          command: npm list --depth=1 || true
       - restore_cache:
-          key: awscli-1.16.312
+          key: awscli-1.11.113
       - run:
-          name: install-aws-cli
+          name: Install AWS CLI
           command: |
             virtualenv ~/venv
             . ~/venv/bin/activate
-            pip install awscli==1.16.312
+            pip install awscli==1.11.113
       - save_cache:
-          key: awscli-1.16.312
+          key: awscli-1.11.113
           paths:
           - ~/venv
       - run:
-          name: login ecr and docker hub
+          name: Login AWS ECR and DockerHub
           command: |
             . ~/venv/bin/activate
             cat > ~/.dockercfg << EOF
             login="$(aws ecr get-login --no-include-email)"
             ${login}
       - deploy:
-          name: build and deploy
+          name: Build and deploy
           command: |
             . ~/venv/bin/activate
             case $CIRCLE_BRANCH in
             node build.js --skipSaveImage --noDefaultTags --pushExtra=${awsTag} --pushTo=922145058410.dkr.ecr.eu-central-1.amazonaws.com/caleydo
       - store_artifacts:
           path: build
           destination: build
       - deploy:
-          name: cleanup untagged aws repo
+          name: Cleanup untagged AWS repositories
           command: |
             . 
~/venv/bin/activate export AWS_DEFAULT_REGION=eu-central-1 baseName=${CIRCLE_PROJECT_REPONAME%_product} # list repos filter to just the one of this product and delete untagged ones - aws ecr describe-repositories --output text | cut -f6 | grep "caleydo/${baseName}" | while read line; do aws ecr list-images --repository-name $line --filter tagStatus=UNTAGGED --query 'imageIds[*]' --output text | while read imageId; do aws ecr batch-delete-image --output text --repository-name $line --image-ids imageDigest=$imageId; done; done + aws ecr describe-repositories --output text | cut -f5 | grep "caleydo/${baseName}" | while read line; do aws ecr list-images --repository-name $line --filter tagStatus=UNTAGGED --query 'imageIds[*]' --output text | while read imageId; do aws ecr batch-delete-image --output text --repository-name $line --image-ids imageDigest=$imageId; done; done - deploy: - name: restart aws #assumes the task definition is called - + name: Restart AWS task # assumes the task definition is called - command: | . ~/venv/bin/activate export AWS_DEFAULT_REGION=eu-central-1 @@ -88,3 +101,29 @@ jobs: fi aws --output text ecs run-task --cluster caleydo --task-definition ${awsFamily} --started-by CircleCIAutoUpdate fi +workflows: + version: 2 +# build-nightly: +# triggers: +# - schedule: +# cron: "15 1 * * 1-5" # "At 01:15 on every day-of-week from Monday through Friday.”, see: https://crontab.guru/#15_1_*_*_1-5 +# filters: +# branches: +# only: +# - develop +# jobs: +# - build + build-branch: + jobs: + - build: + filters: + tags: + ignore: /^v.*/ + build-tag: + jobs: + - build: + filters: + branches: + ignore: /.*/ + tags: + only: /^v.*/ diff --git a/Jenkinsfile b/Jenkinsfile index 82f0a16..44e9742 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -21,9 +21,11 @@ node { try { withCredentials([usernameColonPassword(credentialsId: 'PHOVEA_GITHUB_CREDENTIALS', variable: 'PHOVEA_GITHUB_CREDENTIALS')]) { docker.withRegistry("https://922145058410.dkr.ecr.eu-central-1.amazonaws.com", "ecr:eu-central-1:PHOVEA_AWS_CREDENTIALS") { - wrap([$class: 'Xvfb']) { - sh 'node build.js --skipTests --skipSaveImage --noDefaultTags --pushExtra=latest --pushTo=922145058410.dkr.ecr.eu-central-1.amazonaws.com/caleydo' + docker.withRegistry("", "PHOVEA_DOCKER_HUB_CREDENTIALS") { + wrap([$class: 'Xvfb']) { + sh 'node build.js --skipTests --skipSaveImage --noDefaultTags --pushExtra=latest --pushTo=922145058410.dkr.ecr.eu-central-1.amazonaws.com/caleydo' } + } } } currentBuild.result = "SUCCESS" diff --git a/README.md b/README.md index 64fcc8d..979dc21 100644 --- a/README.md +++ b/README.md @@ -7,7 +7,7 @@ Installation ------------ ``` -git clone https://github.com/Caleydo/gapminder_product.git +git clone git@github.com:Caleydo/gapminder_product.git cd gapminder_product npm install ``` @@ -38,7 +38,7 @@ This repository is part of **[Phovea](http://phovea.caleydo.org/)**, a platform [phovea-url]: https://phovea.caleydo.org [npm-image]: https://badge.fury.io/js/gapminder_product.svg [npm-url]: https://npmjs.org/package/gapminder_product -[travis-image]: https://travis-ci.org/Caleydo/gapminder_product.svg?branch=master -[travis-url]: https://travis-ci.org/Caleydo/gapminder_product -[daviddm-image]: https://david-dm.org/Caleydo/gapminder_product/status.svg -[daviddm-url]: https://david-dm.org/Caleydo/gapminder_product +[travis-image]: https://travis-ci.org/datavisyn/gapminder_product.svg?branch=master +[travis-url]: https://travis-ci.org/datavisyn/gapminder_product +[daviddm-image]: 
https://david-dm.org/datavisyn/gapminder_product/status.svg +[daviddm-url]: https://david-dm.org/datavisyn/gapminder_product diff --git a/build.js b/build.js index d68016f..f6283d2 100644 --- a/build.js +++ b/build.js @@ -7,7 +7,9 @@ const path = require('path'); const fs = Promise.promisifyAll(require('fs-extra')); const chalk = require('chalk'); const pkg = require('./package.json'); +// see show help const argv = require('yargs-parser')(process.argv.slice(2)); + const quiet = argv.quiet !== undefined; const now = new Date(); @@ -15,32 +17,116 @@ const prefix = (n) => n < 10 ? ('0' + n) : n.toString(); const buildId = `${now.getUTCFullYear()}${prefix(now.getUTCMonth())}${prefix(now.getUTCDate())}-${prefix(now.getUTCHours())}${prefix(now.getUTCMinutes())}${prefix(now.getUTCSeconds())}`; pkg.version = pkg.version.replace('SNAPSHOT', buildId); const env = Object.assign({}, process.env); +const productName = pkg.name.replace('_product', ''); + +function showHelp(steps, chain) { + console.info(`node build.js -- step1 step2 +possible options: + * --quiet ... reduce log messages + * --serial ... build elements sequentially + * --skipTests ... skip tests + * --injectVersion ... injects the product version into the package.json of the built component + * --useSSH ... clone via ssh instead of https + * --skipCleanUp ... skip cleaning up old docker images + * --skipSaveImage ... skip saving the generated docker images + * --pushTo ... push docker images to the given registry + * --noDefaultTags ... don't push generated default tag : + * --pushExtra ... push additional custom tag: e.g., --pushExtra=develop + * --forceLabel ... force to use the label even only a single service exists + * --dryRun ... just compute chain no execution + * --help ... show this help message + +arguments: (starting with --!) optional list of steps to execute in the given order (expert mode) by default the default chain is executed + `); + + steps = Object.keys(steps); + const primary = steps.filter((d) => !d.includes(':')).sort((a, b) => a.localeCompare(b)); + const secondary = steps.filter((d) => d.includes(':')).sort((a, b) => a.localeCompare(b)); + + console.info('possible primary steps:\n ', primary.join('\n ')); + console.info('possible secondary steps:\n ', secondary.join('\n ')); + + console.info('default chain:\n', JSON.stringify(chain, null, ' ')); +} +/** + * generates a repo url to clone depending on the argv.useSSH option + * @param {string} url the repo url either in git@ for https:// form + * @returns the clean repo url + */ function toRepoUrl(url) { + if (url.startsWith('git@')) { + if (argv.useSSH) { + return url; + } + // have an ssh url need an http url + const m = url.match(/(https?:\/\/([^/]+)\/|git@(.+):)([\w\d-_/]+)(.git)?/); + return `https://${m[3]}/${m[4]}.git`; + } + if (url.startsWith('http')) { + if (!argv.useSSH) { + return url; + } + // have a http url need an ssh url + const m = url.match(/(https?:\/\/([^/]+)\/|git@(.+):)([\w\d-_/]+)(.git)?/); + return `git@${m[2]}:${m[4]}.git`; + } + if (!url.includes('/')) { + url = `Caleydo/${url}`; + } if (argv.useSSH) { - return `git@github.com:${url}.git` + return `git@github.com:${url}.git`; } - return url.startsWith('https://github.com/') ? 
url : `https://github.com/${url}.git`; + return `https://github.com/${url}.git`; } +/** + * guesses the credentials environment variable based on the given repository hostname + * @param {string} repo + */ +function guessUserName(repo) { + // extract the host + const host = repo.match(/:\/\/([^/]+)/)[1]; + const hostClean = host.replace(/\./g, '_').toUpperCase(); + // e.g. GITHUB_COM_CREDENTIALS + const envVar = process.env[`${hostClean}_CREDENTIALS`]; + if (envVar) { + return envVar; + } + return process.env.PHOVEA_GITHUB_CREDENTIALS; +} function toRepoUrlWithUser(url) { const repo = toRepoUrl(url); - const username_and_password = process.env.PHOVEA_GITHUB_CREDENTIALS; - if (repo.includes('git@github.com') || !username_and_password) { + if (repo.startsWith('git@')) { // ssh return repo; } - return repo.replace('://', `://${username_and_password}@`); + const usernameAndPassword = guessUserName(repo); + if (!usernameAndPassword) { // ssh or no user given + return repo; + } + return repo.replace('://', `://${usernameAndPassword}@`); } - function fromRepoUrl(url) { if (url.includes('.git')) { - return url.match(/\/(.*)\.git/)[0] + return url.match(/\/([^/]+)\.git/)[0]; } return url.slice(url.lastIndexOf('/') + 1); } +/** + * deep merge with array union + * @param {*} target + * @param {*} source + */ +function mergeWith(target, source) { + const _ = require('lodash'); + const mergeArrayUnion = (a, b) => Array.isArray(a) ? _.union(a, b) : undefined; + _.mergeWith(target, source, mergeArrayUnion); + return target; +} + function downloadDataUrl(url, dest) { if (!url.startsWith('http')) { url = `https://s3.eu-central-1.amazonaws.com/phovea-data-packages/${url}`; @@ -49,7 +135,7 @@ function downloadDataUrl(url, dest) { console.log(chalk.blue('download file', url)); return new Promise((resolve, reject) => { const file = fs.createWriteStream(dest); - const request = http.get(url, (response) => { + http.get(url, (response) => { response.pipe(file); file.on('finish', () => { file.close(resolve); @@ -72,11 +158,13 @@ function downloadDataFile(desc, destDir, cwd) { url: desc }; } - switch(desc.type) { - case 'url': - const destName = toDownloadName(desc.url); - return fs.ensureDirAsync(destDir).then(() => downloadDataUrl(desc.url, path.join(destDir, destName))); - case 'repo': + desc.type = desc.type || (desc.url ? 'url' : (desc.repo ? 'repo' : 'unknown')); + switch (desc.type) { + case 'url': { + desc.name = desc.name || toDownloadName(desc.url); + return fs.ensureDirAsync(destDir).then(() => downloadDataUrl(desc.url, `${destDir}/${desc.name}`)); + } + case 'repo': { desc.name = desc.name || fromRepoUrl(desc.repo); let downloaded; if (fs.existsSync(path.join(cwd, desc.name))) { @@ -85,6 +173,7 @@ function downloadDataFile(desc, destDir, cwd) { downloaded = cloneRepo(desc, cwd); } return downloaded.then(() => fs.copyAsync(`${cwd}/${desc.name}/data`, `${destDir}/${desc.name}`)); + } default: console.error('unknown data type:', desc.type); return null; @@ -96,25 +185,36 @@ function downloadDataFile(desc, destDir, cwd) { * @param cmd command as array * @param args arguments * @param opts options + * @returns a promise with the result code or a reject with the error string */ function spawn(cmd, args, opts) { const spawn = require('child_process').spawn; const _ = require('lodash'); return new Promise((resolve, reject) => { - const p = spawn(cmd, typeof args === 'string' ? args.split(' ') : args, _.merge({stdio: ['ignore', 1, 2]}, opts)); + const p = spawn(cmd, typeof args === 'string' ? 
args.split(' ') : args, _.merge({stdio: argv.quiet ? ['ignore', 'pipe', 'pipe'] : ['ignore', 1, 2]}, opts)); + const out = []; + if (p.stdout) { + p.stdout.on('data', (chunk) => out.push(chunk)); + } + if (p.stderr) { + p.stderr.on('data', (chunk) => out.push(chunk)); + } p.on('close', (code, signal) => { if (code === 0) { console.info(cmd, 'ok status code', code, signal); resolve(code); } else { console.error(cmd, 'status code', code, signal); - reject(`${cmd} failed with status code ${code} ${signal}`); + if (args.quiet) { + // log output what has been captured + console.log(out.join('\n')); + } + reject(new Error(`${cmd} failed with status code ${code} ${signal}`)); } }); }); } - /** * run npm with the given args * @param cwd working directory @@ -151,23 +251,23 @@ function dockerSave(image, target) { p.stderr.on('data', (data) => console.error(chalk.red(data.toString()))); p2.stderr.on('data', (data) => console.error(chalk.red(data.toString()))); } - p2.on('close', (code) => code == 0 ? resolve() : reject(code)); + p2.on('close', (code) => code === 0 ? resolve() : reject(code)); }); } -function dockerRemoveImages(productName) { - console.log(chalk.blue(`docker images | grep ${productName} | awk '{print $1":"$2}') | xargs docker rmi`)); +function dockerRemoveImages() { + console.log(chalk.blue(`docker images | grep ${productName} | awk '{print $1":"$2}') | xargs --no-run-if-empty docker rmi`)); const spawn = require('child_process').spawn; const opts = {env}; - return new Promise((resolve, reject) => { + return new Promise((resolve) => { const p = spawn('docker', ['images'], opts); const p2 = spawn('grep', [productName], opts); p.stdout.pipe(p2.stdin); const p3 = spawn('awk', ['{print $1":"$2}'], opts); p2.stdout.pipe(p3.stdin); - const p4 = spawn('xargs', ['docker', 'rmi'], {env, stdio: [p3.stdout, 1, 2]}); + const p4 = spawn('xargs', ['--no-run-if-empty', 'docker', 'rmi'], {env, stdio: [p3.stdout, 1, 2]}); p4.on('close', (code) => { - if (code == 0) { + if (code === 0) { resolve(); } else { console.log('invalid error code, but continuing'); @@ -197,12 +297,13 @@ function yo(generator, options, cwd, args) { const yeoman = require('yeoman-environment'); // call yo internally const yeomanEnv = yeoman.createEnv([], {cwd, env}, quiet ? createQuietTerminalAdapter() : undefined); - yeomanEnv.register(require.resolve('generator-phovea/generators/' + generator), 'phovea:' + generator); const _args = Array.isArray(args) ? 
args.join(' ') : args || ''; return new Promise((resolve, reject) => { try { console.log(cwd, chalk.blue('running yo phovea:' + generator)); - yeomanEnv.run(`phovea:${generator} ${_args}`, options, resolve); + yeomanEnv.lookup(() => { + yeomanEnv.run(`phovea:${generator} ${_args}`, options, resolve); + }); } catch (e) { console.error('error', e, e.stack); reject(e); @@ -215,6 +316,7 @@ function cloneRepo(p, cwd) { p.name = p.name || fromRepoUrl(p.repo); p.repo = p.repo || `phovea/${p.name}`; p.branch = p.branch || 'master'; + return yo('clone-repo', { branch: p.branch, extras: '--depth 1', @@ -224,39 +326,31 @@ function cloneRepo(p, cwd) { } function resolvePluginType(p, dir) { + if (!fs.existsSync(`${dir}/${p.name}/.yo-rc.json`)) { + p.pluginType = 'lib'; + p.isHybridType = false; + return; + } return fs.readJSONAsync(`${dir}/${p.name}/.yo-rc.json`).then((json) => { p.pluginType = json['generator-phovea'].type; p.isHybridType = p.pluginType.includes('-'); }); } -function preBuild(p, dir) { - const hasAdditional = p.additional.length > 0; - let act = fs.emptyDirAsync(dir) - .then(() => cloneRepo(p, dir)) - .then(() => resolvePluginType(p, dir)); - if (hasAdditional) { - act = act - .then(() => Promise.all(p.additional.map((pi) => cloneRepo(pi, dir).then(resolvePluginType.bind(this, pi, dir))))); - } - return act; -} - function loadComposeFile(dir, p) { const composeFile = `${dir}/${p.name}/deploy/docker-compose.partial.yml`; if (fs.existsSync(composeFile)) { const yaml = require('yamljs'); return fs.readFileAsync(composeFile).then((content) => yaml.parse(content.toString())); - } else { - return Promise.resolve({}); } + return Promise.resolve({}); } function patchComposeFile(p, composeTemplate) { const service = {}; if (composeTemplate && composeTemplate.services) { const firstService = Object.keys(composeTemplate.services)[0]; - //copy data from first service + // copy data from first service Object.assign(service, composeTemplate.services[firstService]); delete service.build; } @@ -272,104 +366,79 @@ function patchComposeFile(p, composeTemplate) { return r; } - -function postBuild(p, dir, buildInSubDir) { - return Promise.resolve(null) - .then(() => docker(`${dir}${buildInSubDir ? '/' + p.name : ''}`, `build -t ${p.image} -f deploy/Dockerfile .`)) - .then(() => argv.skipSaveImage ? 
null : dockerSave(p.image, `build/${p.label}_image.tar.gz`)) - .then(() => Promise.all([loadComposeFile(dir, p).then(patchComposeFile.bind(null, p))].concat(p.additional.map((pi) => loadComposeFile(dir, pi))))) - .then(mergeCompose); +function patchDockerfile(p, dockerFile) { + if (!p.baseImage) { + return null; + } + return fs.readFileAsync(dockerFile).then((content) => { + content = content.toString(); + // patch the Dockerfile by replacing the FROM statement + const r = /^\s*FROM (.+)\s*$/igm; + const fromImage = r.exec(content)[1]; + console.log(`patching ${dockerFile} change from ${fromImage} -> ${p.baseImage}`); + content = content.replace(r, `FROM ${p.baseImage}`); + return fs.writeFileAsync(dockerFile, content); + }); } -function buildWebApp(p, dir) { - console.log(dir, chalk.blue('building web application:'), p.label); - const name = p.name; - const hasAdditional = p.additional.length > 0; - let act = preBuild(p, dir); - //let act = Promise.resolve(null); - if (hasAdditional) { - act = act - .then(() => yo('workspace', {noAdditionals: true}, dir)) - .then(() => npm(dir, 'install')); - //test all modules - if (hasAdditional && !argv.skipTests) { - act = act.then(() => Promise.all(p.additional.map((pi) => npm(dir, `run test${pi.isHybridType ? ':web' : ''}:${pi.name}`)))); +function patchWorkspace(p) { + // prepend docker_script in the workspace + if (fs.existsSync('./docker_script.sh')) { + console.log('patch workspace and prepend docker_script.sh'); + let content = fs.readFileSync('./docker_script.sh').toString(); + if (fs.existsSync(p.tmpDir + '/docker_script.sh')) { + content += '\n' + fs.readFileSync(p.tmpDir + '/docker_script.sh').toString(); } - act = act - .then(() => npm(dir, `run dist${p.isHybridType ? ':web' : ''}:${p.name}`)); - } else { - act = act - .then(() => npm(dir + '/' + name, 'install')) - .then(() => npm(dir + '/' + name, `run dist${p.isHybridType ? ':web' : ''}`)); + fs.writeFileSync(p.tmpDir + '/docker_script.sh', content); } - return act - .then(() => fs.renameAsync(`${dir}/${p.name}/dist/${p.name}.tar.gz`, `./build/${p.label}.tar.gz`)) - .then(postBuild.bind(null, p, dir, true)); -} - -function buildServerApp(p, dir) { - console.log(dir, chalk.blue('building service package:'), p.label); - const name = p.name; - - let act = preBuild(p, dir); - act = act - .then(() => yo('workspace', {noAdditionals: true}, dir)); - if (!argv.skipTests) { - act = act - .then(() => console.log(chalk.yellow('create test environment'))) - .then(() => spawn('pip', 'install --no-cache-dir -r requirements.txt', {cwd: dir})) - .then(() => spawn('pip', 'install --no-cache-dir -r requirements_dev.txt', {cwd: dir})); + if (argv.injectVersion) { + const pkgfile = `${p.tmpDir}/${p.name}/package.json`; + if (fs.existsSync(pkgfile)) { + const ppkg = require(pkgfile); + ppkg.version = pkg.version; + fs.writeJSONSync(pkgfile, ppkg); + } else { + console.warn('cannot inject version, main package.json not found'); + } } - act = act - .then(() => npm(dir + '/' + name, `run build${p.isHybridType ? ':python' : ''}`)) - .then(() => Promise.all(p.additional.map((pi) => npm(dir + '/' + pi.name, `run build${pi.isHybridType ? 
':python' : ''}`)))); - - //copy all together - act = act - .then(() => fs.ensureDirAsync(`${dir}/build/source`)) - .then(() => fs.copyAsync(`${dir}/${name}/build/source`, `${dir}/build/source/`)) - .then(() => Promise.all(p.additional.map((pi) => fs.copyAsync(`${dir}/${pi.name}/build/source`, `${dir}/build/source/`)))); - - //copy data packages - act = act.then(() => Promise.all(p.data.map((d) => downloadDataFile(d, `${dir}/build/source/_data`, dir)))); - //let act = Promise.resolve([]); - - //copy main deploy thing and create a docker out of it - return act - .then(() => fs.ensureDirAsync(`${dir}/deploy`)) - .then(() => fs.copyAsync(`${dir}/${name}/deploy`, `${dir}/deploy/`)) - .then(postBuild.bind(null, p, dir, false)); -} - -function buildImpl(d, dir) { - switch (d.type) { - case 'static': - case 'web': - return buildWebApp(d, dir); - case 'api': - d.name = d.name || 'phovea_server'; - return buildServerApp(d, dir); - case 'service': - return buildServerApp(d, dir); - default: - console.error(chalk.red('unknown product type: ' + d.type)); - return Promise.resolve(null); + // inject extra phovea.js + if (fs.existsSync('./phovea.js')) { + console.log('patch workspace and add workspace phovea.js'); + let registry = fs.readFileSync(p.tmpDir + '/phovea_registry.js').toString(); + fs.copyFileSync('./phovea.js', p.tmpDir + '/phovea.js'); + + registry += `\n\n + import {register} from 'phovea_core/src/plugin'; + register('__product',require('./phovea.js')); + `; + fs.writeFileSync(p.tmpDir + '/phovea_registry.js', registry); } } function mergeCompose(composePartials) { let dockerCompose = {}; - const _ = require('lodash'); - const mergeArrayUnion = (a, b) => Array.isArray(a) ? _.union(a, b) : undefined; - composePartials.forEach((c) => _.mergeWith(dockerCompose, c, mergeArrayUnion)); + composePartials.forEach((c) => mergeWith(dockerCompose, c)); return dockerCompose; } -function buildCompose(descs, composePartials) { +function buildComposePartials(descs) { + const validDescs = descs.filter((d) => !d.error); + + // merge a big compose file including all + return Promise.all(validDescs.map((p) => { + return Promise.all([loadComposeFile(p.tmpDir, p).then(patchComposeFile.bind(null, p))].concat(p.additional.map((pi) => loadComposeFile(p.tmpDir, pi)))) + .then((partials) => { + p.composePartial = mergeCompose(partials); + }); + })); +} + +function buildCompose(descs, dockerComposePatch) { console.log('create docker-compose.yml'); - const dockerCompose = mergeCompose(composePartials); + + const dockerCompose = mergeCompose(descs.map((d) => d.composePartial).filter(Boolean)); const services = dockerCompose.services; // link the api server types to the web types and server to the api const web = descs.filter((d) => d.type === 'web').map((d) => d.label); @@ -386,6 +455,23 @@ function buildCompose(descs, composePartials) { services[w].links.push(`${s.label}:${s.name}`); }); }); + + if (services._host) { + // inline _host to apis + const host = services._host; + delete services._host; + api.forEach((s) => { + services[s] = mergeCompose([host, services[s]]); + }); + } + + Object.keys(dockerComposePatch.services).forEach((service) => { + if (services[service] !== undefined) { + console.log(`patch generated docker-compose file for ${service}`); + mergeWith(services[service], dockerComposePatch.services[service]); + } + }); + const yaml = require('yamljs'); return fs.writeFileAsync('build/docker-compose.yml', yaml.stringify(dockerCompose, 100, 2)) .then(() => dockerCompose); @@ -402,7 +488,7 @@ function 
pushImages(images) { if (!argv.noDefaultTags) { tags.push(...images.map((image) => ({image, tag: `${dockerRepository}/${image}`}))); } - if (argv.pushExtra) { //push additional custom prefix without the version + if (argv.pushExtra) { // push additional custom prefix without the version tags.push(...images.map((image) => ({ image, tag: `${dockerRepository}/${image.substring(0, image.lastIndexOf(':'))}:${argv.pushExtra}` @@ -415,6 +501,214 @@ function pushImages(images) { .then(() => Promise.all(tags.map((tag) => docker('.', `push ${tag.tag}`)))); } +function loadPatchFile() { + const existsYaml = fs.existsSync('./docker-compose-patch.yaml'); + if (!existsYaml && !fs.existsSync('./docker-compose-patch.yml')) { + return {services: {}}; + } + const content = fs.readFileSync(existsYaml ? './docker-compose-patch.yaml' : './docker-compose-patch.yml'); + const yaml = require('yamljs'); + const r = yaml.parse(content.toString()); + if (!r.services) { + r.services = {}; + } + return r; +} + +function fillDefaults(descs, dockerComposePatch) { + const singleService = descs.length === 1 && (argv.forceLabel === undefined); + + descs.forEach((d, i) => { + // default values + d.additional = d.additional || []; + d.data = d.data || []; + d.name = d.name || (d.repo ? fromRepoUrl(d.repo) : d.label); + d.label = d.label || d.name; + d.symlink = d.symlink || null; // default value + d.image = d.image || `${productName}${singleService ? '' : `/${d.label}`}:${pkg.version}`; + // incorporate patch file + if (dockerComposePatch.services[d.label] && dockerComposePatch.services[d.label].image) { + // use a different base image to build the item + d.baseImage = dockerComposePatch.services[d.label].image; + delete dockerComposePatch.services[d.label].image; + } + // include hint in the tmp directory which one is it + d.tmpDir = `./tmp${i}_${d.name.replace(/\s+/, '').slice(0, 5)}`; + }); + + return descs; +} + +function asChain(steps, chain) { + if (chain.length === 0) { + return []; + } + const possibleSteps = Object.keys(steps); + + const callable = (c) => { + if (typeof c === 'function') { + return c; + } + + if (typeof c === 'string') { + // simple lookup + if (!possibleSteps.includes(c)) { + console.error('invalid step:', c); + throw new Error('invalid step: ' + c); + } + return callable(steps[c]); + } + + if (Array.isArray(c)) { // sequential sub started + const sub = c.map(callable); + return () => { + console.log('run sequential sub chain: ', JSON.stringify(c, null, ' ')); + let step = Promise.resolve(); + for (const s of sub) { + step = step.then(s); + } + return step; + }; + } + // parallel = object + const sub = Object.keys(c).map((ci) => callable(c[ci])); + return () => { + console.log('run parallel sub chain: ', JSON.stringify(c, null, ' ')); + return Promise.all(sub.map((d) => d())); // run sub lazy combined with all + }; + }; + return chain.map(callable); +} + +function runChain(chain, catchErrors) { + let start = null; + let step = new Promise((resolve) => { + start = resolve; + }); + + for (const c of chain) { + step = step.then(c); + } + + step.catch(catchErrors); + + return () => { + start(); // resolve first to start chain + return step; // return last result + }; +} + +function strObject(items) { + const obj = {}; + for (const item of items) { + obj[item] = item; + } + return obj; +} + +function buildDockerImage(p) { + const buildInSubDir = p.type === 'web' || p.type === 'static'; + let buildArgs = ''; + // pass through http_proxy, no_proxy, and https_proxy env variables + for (const key of 
Object.keys(process.env)) { + const lkey = key.toLowerCase(); + if (lkey === 'http_proxy' || lkey === 'https_proxy' || lkey === 'no_proxy') { + // pass through + buildArgs += ` --build-arg ${lkey}='${process.env[key]}'`; + } + } + + // patch the docker file with the with an optional given baseImage + return Promise.resolve(patchDockerfile(p, `${p.tmpDir}${buildInSubDir ? '/' + p.name : ''}/deploy/Dockerfile`)) + // create the container image + .then(() => docker(`${p.tmpDir}${buildInSubDir ? '/' + p.name : ''}`, `build -t ${p.image}${buildArgs} -f deploy/Dockerfile .`)) + // tag the container image + .then(() => argv.pushExtra ? docker(`${p.tmpDir}`, `tag ${p.image} ${p.image.substring(0, p.image.lastIndexOf(':'))}:${argv.pushExtra}`) : null); +} + +function createWorkspace(p) { + return yo('workspace', {noAdditionals: true, defaultApp: 'phovea'}, p.tmpDir) + .then(() => patchWorkspace(p)); +} + +function installWebDependencies(p) { + return npm(p.additional.length > 0 ? p.tmpDir : (`${p.tmpDir}/${p.name}`), 'install'); +} + +function cleanUpWebDependencies(p) { + return fs.emptyDirAsync(p.additional.length > 0 ? `${p.tmpDir}/node_modules` : (`${p.tmpDir}/${p.name}/node_modules`)); +} + +function resolvePluginTypes(p) { + if (p.pluginType) { + return Promise.resolve(); // already resolved + } + if (p.additional.length === 0) { + return resolvePluginType(p, p.tmpDir); + } + return Promise.all([resolvePluginType(p, p.tmpDir)].concat(p.additional.map((pi) => resolvePluginType(pi, p.tmpDir)))); +} + +function testWebAdditionals(p) { + return Promise.all(p.additional.map((pi) => npm(p.tmpDir, `run test${pi.isHybridType ? ':web' : ''}:${pi.name}`))); +} + +function buildWeb(p) { + const hasAdditional = p.additional.length > 0; + + let step; + if (hasAdditional) { + step = npm(p.tmpDir, `run dist${p.isHybridType ? ':web' : ''}:${p.name}`); + } else { + step = npm(`${p.tmpDir}/${p.name}`, `run dist${p.isHybridType ? ':web' : ''}`); + } + // move to target directory + return step.then(() => fs.renameAsync(`${p.tmpDir}/${p.name}/dist/${p.name}.tar.gz`, `./build/${p.label}.tar.gz`)); +} + +function installPythonTestDependencies(p) { + console.log(chalk.yellow('create test environment')); + return spawn('pip', 'install --no-cache-dir -r requirements.txt', {cwd: p.tmpDir}) + .then(() => spawn('pip', 'install --no-cache-dir -r requirements_dev.txt', {cwd: p.tmpDir})); +} + +function buildServer(p) { + let act = npm(`${p.tmpDir}/${p.name}`, `run build${p.isHybridType ? ':python' : ''}`); + for (const pi of p.additional) { + act = act.then(() => npm(`${p.tmpDir}/${pi.name}`, `run build${pi.isHybridType ? 
':python' : ''}`)); + } + + // copy all together + act = act + .then(() => fs.ensureDirAsync(`${p.tmpDir}/build/source`)) + .then(() => fs.copyAsync(`${p.tmpDir}/${p.name}/build/source`, `${p.tmpDir}/build/source/`)) + .then(() => Promise.all(p.additional.map((pi) => fs.copyAsync(`${p.tmpDir}/${pi.name}/build/source`, `${p.tmpDir}/build/source/`)))); + + // copy main deploy thing and create a docker out of it + act = act + .then(() => fs.ensureDirAsync(`${p.tmpDir}/deploy`)) + .then(() => fs.copyAsync(`${p.tmpDir}/${p.name}/deploy`, `${p.tmpDir}/deploy/`)); + + return act; +} + +function downloadServerDataFiles(p) { + if (!argv.serial) { + return Promise.all(p.data.map((d) => downloadDataFile(d, `${p.tmpDir}/build/source/_data`, p.tmpDir))); + } + // serial + let act = Promise.resolve(); + for (const d of p.data) { + act = act.then(() => downloadDataFile(d, `${p.tmpDir}/build/source/_data`, p.tmpDir)); + } + return act; +} + +function cleanWorkspace(descs) { + console.log(chalk.yellow('clean workspace')); + return Promise.all([fs.emptyDirAsync('build')].concat(descs.map((d) => fs.emptyDirAsync(d.tmpDir)))); +} + if (require.main === module) { if (argv.skipTests) { // if skipTest option is set, skip tests @@ -422,59 +716,182 @@ if (require.main === module) { env.PHOVEA_SKIP_TESTS = true; } if (argv.quiet) { - // if skipTest option is set, skip tests console.log(chalk.blue('will try to keep my mouth shut...')); } - const descs = require('./phovea_product.json'); - const singleService = descs.length === 1; - const productName = pkg.name.replace('_product', ''); - - - fs.emptyDirAsync('build') - .then(dockerRemoveImages.bind(this, productName)) - // move my own .yo-rc.json to avoid a conflict - .then(fs.renameAsync('.yo-rc.json', '.yo-rc_tmp.json')) - .then(() => { - const buildOne = (d, i) => { - d.additional = d.additional || []; //default values - d.data = d.data || []; - d.name = d.name || fromRepoUrl(d.repo); - d.label = d.label || d.name; - if (singleService) { - d.image = `${productName}:${pkg.version}`; - } else { - d.image = `${productName}/${d.label}:${pkg.version}`; - } - let wait = buildImpl(d, './tmp' + i); - wait.catch((error) => { - d.error = error; - console.error('ERROR building ', d, error); - }); - return wait; - }; - if (argv.serial) { - let r = Promise.resolve([]); - for (let i = 0; i < descs.length; ++i) { - r = r.then((arr) => buildOne(descs[i], i).then((f) => arr.concat(f))); - } - return r; - } else { - return Promise.all(descs.map(buildOne)); - } - }) - .then((composeFiles) => buildCompose(descs, composeFiles.filter((d) => !!d))) - .then(() => pushImages(descs.filter((d) => !d.error).map((d) => d.image))) - .then(() => fs.renameAsync('.yo-rc_tmp.json', '.yo-rc.json')) - .then(() => { + const dockerComposePatch = loadPatchFile(); + const descs = fillDefaults(require('./phovea_product.json'), dockerComposePatch); + + if (fs.existsSync('.yo-rc.json')) { + fs.renameSync('.yo-rc.json', '.yo-rc_tmp.json'); + } + fs.ensureDirSync('build'); + + const cleanUp = () => { + if (fs.existsSync('.yo-rc_tmp.json')) { + fs.renameSync('.yo-rc_tmp.json', '.yo-rc.json'); + } + }; + + const catchProductBuild = (p, act) => { + // no chaining to keep error + act.catch((error) => { + p.error = error; + console.error('ERROR building ', p.name, error); + }); + return act; + }; + + const steps = { + clean: () => cleanWorkspace(descs), + prune: dockerRemoveImages, + compose: () => buildComposePartials(descs).then(() => buildCompose(descs, dockerComposePatch)), + push: () => 
pushImages(descs.filter((d) => !d.error).map((d) => d.image)), + summary: () => { console.log(chalk.bold('summary: ')); const maxLength = Math.max(...descs.map((d) => d.name.length)); descs.forEach((d) => console.log(` ${d.name}${'.'.repeat(3 + (maxLength - d.name.length))}` + (d.error ? chalk.red('ERROR') : chalk.green('SUCCESS')))); const anyErrors = descs.some((d) => d.error); + cleanUp(); if (anyErrors) { process.exit(1); } - }).catch((error) => { + } + }; + + const webTypes = ['static', 'web']; + const serverTypes = ['api', 'service']; + + const chainProducts = []; + for (let i = 0; i < descs.length; ++i) { + const p = descs[i]; + const suffix = p.name; + const hasAdditional = p.additional.length > 0; + const isWeb = webTypes.includes(p.type); + const isServer = serverTypes.includes(p.type); + + if (!isWeb && !isServer) { + console.error(chalk.red('unknown product type: ' + p.type)); + continue; + } + + fs.ensureDirSync(p.tmpDir); + + // clone repo + const subSteps = []; + steps[`clone:${suffix}`] = () => catchProductBuild(p, cloneRepo(p, p.tmpDir)); + subSteps.push(`clone:${suffix}`); + + if (hasAdditional) { + // clone extras + const cloneKeys = []; + for (const pi of p.additional) { + const key = `clone:${suffix}:${pi.name}`; + steps[key] = () => catchProductBuild(p, cloneRepo(pi, p.tmpDir)); + cloneKeys.push(key); + } + + if (argv.serial) { + subSteps.push(...cloneKeys); + } else { + subSteps.push(strObject(cloneKeys)); + } + } + + const needsWorskpace = (isWeb && hasAdditional) || isServer; + steps[`prepare:${suffix}`] = needsWorskpace ? () => catchProductBuild(p, createWorkspace(p)) : null; + + if (isWeb) { + steps[`install:${suffix}`] = () => catchProductBuild(p, installWebDependencies(p)); + } else { // server + steps[`install:${suffix}`] = argv.skipTests ? () => null : () => catchProductBuild(p, installPythonTestDependencies(p)); + } + steps[`test:${suffix}`] = isWeb && hasAdditional ? () => catchProductBuild(p, resolvePluginTypes(p).then(() => testWebAdditionals(p))) : () => null; + steps[`build:${suffix}`] = isWeb ? () => catchProductBuild(p, resolvePluginTypes(p).then(() => buildWeb(p))) : () => catchProductBuild(p, resolvePluginTypes(p).then(() => buildServer(p))); + steps[`data:${suffix}`] = () => catchProductBuild(p, downloadServerDataFiles(p)); + steps[`postbuild:${suffix}`] = isWeb ? () => catchProductBuild(p, cleanUpWebDependencies(p)) : () => null; + steps[`image:${suffix}`] = () => catchProductBuild(p, buildDockerImage(p)); + steps[`save:${suffix}`] = () => catchProductBuild(p, dockerSave(p.image, `build/${p.label}_image.tar.gz`)); + + subSteps.push(`prepare:${suffix}`); + subSteps.push(`install:${suffix}`); + if (!argv.skipTests) { + subSteps.push(`test:${suffix}`); + } + subSteps.push(`build:${suffix}`); + if (isServer && p.data.length > 0) { + subSteps.push(`data:${suffix}`); + } + if (isWeb) { + subSteps.push(`postbuild:${suffix}`); + } + subSteps.push(`image:${suffix}`); + if (!argv.skipSaveImage) { + subSteps.push(`save:${suffix}`); + } + + steps[`product:${suffix}`] = subSteps; + subSteps.name = `product:${suffix}`; + chainProducts.push(subSteps); + } + + // create some meta steps + { + const stepNames = Object.keys(steps); + for (const meta of ['clone', 'prepare', 'build', 'test', 'postbuild', 'image', 'product', 'install']) { + const sub = stepNames.filter((d) => d.startsWith(`${meta}:`)); + if (sub.length <= 0) { + continue; + } + steps[meta] = argv.serial ? 
sub : strObject(sub); + } + } + + const chain = ['clean']; + + if (!argv.skipCleanUp) { + chain.push('prune'); + } + + if (argv.serial) { + chain.push(...chainProducts); // serially + } else { + const par = {}; + chainProducts.forEach((c) => { + par[c.name] = c; + }); + chain.push(par); // as object = parallel + } + // result of the promise is an array of partial docker compose files + + chain.push('compose'); + if (argv.pushTo) { + chain.push('push'); + } + chain.push('summary'); + + // XX. catch all error handling + const catchErrors = (error) => { console.error('ERROR extra building ', error); + // rename back + cleanUp(); process.exit(1); - }); + }; + + if (argv.help) { + showHelp(steps, chain); + cleanUp(); + process.exit(0); + } + + if (argv._.length > 0) { + // explicit chain replace computed one + chain.splice(0, chain.length, ...argv._); + } + + console.log(chalk.blue('executing chain:'), JSON.stringify(chain, null, ' ')); + const toExecute = asChain(steps, chain); + const launch = runChain(toExecute, catchErrors); + if (!argv.dryRun) { + launch(); + } } diff --git a/package.json b/package.json index ff3f170..3f7e859 100644 --- a/package.json +++ b/package.json @@ -4,17 +4,17 @@ "homepage": "https://phovea.caleydo.org", "version": "1.0.0-SNAPSHOT", "author": { - "name": "Samuel Gratzl", - "email": "samuel_gratzl@gmx.at", - "url": "" + "name": "datavisyn", + "email": "contact@datavisyn.io", + "url": "https://www.datavisyn.io/" }, "license": "BSD-3-Clause", "bugs": { - "url": "https://github.com/Caleydo/gapminder_product/issues" + "url": "https://github.com/datavisyn/gapminder_product/issues" }, "repository": { "type": "git", - "url": "https://github.com/Caleydo/gapminder_product.git" + "url": "git@github.com:Caleydo/gapminder_product.git" }, "scripts": { "build": "node build.js --skipTests", @@ -25,8 +25,8 @@ "bluebird": "3.4.6", "chalk": "1.1.3", "fs-extra": "^1.0.0", - "generator-phovea": "github:phovea/generator-phovea#develop", - "lodash": "4.17.10", + "generator-phovea": "^3.1.0", + "lodash": "4.17.14", "mkdirp": "0.5.1", "yamljs": "0.2.8", "yargs-parser": "4.2.0", From 987c3e5e7c1383a22ae8998be19534fd27a25453 Mon Sep 17 00:00:00 2001 From: dvvanessastoiber Date: Tue, 14 Jan 2020 08:46:00 +0100 Subject: [PATCH 10/34] update yeoman-environment --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 3f7e859..665fdad 100644 --- a/package.json +++ b/package.json @@ -30,6 +30,6 @@ "mkdirp": "0.5.1", "yamljs": "0.2.8", "yargs-parser": "4.2.0", - "yeoman-environment": "1.6.6" + "yeoman-environment": "2.7.0" } } From 0c59d2ce102aed776b995d5e75711d78368a2f8e Mon Sep 17 00:00:00 2001 From: dvvanessastoiber Date: Tue, 14 Jan 2020 08:49:13 +0100 Subject: [PATCH 11/34] update awscli version --- .circleci/config.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 2cc66a5..a555232 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -30,15 +30,15 @@ jobs: name: Show installed npm dependencies command: npm list --depth=1 || true - restore_cache: - key: awscli-1.11.113 + key: awscli-1.16.312 - run: name: Install AWS CLI command: | virtualenv ~/venv . 
~/venv/bin/activate
-            pip install awscli==1.11.113
+            pip install awscli==1.16.312
       - save_cache:
-          key: awscli-1.11.113
+          key: awscli-1.16.312
           paths:
           - ~/venv
       - run:
@@ -79,7 +79,7 @@ jobs:
             export AWS_DEFAULT_REGION=eu-central-1
             baseName=${CIRCLE_PROJECT_REPONAME%_product}
             # list repos filter to just the one of this product and delete untagged ones
-            aws ecr describe-repositories --output text | cut -f5 | grep "caleydo/${baseName}" | while read line; do aws ecr list-images --repository-name $line --filter tagStatus=UNTAGGED --query 'imageIds[*]' --output text | while read imageId; do aws ecr batch-delete-image --output text --repository-name $line --image-ids imageDigest=$imageId; done; done
+            aws ecr describe-repositories --output text | cut -f6 | grep "caleydo/${baseName}" | while read line; do aws ecr list-images --repository-name $line --filter tagStatus=UNTAGGED --query 'imageIds[*]' --output text | while read imageId; do aws ecr batch-delete-image --output text --repository-name $line --image-ids imageDigest=$imageId; done; done
       - deploy:
           name: Restart AWS task # assumes the task definition is called -
           command: |

From c151d9422c1c75d3c5cf3885d9f0a4a5fb46efc8 Mon Sep 17 00:00:00 2001
From: dvvanessastoiber
Date: Tue, 14 Jan 2020 08:53:49 +0100
Subject: [PATCH 12/34] run `yo phovea:update`

---
 .circleci/config.yml |  60 +++-
 .yo-rc.json          |  10 +-
 Jenkinsfile          |   6 +-
 README.md            |  10 +-
 build.js             | 749 +++++++++++++++++++++++++++++++++----------
 package.json         |  12 +-
 6 files changed, 655 insertions(+), 192 deletions(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index a3e8144..a0e689e 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -3,24 +3,36 @@ jobs:
   build:
     working_directory: ~/phovea
     docker:
-      - image: caleydo/phovea_circle_python:1.0
-      - image: docker:17.05.0-ce-git
+      - image: caleydo/phovea_circleci_python:v3.0
     steps:
      - checkout
      - setup_remote_docker
+      - run:
+          name: Show Node.js and npm version
+          command: |
+            node -v
+            npm -v
+      - run:
+          name: Show Python and pip version
+          command: |
+            python --version
+            pip --version
       - restore_cache:
-          key: dependency-cache2-{{ checksum "package.json" }}
+          key: dependency-cache-{{ checksum "package.json" }}
       - run:
-          name: install-npm-wee
+          name: Install npm dependencies
           command: npm install
       - save_cache:
-          key: dependency-cache2-{{ checksum "package.json" }}
+          key: dependency-cache-{{ checksum "package.json" }}
           paths:
           - ./node_modules
+      - run:
+          name: Show installed npm dependencies
+          command: npm list --depth=1 || true
       - restore_cache:
           key: awscli-1.11.113
       - run:
-          name: install-aws-cli
+          name: Install AWS CLI
           command: |
             virtualenv ~/venv
             . ~/venv/bin/activate
             pip install awscli==1.11.113
       - save_cache:
           key: awscli-1.11.113
           paths:
           - ~/venv
       - run:
-          name: login ecr and docker hub
+          name: Login AWS ECR and DockerHub
           command: |
             . ~/venv/bin/activate
             cat > ~/.dockercfg << EOF
             login="$(aws ecr get-login --no-include-email)"
             ${login}
       - deploy:
-          name: build and deploy
+          name: Build and deploy
           command: |
             . ~/venv/bin/activate
             case $CIRCLE_BRANCH in
             node build.js --skipSaveImage --noDefaultTags --pushExtra=${awsTag} --pushTo=922145058410.dkr.ecr.eu-central-1.amazonaws.com/caleydo
       - store_artifacts:
           path: build
-          prefix: build
+          destination: build
       - deploy:
-          name: cleanup untagged aws repo
+          name: Cleanup untagged AWS repositories
           command: |
             . 
~/venv/bin/activate export AWS_DEFAULT_REGION=eu-central-1 @@ -69,7 +81,7 @@ jobs: # list repos filter to just the one of this product and delete untagged ones aws ecr describe-repositories --output text | cut -f5 | grep "caleydo/${baseName}" | while read line; do aws ecr list-images --repository-name $line --filter tagStatus=UNTAGGED --query 'imageIds[*]' --output text | while read imageId; do aws ecr batch-delete-image --output text --repository-name $line --image-ids imageDigest=$imageId; done; done - deploy: - name: restart aws #assumes the task definition is called - + name: Restart AWS task # assumes the task definition is called - command: | . ~/venv/bin/activate export AWS_DEFAULT_REGION=eu-central-1 @@ -89,3 +101,29 @@ jobs: fi aws --output text ecs run-task --cluster caleydo --task-definition ${awsFamily} --started-by CircleCIAutoUpdate fi +workflows: + version: 2 +# build-nightly: +# triggers: +# - schedule: +# cron: "15 1 * * 1-5" # "At 01:15 on every day-of-week from Monday through Friday.”, see: https://crontab.guru/#15_1_*_*_1-5 +# filters: +# branches: +# only: +# - develop +# jobs: +# - build + build-branch: + jobs: + - build: + filters: + tags: + ignore: /^v.*/ + build-tag: + jobs: + - build: + filters: + branches: + ignore: /.*/ + tags: + only: /^v.*/ diff --git a/.yo-rc.json b/.yo-rc.json index 8bb7585..030475d 100644 --- a/.yo-rc.json +++ b/.yo-rc.json @@ -2,8 +2,14 @@ "generator-phovea": { "type": "product", "name": "gapminder_product", - "author": "Samuel Gratzl", + "author": "datavisyn", "today": "Fri, 03 Feb 2017 08:30:49 GMT", - "githubAccount": "Caleydo" + "githubAccount": "datavisyn", + "promptValues": { + "authorName": "datavisyn", + "authorEmail": "contact@datavisyn.io", + "authorUrl": "https://www.datavisyn.io/", + "githubAccount": "datavisyn" + } } } \ No newline at end of file diff --git a/Jenkinsfile b/Jenkinsfile index 82f0a16..44e9742 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -21,9 +21,11 @@ node { try { withCredentials([usernameColonPassword(credentialsId: 'PHOVEA_GITHUB_CREDENTIALS', variable: 'PHOVEA_GITHUB_CREDENTIALS')]) { docker.withRegistry("https://922145058410.dkr.ecr.eu-central-1.amazonaws.com", "ecr:eu-central-1:PHOVEA_AWS_CREDENTIALS") { - wrap([$class: 'Xvfb']) { - sh 'node build.js --skipTests --skipSaveImage --noDefaultTags --pushExtra=latest --pushTo=922145058410.dkr.ecr.eu-central-1.amazonaws.com/caleydo' + docker.withRegistry("", "PHOVEA_DOCKER_HUB_CREDENTIALS") { + wrap([$class: 'Xvfb']) { + sh 'node build.js --skipTests --skipSaveImage --noDefaultTags --pushExtra=latest --pushTo=922145058410.dkr.ecr.eu-central-1.amazonaws.com/caleydo' } + } } } currentBuild.result = "SUCCESS" diff --git a/README.md b/README.md index 64fcc8d..979dc21 100644 --- a/README.md +++ b/README.md @@ -7,7 +7,7 @@ Installation ------------ ``` -git clone https://github.com/Caleydo/gapminder_product.git +git clone git@github.com:Caleydo/gapminder_product.git cd gapminder_product npm install ``` @@ -38,7 +38,7 @@ This repository is part of **[Phovea](http://phovea.caleydo.org/)**, a platform [phovea-url]: https://phovea.caleydo.org [npm-image]: https://badge.fury.io/js/gapminder_product.svg [npm-url]: https://npmjs.org/package/gapminder_product -[travis-image]: https://travis-ci.org/Caleydo/gapminder_product.svg?branch=master -[travis-url]: https://travis-ci.org/Caleydo/gapminder_product -[daviddm-image]: https://david-dm.org/Caleydo/gapminder_product/status.svg -[daviddm-url]: https://david-dm.org/Caleydo/gapminder_product +[travis-image]: 
https://travis-ci.org/datavisyn/gapminder_product.svg?branch=master +[travis-url]: https://travis-ci.org/datavisyn/gapminder_product +[daviddm-image]: https://david-dm.org/datavisyn/gapminder_product/status.svg +[daviddm-url]: https://david-dm.org/datavisyn/gapminder_product diff --git a/build.js b/build.js index d68016f..f6283d2 100644 --- a/build.js +++ b/build.js @@ -7,7 +7,9 @@ const path = require('path'); const fs = Promise.promisifyAll(require('fs-extra')); const chalk = require('chalk'); const pkg = require('./package.json'); +// see show help const argv = require('yargs-parser')(process.argv.slice(2)); + const quiet = argv.quiet !== undefined; const now = new Date(); @@ -15,32 +17,116 @@ const prefix = (n) => n < 10 ? ('0' + n) : n.toString(); const buildId = `${now.getUTCFullYear()}${prefix(now.getUTCMonth())}${prefix(now.getUTCDate())}-${prefix(now.getUTCHours())}${prefix(now.getUTCMinutes())}${prefix(now.getUTCSeconds())}`; pkg.version = pkg.version.replace('SNAPSHOT', buildId); const env = Object.assign({}, process.env); +const productName = pkg.name.replace('_product', ''); + +function showHelp(steps, chain) { + console.info(`node build.js -- step1 step2 +possible options: + * --quiet ... reduce log messages + * --serial ... build elements sequentially + * --skipTests ... skip tests + * --injectVersion ... injects the product version into the package.json of the built component + * --useSSH ... clone via ssh instead of https + * --skipCleanUp ... skip cleaning up old docker images + * --skipSaveImage ... skip saving the generated docker images + * --pushTo ... push docker images to the given registry + * --noDefaultTags ... don't push generated default tag : + * --pushExtra ... push additional custom tag: e.g., --pushExtra=develop + * --forceLabel ... force to use the label even only a single service exists + * --dryRun ... just compute chain no execution + * --help ... show this help message + +arguments: (starting with --!) optional list of steps to execute in the given order (expert mode) by default the default chain is executed + `); + + steps = Object.keys(steps); + const primary = steps.filter((d) => !d.includes(':')).sort((a, b) => a.localeCompare(b)); + const secondary = steps.filter((d) => d.includes(':')).sort((a, b) => a.localeCompare(b)); + + console.info('possible primary steps:\n ', primary.join('\n ')); + console.info('possible secondary steps:\n ', secondary.join('\n ')); + + console.info('default chain:\n', JSON.stringify(chain, null, ' ')); +} +/** + * generates a repo url to clone depending on the argv.useSSH option + * @param {string} url the repo url either in git@ for https:// form + * @returns the clean repo url + */ function toRepoUrl(url) { + if (url.startsWith('git@')) { + if (argv.useSSH) { + return url; + } + // have an ssh url need an http url + const m = url.match(/(https?:\/\/([^/]+)\/|git@(.+):)([\w\d-_/]+)(.git)?/); + return `https://${m[3]}/${m[4]}.git`; + } + if (url.startsWith('http')) { + if (!argv.useSSH) { + return url; + } + // have a http url need an ssh url + const m = url.match(/(https?:\/\/([^/]+)\/|git@(.+):)([\w\d-_/]+)(.git)?/); + return `git@${m[2]}:${m[4]}.git`; + } + if (!url.includes('/')) { + url = `Caleydo/${url}`; + } if (argv.useSSH) { - return `git@github.com:${url}.git` + return `git@github.com:${url}.git`; } - return url.startsWith('https://github.com/') ? 
url : `https://github.com/${url}.git`; + return `https://github.com/${url}.git`; } +/** + * guesses the credentials environment variable based on the given repository hostname + * @param {string} repo + */ +function guessUserName(repo) { + // extract the host + const host = repo.match(/:\/\/([^/]+)/)[1]; + const hostClean = host.replace(/\./g, '_').toUpperCase(); + // e.g. GITHUB_COM_CREDENTIALS + const envVar = process.env[`${hostClean}_CREDENTIALS`]; + if (envVar) { + return envVar; + } + return process.env.PHOVEA_GITHUB_CREDENTIALS; +} function toRepoUrlWithUser(url) { const repo = toRepoUrl(url); - const username_and_password = process.env.PHOVEA_GITHUB_CREDENTIALS; - if (repo.includes('git@github.com') || !username_and_password) { + if (repo.startsWith('git@')) { // ssh return repo; } - return repo.replace('://', `://${username_and_password}@`); + const usernameAndPassword = guessUserName(repo); + if (!usernameAndPassword) { // ssh or no user given + return repo; + } + return repo.replace('://', `://${usernameAndPassword}@`); } - function fromRepoUrl(url) { if (url.includes('.git')) { - return url.match(/\/(.*)\.git/)[0] + return url.match(/\/([^/]+)\.git/)[0]; } return url.slice(url.lastIndexOf('/') + 1); } +/** + * deep merge with array union + * @param {*} target + * @param {*} source + */ +function mergeWith(target, source) { + const _ = require('lodash'); + const mergeArrayUnion = (a, b) => Array.isArray(a) ? _.union(a, b) : undefined; + _.mergeWith(target, source, mergeArrayUnion); + return target; +} + function downloadDataUrl(url, dest) { if (!url.startsWith('http')) { url = `https://s3.eu-central-1.amazonaws.com/phovea-data-packages/${url}`; @@ -49,7 +135,7 @@ function downloadDataUrl(url, dest) { console.log(chalk.blue('download file', url)); return new Promise((resolve, reject) => { const file = fs.createWriteStream(dest); - const request = http.get(url, (response) => { + http.get(url, (response) => { response.pipe(file); file.on('finish', () => { file.close(resolve); @@ -72,11 +158,13 @@ function downloadDataFile(desc, destDir, cwd) { url: desc }; } - switch(desc.type) { - case 'url': - const destName = toDownloadName(desc.url); - return fs.ensureDirAsync(destDir).then(() => downloadDataUrl(desc.url, path.join(destDir, destName))); - case 'repo': + desc.type = desc.type || (desc.url ? 'url' : (desc.repo ? 'repo' : 'unknown')); + switch (desc.type) { + case 'url': { + desc.name = desc.name || toDownloadName(desc.url); + return fs.ensureDirAsync(destDir).then(() => downloadDataUrl(desc.url, `${destDir}/${desc.name}`)); + } + case 'repo': { desc.name = desc.name || fromRepoUrl(desc.repo); let downloaded; if (fs.existsSync(path.join(cwd, desc.name))) { @@ -85,6 +173,7 @@ function downloadDataFile(desc, destDir, cwd) { downloaded = cloneRepo(desc, cwd); } return downloaded.then(() => fs.copyAsync(`${cwd}/${desc.name}/data`, `${destDir}/${desc.name}`)); + } default: console.error('unknown data type:', desc.type); return null; @@ -96,25 +185,36 @@ function downloadDataFile(desc, destDir, cwd) { * @param cmd command as array * @param args arguments * @param opts options + * @returns a promise with the result code or a reject with the error string */ function spawn(cmd, args, opts) { const spawn = require('child_process').spawn; const _ = require('lodash'); return new Promise((resolve, reject) => { - const p = spawn(cmd, typeof args === 'string' ? args.split(' ') : args, _.merge({stdio: ['ignore', 1, 2]}, opts)); + const p = spawn(cmd, typeof args === 'string' ? 
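// A minimal sketch of the host-based credentials lookup in guessUserName()
// above, using an illustrative GitHub url:
const demoHost = 'https://github.com/phovea/phovea_server.git'.match(/:\/\/([^/]+)/)[1];
console.log(demoHost.replace(/\./g, '_').toUpperCase() + '_CREDENTIALS');
// -> 'GITHUB_COM_CREDENTIALS'; if that variable is unset the code falls back
// to PHOVEA_GITHUB_CREDENTIALS, and toRepoUrlWithUser() splices the value in
// as https://user:password@github.com/...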
args.split(' ') : args, _.merge({stdio: argv.quiet ? ['ignore', 'pipe', 'pipe'] : ['ignore', 1, 2]}, opts)); + const out = []; + if (p.stdout) { + p.stdout.on('data', (chunk) => out.push(chunk)); + } + if (p.stderr) { + p.stderr.on('data', (chunk) => out.push(chunk)); + } p.on('close', (code, signal) => { if (code === 0) { console.info(cmd, 'ok status code', code, signal); resolve(code); } else { console.error(cmd, 'status code', code, signal); - reject(`${cmd} failed with status code ${code} ${signal}`); + if (args.quiet) { + // log output what has been captured + console.log(out.join('\n')); + } + reject(new Error(`${cmd} failed with status code ${code} ${signal}`)); } }); }); } - /** * run npm with the given args * @param cwd working directory @@ -151,23 +251,23 @@ function dockerSave(image, target) { p.stderr.on('data', (data) => console.error(chalk.red(data.toString()))); p2.stderr.on('data', (data) => console.error(chalk.red(data.toString()))); } - p2.on('close', (code) => code == 0 ? resolve() : reject(code)); + p2.on('close', (code) => code === 0 ? resolve() : reject(code)); }); } -function dockerRemoveImages(productName) { - console.log(chalk.blue(`docker images | grep ${productName} | awk '{print $1":"$2}') | xargs docker rmi`)); +function dockerRemoveImages() { + console.log(chalk.blue(`docker images | grep ${productName} | awk '{print $1":"$2}') | xargs --no-run-if-empty docker rmi`)); const spawn = require('child_process').spawn; const opts = {env}; - return new Promise((resolve, reject) => { + return new Promise((resolve) => { const p = spawn('docker', ['images'], opts); const p2 = spawn('grep', [productName], opts); p.stdout.pipe(p2.stdin); const p3 = spawn('awk', ['{print $1":"$2}'], opts); p2.stdout.pipe(p3.stdin); - const p4 = spawn('xargs', ['docker', 'rmi'], {env, stdio: [p3.stdout, 1, 2]}); + const p4 = spawn('xargs', ['--no-run-if-empty', 'docker', 'rmi'], {env, stdio: [p3.stdout, 1, 2]}); p4.on('close', (code) => { - if (code == 0) { + if (code === 0) { resolve(); } else { console.log('invalid error code, but continuing'); @@ -197,12 +297,13 @@ function yo(generator, options, cwd, args) { const yeoman = require('yeoman-environment'); // call yo internally const yeomanEnv = yeoman.createEnv([], {cwd, env}, quiet ? createQuietTerminalAdapter() : undefined); - yeomanEnv.register(require.resolve('generator-phovea/generators/' + generator), 'phovea:' + generator); const _args = Array.isArray(args) ? 
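// A minimal sketch of chaining the promise-returning helpers above; the
// paths and the image tag are hypothetical:
npm('./tmp0_gapmi/gapminder', 'install')
  .then(() => npm('./tmp0_gapmi/gapminder', 'run dist'))
  .then(() => docker('./tmp0_gapmi/gapminder', 'build -t gapminder:dev -f deploy/Dockerfile .'))
  .catch((err) => console.error('step failed:', err));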
args.join(' ') : args || ''; return new Promise((resolve, reject) => { try { console.log(cwd, chalk.blue('running yo phovea:' + generator)); - yeomanEnv.run(`phovea:${generator} ${_args}`, options, resolve); + yeomanEnv.lookup(() => { + yeomanEnv.run(`phovea:${generator} ${_args}`, options, resolve); + }); } catch (e) { console.error('error', e, e.stack); reject(e); @@ -215,6 +316,7 @@ function cloneRepo(p, cwd) { p.name = p.name || fromRepoUrl(p.repo); p.repo = p.repo || `phovea/${p.name}`; p.branch = p.branch || 'master'; + return yo('clone-repo', { branch: p.branch, extras: '--depth 1', @@ -224,39 +326,31 @@ function cloneRepo(p, cwd) { } function resolvePluginType(p, dir) { + if (!fs.existsSync(`${dir}/${p.name}/.yo-rc.json`)) { + p.pluginType = 'lib'; + p.isHybridType = false; + return; + } return fs.readJSONAsync(`${dir}/${p.name}/.yo-rc.json`).then((json) => { p.pluginType = json['generator-phovea'].type; p.isHybridType = p.pluginType.includes('-'); }); } -function preBuild(p, dir) { - const hasAdditional = p.additional.length > 0; - let act = fs.emptyDirAsync(dir) - .then(() => cloneRepo(p, dir)) - .then(() => resolvePluginType(p, dir)); - if (hasAdditional) { - act = act - .then(() => Promise.all(p.additional.map((pi) => cloneRepo(pi, dir).then(resolvePluginType.bind(this, pi, dir))))); - } - return act; -} - function loadComposeFile(dir, p) { const composeFile = `${dir}/${p.name}/deploy/docker-compose.partial.yml`; if (fs.existsSync(composeFile)) { const yaml = require('yamljs'); return fs.readFileAsync(composeFile).then((content) => yaml.parse(content.toString())); - } else { - return Promise.resolve({}); } + return Promise.resolve({}); } function patchComposeFile(p, composeTemplate) { const service = {}; if (composeTemplate && composeTemplate.services) { const firstService = Object.keys(composeTemplate.services)[0]; - //copy data from first service + // copy data from first service Object.assign(service, composeTemplate.services[firstService]); delete service.build; } @@ -272,104 +366,79 @@ function patchComposeFile(p, composeTemplate) { return r; } - -function postBuild(p, dir, buildInSubDir) { - return Promise.resolve(null) - .then(() => docker(`${dir}${buildInSubDir ? '/' + p.name : ''}`, `build -t ${p.image} -f deploy/Dockerfile .`)) - .then(() => argv.skipSaveImage ? 
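// A sketch of what patchComposeFile() above produces (the middle of the
// function is elided by the hunk, so the exact fields are partly inferred):
// given a plugin partial like
//   services: { api: { build: ., environment: ['FOO=bar'] } }
// it copies the first service, drops its `build` section and re-keys it
// under the product, roughly
//   services: { gapminder_server: { environment: ['FOO=bar'], image: <p.image> } }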
null : dockerSave(p.image, `build/${p.label}_image.tar.gz`)) - .then(() => Promise.all([loadComposeFile(dir, p).then(patchComposeFile.bind(null, p))].concat(p.additional.map((pi) => loadComposeFile(dir, pi))))) - .then(mergeCompose); +function patchDockerfile(p, dockerFile) { + if (!p.baseImage) { + return null; + } + return fs.readFileAsync(dockerFile).then((content) => { + content = content.toString(); + // patch the Dockerfile by replacing the FROM statement + const r = /^\s*FROM (.+)\s*$/igm; + const fromImage = r.exec(content)[1]; + console.log(`patching ${dockerFile} change from ${fromImage} -> ${p.baseImage}`); + content = content.replace(r, `FROM ${p.baseImage}`); + return fs.writeFileAsync(dockerFile, content); + }); } -function buildWebApp(p, dir) { - console.log(dir, chalk.blue('building web application:'), p.label); - const name = p.name; - const hasAdditional = p.additional.length > 0; - let act = preBuild(p, dir); - //let act = Promise.resolve(null); - if (hasAdditional) { - act = act - .then(() => yo('workspace', {noAdditionals: true}, dir)) - .then(() => npm(dir, 'install')); - //test all modules - if (hasAdditional && !argv.skipTests) { - act = act.then(() => Promise.all(p.additional.map((pi) => npm(dir, `run test${pi.isHybridType ? ':web' : ''}:${pi.name}`)))); +function patchWorkspace(p) { + // prepend docker_script in the workspace + if (fs.existsSync('./docker_script.sh')) { + console.log('patch workspace and prepend docker_script.sh'); + let content = fs.readFileSync('./docker_script.sh').toString(); + if (fs.existsSync(p.tmpDir + '/docker_script.sh')) { + content += '\n' + fs.readFileSync(p.tmpDir + '/docker_script.sh').toString(); } - act = act - .then(() => npm(dir, `run dist${p.isHybridType ? ':web' : ''}:${p.name}`)); - } else { - act = act - .then(() => npm(dir + '/' + name, 'install')) - .then(() => npm(dir + '/' + name, `run dist${p.isHybridType ? ':web' : ''}`)); + fs.writeFileSync(p.tmpDir + '/docker_script.sh', content); } - return act - .then(() => fs.renameAsync(`${dir}/${p.name}/dist/${p.name}.tar.gz`, `./build/${p.label}.tar.gz`)) - .then(postBuild.bind(null, p, dir, true)); -} - -function buildServerApp(p, dir) { - console.log(dir, chalk.blue('building service package:'), p.label); - const name = p.name; - - let act = preBuild(p, dir); - act = act - .then(() => yo('workspace', {noAdditionals: true}, dir)); - if (!argv.skipTests) { - act = act - .then(() => console.log(chalk.yellow('create test environment'))) - .then(() => spawn('pip', 'install --no-cache-dir -r requirements.txt', {cwd: dir})) - .then(() => spawn('pip', 'install --no-cache-dir -r requirements_dev.txt', {cwd: dir})); + if (argv.injectVersion) { + const pkgfile = `${p.tmpDir}/${p.name}/package.json`; + if (fs.existsSync(pkgfile)) { + const ppkg = require(pkgfile); + ppkg.version = pkg.version; + fs.writeJSONSync(pkgfile, ppkg); + } else { + console.warn('cannot inject version, main package.json not found'); + } } - act = act - .then(() => npm(dir + '/' + name, `run build${p.isHybridType ? ':python' : ''}`)) - .then(() => Promise.all(p.additional.map((pi) => npm(dir + '/' + pi.name, `run build${pi.isHybridType ? 
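// A minimal sketch of the FROM rewrite performed by patchDockerfile() above;
// the base images are illustrative:
const demoDockerfile = 'FROM python:3.6\nCOPY . /app';
console.log(demoDockerfile.replace(/^\s*FROM (.+)\s*$/igm, 'FROM python:3.7-slim'));
// -> 'FROM python:3.7-slim\nCOPY . /app' -- only the FROM line changes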
':python' : ''}`)))); - - //copy all together - act = act - .then(() => fs.ensureDirAsync(`${dir}/build/source`)) - .then(() => fs.copyAsync(`${dir}/${name}/build/source`, `${dir}/build/source/`)) - .then(() => Promise.all(p.additional.map((pi) => fs.copyAsync(`${dir}/${pi.name}/build/source`, `${dir}/build/source/`)))); - - //copy data packages - act = act.then(() => Promise.all(p.data.map((d) => downloadDataFile(d, `${dir}/build/source/_data`, dir)))); - //let act = Promise.resolve([]); - - //copy main deploy thing and create a docker out of it - return act - .then(() => fs.ensureDirAsync(`${dir}/deploy`)) - .then(() => fs.copyAsync(`${dir}/${name}/deploy`, `${dir}/deploy/`)) - .then(postBuild.bind(null, p, dir, false)); -} - -function buildImpl(d, dir) { - switch (d.type) { - case 'static': - case 'web': - return buildWebApp(d, dir); - case 'api': - d.name = d.name || 'phovea_server'; - return buildServerApp(d, dir); - case 'service': - return buildServerApp(d, dir); - default: - console.error(chalk.red('unknown product type: ' + d.type)); - return Promise.resolve(null); + // inject extra phovea.js + if (fs.existsSync('./phovea.js')) { + console.log('patch workspace and add workspace phovea.js'); + let registry = fs.readFileSync(p.tmpDir + '/phovea_registry.js').toString(); + fs.copyFileSync('./phovea.js', p.tmpDir + '/phovea.js'); + + registry += `\n\n + import {register} from 'phovea_core/src/plugin'; + register('__product',require('./phovea.js')); + `; + fs.writeFileSync(p.tmpDir + '/phovea_registry.js', registry); } } function mergeCompose(composePartials) { let dockerCompose = {}; - const _ = require('lodash'); - const mergeArrayUnion = (a, b) => Array.isArray(a) ? _.union(a, b) : undefined; - composePartials.forEach((c) => _.mergeWith(dockerCompose, c, mergeArrayUnion)); + composePartials.forEach((c) => mergeWith(dockerCompose, c)); return dockerCompose; } -function buildCompose(descs, composePartials) { +function buildComposePartials(descs) { + const validDescs = descs.filter((d) => !d.error); + + // merge a big compose file including all + return Promise.all(validDescs.map((p) => { + return Promise.all([loadComposeFile(p.tmpDir, p).then(patchComposeFile.bind(null, p))].concat(p.additional.map((pi) => loadComposeFile(p.tmpDir, pi)))) + .then((partials) => { + p.composePartial = mergeCompose(partials); + }); + })); +} + +function buildCompose(descs, dockerComposePatch) { console.log('create docker-compose.yml'); - const dockerCompose = mergeCompose(composePartials); + + const dockerCompose = mergeCompose(descs.map((d) => d.composePartial).filter(Boolean)); const services = dockerCompose.services; // link the api server types to the web types and server to the api const web = descs.filter((d) => d.type === 'web').map((d) => d.label); @@ -386,6 +455,23 @@ function buildCompose(descs, composePartials) { services[w].links.push(`${s.label}:${s.name}`); }); }); + + if (services._host) { + // inline _host to apis + const host = services._host; + delete services._host; + api.forEach((s) => { + services[s] = mergeCompose([host, services[s]]); + }); + } + + Object.keys(dockerComposePatch.services).forEach((service) => { + if (services[service] !== undefined) { + console.log(`patch generated docker-compose file for ${service}`); + mergeWith(services[service], dockerComposePatch.services[service]); + } + }); + const yaml = require('yamljs'); return fs.writeFileAsync('build/docker-compose.yml', yaml.stringify(dockerCompose, 100, 2)) .then(() => dockerCompose); @@ -402,7 +488,7 @@ function 
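// A sketch of the web-to-server linking above, assuming one web client
// labelled 'gapminder' next to the 'gapminder_server' api (the exact alias
// list depends on the loop body elided by the hunk):
//   services.gapminder.links = ['gapminder_server:phovea_server']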
pushImages(images) { if (!argv.noDefaultTags) { tags.push(...images.map((image) => ({image, tag: `${dockerRepository}/${image}`}))); } - if (argv.pushExtra) { //push additional custom prefix without the version + if (argv.pushExtra) { // push additional custom prefix without the version tags.push(...images.map((image) => ({ image, tag: `${dockerRepository}/${image.substring(0, image.lastIndexOf(':'))}:${argv.pushExtra}` @@ -415,6 +501,214 @@ function pushImages(images) { .then(() => Promise.all(tags.map((tag) => docker('.', `push ${tag.tag}`)))); } +function loadPatchFile() { + const existsYaml = fs.existsSync('./docker-compose-patch.yaml'); + if (!existsYaml && !fs.existsSync('./docker-compose-patch.yml')) { + return {services: {}}; + } + const content = fs.readFileSync(existsYaml ? './docker-compose-patch.yaml' : './docker-compose-patch.yml'); + const yaml = require('yamljs'); + const r = yaml.parse(content.toString()); + if (!r.services) { + r.services = {}; + } + return r; +} + +function fillDefaults(descs, dockerComposePatch) { + const singleService = descs.length === 1 && (argv.forceLabel === undefined); + + descs.forEach((d, i) => { + // default values + d.additional = d.additional || []; + d.data = d.data || []; + d.name = d.name || (d.repo ? fromRepoUrl(d.repo) : d.label); + d.label = d.label || d.name; + d.symlink = d.symlink || null; // default value + d.image = d.image || `${productName}${singleService ? '' : `/${d.label}`}:${pkg.version}`; + // incorporate patch file + if (dockerComposePatch.services[d.label] && dockerComposePatch.services[d.label].image) { + // use a different base image to build the item + d.baseImage = dockerComposePatch.services[d.label].image; + delete dockerComposePatch.services[d.label].image; + } + // include hint in the tmp directory which one is it + d.tmpDir = `./tmp${i}_${d.name.replace(/\s+/, '').slice(0, 5)}`; + }); + + return descs; +} + +function asChain(steps, chain) { + if (chain.length === 0) { + return []; + } + const possibleSteps = Object.keys(steps); + + const callable = (c) => { + if (typeof c === 'function') { + return c; + } + + if (typeof c === 'string') { + // simple lookup + if (!possibleSteps.includes(c)) { + console.error('invalid step:', c); + throw new Error('invalid step: ' + c); + } + return callable(steps[c]); + } + + if (Array.isArray(c)) { // sequential sub started + const sub = c.map(callable); + return () => { + console.log('run sequential sub chain: ', JSON.stringify(c, null, ' ')); + let step = Promise.resolve(); + for (const s of sub) { + step = step.then(s); + } + return step; + }; + } + // parallel = object + const sub = Object.keys(c).map((ci) => callable(c[ci])); + return () => { + console.log('run parallel sub chain: ', JSON.stringify(c, null, ' ')); + return Promise.all(sub.map((d) => d())); // run sub lazy combined with all + }; + }; + return chain.map(callable); +} + +function runChain(chain, catchErrors) { + let start = null; + let step = new Promise((resolve) => { + start = resolve; + }); + + for (const c of chain) { + step = step.then(c); + } + + step.catch(catchErrors); + + return () => { + start(); // resolve first to start chain + return step; // return last result + }; +} + +function strObject(items) { + const obj = {}; + for (const item of items) { + obj[item] = item; + } + return obj; +} + +function buildDockerImage(p) { + const buildInSubDir = p.type === 'web' || p.type === 'static'; + let buildArgs = ''; + // pass through http_proxy, no_proxy, and https_proxy env variables + for (const key of 
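// A minimal, self-contained sketch of the chain grammar implemented by
// asChain()/runChain() above: strings look up named steps, arrays run
// sequentially, plain objects run their values in parallel.
const demoSteps = {
  a: () => Promise.resolve(console.log('a')),
  b: () => Promise.resolve(console.log('b')),
  c: () => Promise.resolve(console.log('c'))
};
// 'a' first, then 'b' and 'c' concurrently:
runChain(asChain(demoSteps, ['a', {b: 'b', c: 'c'}]), console.error)();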
Object.keys(process.env)) { + const lkey = key.toLowerCase(); + if (lkey === 'http_proxy' || lkey === 'https_proxy' || lkey === 'no_proxy') { + // pass through + buildArgs += ` --build-arg ${lkey}='${process.env[key]}'`; + } + } + + // patch the docker file with the with an optional given baseImage + return Promise.resolve(patchDockerfile(p, `${p.tmpDir}${buildInSubDir ? '/' + p.name : ''}/deploy/Dockerfile`)) + // create the container image + .then(() => docker(`${p.tmpDir}${buildInSubDir ? '/' + p.name : ''}`, `build -t ${p.image}${buildArgs} -f deploy/Dockerfile .`)) + // tag the container image + .then(() => argv.pushExtra ? docker(`${p.tmpDir}`, `tag ${p.image} ${p.image.substring(0, p.image.lastIndexOf(':'))}:${argv.pushExtra}`) : null); +} + +function createWorkspace(p) { + return yo('workspace', {noAdditionals: true, defaultApp: 'phovea'}, p.tmpDir) + .then(() => patchWorkspace(p)); +} + +function installWebDependencies(p) { + return npm(p.additional.length > 0 ? p.tmpDir : (`${p.tmpDir}/${p.name}`), 'install'); +} + +function cleanUpWebDependencies(p) { + return fs.emptyDirAsync(p.additional.length > 0 ? `${p.tmpDir}/node_modules` : (`${p.tmpDir}/${p.name}/node_modules`)); +} + +function resolvePluginTypes(p) { + if (p.pluginType) { + return Promise.resolve(); // already resolved + } + if (p.additional.length === 0) { + return resolvePluginType(p, p.tmpDir); + } + return Promise.all([resolvePluginType(p, p.tmpDir)].concat(p.additional.map((pi) => resolvePluginType(pi, p.tmpDir)))); +} + +function testWebAdditionals(p) { + return Promise.all(p.additional.map((pi) => npm(p.tmpDir, `run test${pi.isHybridType ? ':web' : ''}:${pi.name}`))); +} + +function buildWeb(p) { + const hasAdditional = p.additional.length > 0; + + let step; + if (hasAdditional) { + step = npm(p.tmpDir, `run dist${p.isHybridType ? ':web' : ''}:${p.name}`); + } else { + step = npm(`${p.tmpDir}/${p.name}`, `run dist${p.isHybridType ? ':web' : ''}`); + } + // move to target directory + return step.then(() => fs.renameAsync(`${p.tmpDir}/${p.name}/dist/${p.name}.tar.gz`, `./build/${p.label}.tar.gz`)); +} + +function installPythonTestDependencies(p) { + console.log(chalk.yellow('create test environment')); + return spawn('pip', 'install --no-cache-dir -r requirements.txt', {cwd: p.tmpDir}) + .then(() => spawn('pip', 'install --no-cache-dir -r requirements_dev.txt', {cwd: p.tmpDir})); +} + +function buildServer(p) { + let act = npm(`${p.tmpDir}/${p.name}`, `run build${p.isHybridType ? ':python' : ''}`); + for (const pi of p.additional) { + act = act.then(() => npm(`${p.tmpDir}/${pi.name}`, `run build${pi.isHybridType ? 
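// A sketch of the proxy pass-through above; the proxy url is hypothetical:
//   https_proxy=http://proxy.example:3128 node build.js
// ends up invoking
//   docker build -t <image> --build-arg https_proxy='http://proxy.example:3128' -f deploy/Dockerfile .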
':python' : ''}`)); + } + + // copy all together + act = act + .then(() => fs.ensureDirAsync(`${p.tmpDir}/build/source`)) + .then(() => fs.copyAsync(`${p.tmpDir}/${p.name}/build/source`, `${p.tmpDir}/build/source/`)) + .then(() => Promise.all(p.additional.map((pi) => fs.copyAsync(`${p.tmpDir}/${pi.name}/build/source`, `${p.tmpDir}/build/source/`)))); + + // copy main deploy thing and create a docker out of it + act = act + .then(() => fs.ensureDirAsync(`${p.tmpDir}/deploy`)) + .then(() => fs.copyAsync(`${p.tmpDir}/${p.name}/deploy`, `${p.tmpDir}/deploy/`)); + + return act; +} + +function downloadServerDataFiles(p) { + if (!argv.serial) { + return Promise.all(p.data.map((d) => downloadDataFile(d, `${p.tmpDir}/build/source/_data`, p.tmpDir))); + } + // serial + let act = Promise.resolve(); + for (const d of p.data) { + act = act.then(() => downloadDataFile(d, `${p.tmpDir}/build/source/_data`, p.tmpDir)); + } + return act; +} + +function cleanWorkspace(descs) { + console.log(chalk.yellow('clean workspace')); + return Promise.all([fs.emptyDirAsync('build')].concat(descs.map((d) => fs.emptyDirAsync(d.tmpDir)))); +} + if (require.main === module) { if (argv.skipTests) { // if skipTest option is set, skip tests @@ -422,59 +716,182 @@ if (require.main === module) { env.PHOVEA_SKIP_TESTS = true; } if (argv.quiet) { - // if skipTest option is set, skip tests console.log(chalk.blue('will try to keep my mouth shut...')); } - const descs = require('./phovea_product.json'); - const singleService = descs.length === 1; - const productName = pkg.name.replace('_product', ''); - - - fs.emptyDirAsync('build') - .then(dockerRemoveImages.bind(this, productName)) - // move my own .yo-rc.json to avoid a conflict - .then(fs.renameAsync('.yo-rc.json', '.yo-rc_tmp.json')) - .then(() => { - const buildOne = (d, i) => { - d.additional = d.additional || []; //default values - d.data = d.data || []; - d.name = d.name || fromRepoUrl(d.repo); - d.label = d.label || d.name; - if (singleService) { - d.image = `${productName}:${pkg.version}`; - } else { - d.image = `${productName}/${d.label}:${pkg.version}`; - } - let wait = buildImpl(d, './tmp' + i); - wait.catch((error) => { - d.error = error; - console.error('ERROR building ', d, error); - }); - return wait; - }; - if (argv.serial) { - let r = Promise.resolve([]); - for (let i = 0; i < descs.length; ++i) { - r = r.then((arr) => buildOne(descs[i], i).then((f) => arr.concat(f))); - } - return r; - } else { - return Promise.all(descs.map(buildOne)); - } - }) - .then((composeFiles) => buildCompose(descs, composeFiles.filter((d) => !!d))) - .then(() => pushImages(descs.filter((d) => !d.error).map((d) => d.image))) - .then(() => fs.renameAsync('.yo-rc_tmp.json', '.yo-rc.json')) - .then(() => { + const dockerComposePatch = loadPatchFile(); + const descs = fillDefaults(require('./phovea_product.json'), dockerComposePatch); + + if (fs.existsSync('.yo-rc.json')) { + fs.renameSync('.yo-rc.json', '.yo-rc_tmp.json'); + } + fs.ensureDirSync('build'); + + const cleanUp = () => { + if (fs.existsSync('.yo-rc_tmp.json')) { + fs.renameSync('.yo-rc_tmp.json', '.yo-rc.json'); + } + }; + + const catchProductBuild = (p, act) => { + // no chaining to keep error + act.catch((error) => { + p.error = error; + console.error('ERROR building ', p.name, error); + }); + return act; + }; + + const steps = { + clean: () => cleanWorkspace(descs), + prune: dockerRemoveImages, + compose: () => buildComposePartials(descs).then(() => buildCompose(descs, dockerComposePatch)), + push: () => 
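// A sketch of the error handling above: catchProductBuild() records the
// failure on the product descriptor instead of aborting the other products,
// and the 'summary' step later prints one line per product, e.g.
//   phovea_server......ERROR     (process exits with code 1)
//   gapminder..........SUCCESS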
pushImages(descs.filter((d) => !d.error).map((d) => d.image)), + summary: () => { console.log(chalk.bold('summary: ')); const maxLength = Math.max(...descs.map((d) => d.name.length)); descs.forEach((d) => console.log(` ${d.name}${'.'.repeat(3 + (maxLength - d.name.length))}` + (d.error ? chalk.red('ERROR') : chalk.green('SUCCESS')))); const anyErrors = descs.some((d) => d.error); + cleanUp(); if (anyErrors) { process.exit(1); } - }).catch((error) => { + } + }; + + const webTypes = ['static', 'web']; + const serverTypes = ['api', 'service']; + + const chainProducts = []; + for (let i = 0; i < descs.length; ++i) { + const p = descs[i]; + const suffix = p.name; + const hasAdditional = p.additional.length > 0; + const isWeb = webTypes.includes(p.type); + const isServer = serverTypes.includes(p.type); + + if (!isWeb && !isServer) { + console.error(chalk.red('unknown product type: ' + p.type)); + continue; + } + + fs.ensureDirSync(p.tmpDir); + + // clone repo + const subSteps = []; + steps[`clone:${suffix}`] = () => catchProductBuild(p, cloneRepo(p, p.tmpDir)); + subSteps.push(`clone:${suffix}`); + + if (hasAdditional) { + // clone extras + const cloneKeys = []; + for (const pi of p.additional) { + const key = `clone:${suffix}:${pi.name}`; + steps[key] = () => catchProductBuild(p, cloneRepo(pi, p.tmpDir)); + cloneKeys.push(key); + } + + if (argv.serial) { + subSteps.push(...cloneKeys); + } else { + subSteps.push(strObject(cloneKeys)); + } + } + + const needsWorskpace = (isWeb && hasAdditional) || isServer; + steps[`prepare:${suffix}`] = needsWorskpace ? () => catchProductBuild(p, createWorkspace(p)) : null; + + if (isWeb) { + steps[`install:${suffix}`] = () => catchProductBuild(p, installWebDependencies(p)); + } else { // server + steps[`install:${suffix}`] = argv.skipTests ? () => null : () => catchProductBuild(p, installPythonTestDependencies(p)); + } + steps[`test:${suffix}`] = isWeb && hasAdditional ? () => catchProductBuild(p, resolvePluginTypes(p).then(() => testWebAdditionals(p))) : () => null; + steps[`build:${suffix}`] = isWeb ? () => catchProductBuild(p, resolvePluginTypes(p).then(() => buildWeb(p))) : () => catchProductBuild(p, resolvePluginTypes(p).then(() => buildServer(p))); + steps[`data:${suffix}`] = () => catchProductBuild(p, downloadServerDataFiles(p)); + steps[`postbuild:${suffix}`] = isWeb ? () => catchProductBuild(p, cleanUpWebDependencies(p)) : () => null; + steps[`image:${suffix}`] = () => catchProductBuild(p, buildDockerImage(p)); + steps[`save:${suffix}`] = () => catchProductBuild(p, dockerSave(p.image, `build/${p.label}_image.tar.gz`)); + + subSteps.push(`prepare:${suffix}`); + subSteps.push(`install:${suffix}`); + if (!argv.skipTests) { + subSteps.push(`test:${suffix}`); + } + subSteps.push(`build:${suffix}`); + if (isServer && p.data.length > 0) { + subSteps.push(`data:${suffix}`); + } + if (isWeb) { + subSteps.push(`postbuild:${suffix}`); + } + subSteps.push(`image:${suffix}`); + if (!argv.skipSaveImage) { + subSteps.push(`save:${suffix}`); + } + + steps[`product:${suffix}`] = subSteps; + subSteps.name = `product:${suffix}`; + chainProducts.push(subSteps); + } + + // create some meta steps + { + const stepNames = Object.keys(steps); + for (const meta of ['clone', 'prepare', 'build', 'test', 'postbuild', 'image', 'product', 'install']) { + const sub = stepNames.filter((d) => d.startsWith(`${meta}:`)); + if (sub.length <= 0) { + continue; + } + steps[meta] = argv.serial ? 
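// A sketch of the step names generated above for a product whose name is
// 'phovea_server' (taken from the product list; other names follow the same
// pattern):
//   clone:phovea_server, prepare:phovea_server, install:phovea_server,
//   test:phovea_server, build:phovea_server, image:phovea_server,
//   save:phovea_server, plus the aggregate product:phovea_server
// The meta steps assembled here fan out across products, so e.g.
//   node build.js -- build:phovea_server
// re-runs a single product's build step.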
sub : strObject(sub); + } + } + + const chain = ['clean']; + + if (!argv.skipCleanUp) { + chain.push('prune'); + } + + if (argv.serial) { + chain.push(...chainProducts); // serially + } else { + const par = {}; + chainProducts.forEach((c) => { + par[c.name] = c; + }); + chain.push(par); // as object = parallel + } + // result of the promise is an array of partial docker compose files + + chain.push('compose'); + if (argv.pushTo) { + chain.push('push'); + } + chain.push('summary'); + + // XX. catch all error handling + const catchErrors = (error) => { console.error('ERROR extra building ', error); + // rename back + cleanUp(); process.exit(1); - }); + }; + + if (argv.help) { + showHelp(steps, chain); + cleanUp(); + process.exit(0); + } + + if (argv._.length > 0) { + // explicit chain replace computed one + chain.splice(0, chain.length, ...argv._); + } + + console.log(chalk.blue('executing chain:'), JSON.stringify(chain, null, ' ')); + const toExecute = asChain(steps, chain); + const launch = runChain(toExecute, catchErrors); + if (!argv.dryRun) { + launch(); + } } diff --git a/package.json b/package.json index ff3f170..51f0f7a 100644 --- a/package.json +++ b/package.json @@ -4,17 +4,17 @@ "homepage": "https://phovea.caleydo.org", "version": "1.0.0-SNAPSHOT", "author": { - "name": "Samuel Gratzl", - "email": "samuel_gratzl@gmx.at", - "url": "" + "name": "datavisyn", + "email": "contact@datavisyn.io", + "url": "https://www.datavisyn.io/" }, "license": "BSD-3-Clause", "bugs": { - "url": "https://github.com/Caleydo/gapminder_product/issues" + "url": "https://github.com/datavisyn/gapminder_product/issues" }, "repository": { "type": "git", - "url": "https://github.com/Caleydo/gapminder_product.git" + "url": "git@github.com:Caleydo/gapminder_product.git" }, "scripts": { "build": "node build.js --skipTests", @@ -26,7 +26,7 @@ "chalk": "1.1.3", "fs-extra": "^1.0.0", "generator-phovea": "github:phovea/generator-phovea#develop", - "lodash": "4.17.10", + "lodash": "4.17.14", "mkdirp": "0.5.1", "yamljs": "0.2.8", "yargs-parser": "4.2.0", From 73aa27c3d242177adc3defc442efcb75631388cb Mon Sep 17 00:00:00 2001 From: dvvanessastoiber Date: Tue, 14 Jan 2020 09:00:23 +0100 Subject: [PATCH 13/34] revert changes for README.md --- README.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 979dc21..64fcc8d 100644 --- a/README.md +++ b/README.md @@ -7,7 +7,7 @@ Installation ------------ ``` -git clone git@github.com:Caleydo/gapminder_product.git +git clone https://github.com/Caleydo/gapminder_product.git cd gapminder_product npm install ``` @@ -38,7 +38,7 @@ This repository is part of **[Phovea](http://phovea.caleydo.org/)**, a platform [phovea-url]: https://phovea.caleydo.org [npm-image]: https://badge.fury.io/js/gapminder_product.svg [npm-url]: https://npmjs.org/package/gapminder_product -[travis-image]: https://travis-ci.org/datavisyn/gapminder_product.svg?branch=master -[travis-url]: https://travis-ci.org/datavisyn/gapminder_product -[daviddm-image]: https://david-dm.org/datavisyn/gapminder_product/status.svg -[daviddm-url]: https://david-dm.org/datavisyn/gapminder_product +[travis-image]: https://travis-ci.org/Caleydo/gapminder_product.svg?branch=master +[travis-url]: https://travis-ci.org/Caleydo/gapminder_product +[daviddm-image]: https://david-dm.org/Caleydo/gapminder_product/status.svg +[daviddm-url]: https://david-dm.org/Caleydo/gapminder_product From d4ea0029c923a5fe60d1f39c488f453b53e30d89 Mon Sep 17 00:00:00 2001 From: dvvanessastoiber Date: Tue, 
14 Jan 2020 09:00:41 +0100 Subject: [PATCH 14/34] remove Jenkinsfile --- Jenkinsfile | 46 ---------------------------------------------- 1 file changed, 46 deletions(-) delete mode 100644 Jenkinsfile diff --git a/Jenkinsfile b/Jenkinsfile deleted file mode 100644 index 44e9742..0000000 --- a/Jenkinsfile +++ /dev/null @@ -1,46 +0,0 @@ -node { - stage('Checkout') { - checkout scm - } - - stage('Before Install') { - def nodeHome = tool 'node-v7' - env.PATH="${env.PATH}:${nodeHome}/bin" - def dockerHome = tool 'docker' - env.PATH="${env.PATH}:${dockerHome}/bin" - } - - stage('Install') { - sh 'node -v' - sh 'npm --version' - sh 'docker --version' - sh 'npm install' - } - - stage('Build') { - try { - withCredentials([usernameColonPassword(credentialsId: 'PHOVEA_GITHUB_CREDENTIALS', variable: 'PHOVEA_GITHUB_CREDENTIALS')]) { - docker.withRegistry("https://922145058410.dkr.ecr.eu-central-1.amazonaws.com", "ecr:eu-central-1:PHOVEA_AWS_CREDENTIALS") { - docker.withRegistry("", "PHOVEA_DOCKER_HUB_CREDENTIALS") { - wrap([$class: 'Xvfb']) { - sh 'node build.js --skipTests --skipSaveImage --noDefaultTags --pushExtra=latest --pushTo=922145058410.dkr.ecr.eu-central-1.amazonaws.com/caleydo' - } - } - } - } - currentBuild.result = "SUCCESS" - } catch (e) { - // if any exception occurs, mark the build as failed - currentBuild.result = 'FAILURE' - throw e - } finally { - // always clean up - sh 'npm prune' - sh 'rm node_modules -rf' - } - } - - stage('Post Build') { - archiveArtifacts artifacts: 'build/*' - } -} From 7a9f12b16f936da587cdeca07e02510ab426cba2 Mon Sep 17 00:00:00 2001 From: dvvanessastoiber Date: Tue, 14 Jan 2020 09:02:45 +0100 Subject: [PATCH 15/34] revert changes for _build.js_ --- build.js | 749 ++++++++++++------------------------------------------- 1 file changed, 166 insertions(+), 583 deletions(-) diff --git a/build.js b/build.js index f6283d2..d68016f 100644 --- a/build.js +++ b/build.js @@ -7,9 +7,7 @@ const path = require('path'); const fs = Promise.promisifyAll(require('fs-extra')); const chalk = require('chalk'); const pkg = require('./package.json'); -// see show help const argv = require('yargs-parser')(process.argv.slice(2)); - const quiet = argv.quiet !== undefined; const now = new Date(); @@ -17,116 +15,32 @@ const prefix = (n) => n < 10 ? ('0' + n) : n.toString(); const buildId = `${now.getUTCFullYear()}${prefix(now.getUTCMonth())}${prefix(now.getUTCDate())}-${prefix(now.getUTCHours())}${prefix(now.getUTCMinutes())}${prefix(now.getUTCSeconds())}`; pkg.version = pkg.version.replace('SNAPSHOT', buildId); const env = Object.assign({}, process.env); -const productName = pkg.name.replace('_product', ''); - -function showHelp(steps, chain) { - console.info(`node build.js -- step1 step2 -possible options: - * --quiet ... reduce log messages - * --serial ... build elements sequentially - * --skipTests ... skip tests - * --injectVersion ... injects the product version into the package.json of the built component - * --useSSH ... clone via ssh instead of https - * --skipCleanUp ... skip cleaning up old docker images - * --skipSaveImage ... skip saving the generated docker images - * --pushTo ... push docker images to the given registry - * --noDefaultTags ... don't push generated default tag : - * --pushExtra ... push additional custom tag: e.g., --pushExtra=develop - * --forceLabel ... force to use the label even only a single service exists - * --dryRun ... just compute chain no execution - * --help ... show this help message - -arguments: (starting with --!) 
optional list of steps to execute in the given order (expert mode) by default the default chain is executed - `); - - steps = Object.keys(steps); - const primary = steps.filter((d) => !d.includes(':')).sort((a, b) => a.localeCompare(b)); - const secondary = steps.filter((d) => d.includes(':')).sort((a, b) => a.localeCompare(b)); - - console.info('possible primary steps:\n ', primary.join('\n ')); - console.info('possible secondary steps:\n ', secondary.join('\n ')); - - console.info('default chain:\n', JSON.stringify(chain, null, ' ')); -} -/** - * generates a repo url to clone depending on the argv.useSSH option - * @param {string} url the repo url either in git@ for https:// form - * @returns the clean repo url - */ function toRepoUrl(url) { - if (url.startsWith('git@')) { - if (argv.useSSH) { - return url; - } - // have an ssh url need an http url - const m = url.match(/(https?:\/\/([^/]+)\/|git@(.+):)([\w\d-_/]+)(.git)?/); - return `https://${m[3]}/${m[4]}.git`; - } - if (url.startsWith('http')) { - if (!argv.useSSH) { - return url; - } - // have a http url need an ssh url - const m = url.match(/(https?:\/\/([^/]+)\/|git@(.+):)([\w\d-_/]+)(.git)?/); - return `git@${m[2]}:${m[4]}.git`; - } - if (!url.includes('/')) { - url = `Caleydo/${url}`; - } if (argv.useSSH) { - return `git@github.com:${url}.git`; + return `git@github.com:${url}.git` } - return `https://github.com/${url}.git`; + return url.startsWith('https://github.com/') ? url : `https://github.com/${url}.git`; } -/** - * guesses the credentials environment variable based on the given repository hostname - * @param {string} repo - */ -function guessUserName(repo) { - // extract the host - const host = repo.match(/:\/\/([^/]+)/)[1]; - const hostClean = host.replace(/\./g, '_').toUpperCase(); - // e.g. GITHUB_COM_CREDENTIALS - const envVar = process.env[`${hostClean}_CREDENTIALS`]; - if (envVar) { - return envVar; - } - return process.env.PHOVEA_GITHUB_CREDENTIALS; -} function toRepoUrlWithUser(url) { const repo = toRepoUrl(url); - if (repo.startsWith('git@')) { // ssh + const username_and_password = process.env.PHOVEA_GITHUB_CREDENTIALS; + if (repo.includes('git@github.com') || !username_and_password) { return repo; } - const usernameAndPassword = guessUserName(repo); - if (!usernameAndPassword) { // ssh or no user given - return repo; - } - return repo.replace('://', `://${usernameAndPassword}@`); + return repo.replace('://', `://${username_and_password}@`); } + function fromRepoUrl(url) { if (url.includes('.git')) { - return url.match(/\/([^/]+)\.git/)[0]; + return url.match(/\/(.*)\.git/)[0] } return url.slice(url.lastIndexOf('/') + 1); } -/** - * deep merge with array union - * @param {*} target - * @param {*} source - */ -function mergeWith(target, source) { - const _ = require('lodash'); - const mergeArrayUnion = (a, b) => Array.isArray(a) ? 
_.union(a, b) : undefined; - _.mergeWith(target, source, mergeArrayUnion); - return target; -} - function downloadDataUrl(url, dest) { if (!url.startsWith('http')) { url = `https://s3.eu-central-1.amazonaws.com/phovea-data-packages/${url}`; @@ -135,7 +49,7 @@ function downloadDataUrl(url, dest) { console.log(chalk.blue('download file', url)); return new Promise((resolve, reject) => { const file = fs.createWriteStream(dest); - http.get(url, (response) => { + const request = http.get(url, (response) => { response.pipe(file); file.on('finish', () => { file.close(resolve); @@ -158,13 +72,11 @@ function downloadDataFile(desc, destDir, cwd) { url: desc }; } - desc.type = desc.type || (desc.url ? 'url' : (desc.repo ? 'repo' : 'unknown')); - switch (desc.type) { - case 'url': { - desc.name = desc.name || toDownloadName(desc.url); - return fs.ensureDirAsync(destDir).then(() => downloadDataUrl(desc.url, `${destDir}/${desc.name}`)); - } - case 'repo': { + switch(desc.type) { + case 'url': + const destName = toDownloadName(desc.url); + return fs.ensureDirAsync(destDir).then(() => downloadDataUrl(desc.url, path.join(destDir, destName))); + case 'repo': desc.name = desc.name || fromRepoUrl(desc.repo); let downloaded; if (fs.existsSync(path.join(cwd, desc.name))) { @@ -173,7 +85,6 @@ function downloadDataFile(desc, destDir, cwd) { downloaded = cloneRepo(desc, cwd); } return downloaded.then(() => fs.copyAsync(`${cwd}/${desc.name}/data`, `${destDir}/${desc.name}`)); - } default: console.error('unknown data type:', desc.type); return null; @@ -185,36 +96,25 @@ function downloadDataFile(desc, destDir, cwd) { * @param cmd command as array * @param args arguments * @param opts options - * @returns a promise with the result code or a reject with the error string */ function spawn(cmd, args, opts) { const spawn = require('child_process').spawn; const _ = require('lodash'); return new Promise((resolve, reject) => { - const p = spawn(cmd, typeof args === 'string' ? args.split(' ') : args, _.merge({stdio: argv.quiet ? ['ignore', 'pipe', 'pipe'] : ['ignore', 1, 2]}, opts)); - const out = []; - if (p.stdout) { - p.stdout.on('data', (chunk) => out.push(chunk)); - } - if (p.stderr) { - p.stderr.on('data', (chunk) => out.push(chunk)); - } + const p = spawn(cmd, typeof args === 'string' ? args.split(' ') : args, _.merge({stdio: ['ignore', 1, 2]}, opts)); p.on('close', (code, signal) => { if (code === 0) { console.info(cmd, 'ok status code', code, signal); resolve(code); } else { console.error(cmd, 'status code', code, signal); - if (args.quiet) { - // log output what has been captured - console.log(out.join('\n')); - } - reject(new Error(`${cmd} failed with status code ${code} ${signal}`)); + reject(`${cmd} failed with status code ${code} ${signal}`); } }); }); } + /** * run npm with the given args * @param cwd working directory @@ -251,23 +151,23 @@ function dockerSave(image, target) { p.stderr.on('data', (data) => console.error(chalk.red(data.toString()))); p2.stderr.on('data', (data) => console.error(chalk.red(data.toString()))); } - p2.on('close', (code) => code === 0 ? resolve() : reject(code)); + p2.on('close', (code) => code == 0 ? 
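// A sketch of the data-url defaulting restored above: a bare file name is
// resolved against the phovea S3 bucket before download; 'mydata.tar.gz' is
// illustrative:
//   downloadDataUrl('mydata.tar.gz', './dest/mydata.tar.gz')
//   // fetches https://s3.eu-central-1.amazonaws.com/phovea-data-packages/mydata.tar.gz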
resolve() : reject(code)); }); } -function dockerRemoveImages() { - console.log(chalk.blue(`docker images | grep ${productName} | awk '{print $1":"$2}') | xargs --no-run-if-empty docker rmi`)); +function dockerRemoveImages(productName) { + console.log(chalk.blue(`docker images | grep ${productName} | awk '{print $1":"$2}') | xargs docker rmi`)); const spawn = require('child_process').spawn; const opts = {env}; - return new Promise((resolve) => { + return new Promise((resolve, reject) => { const p = spawn('docker', ['images'], opts); const p2 = spawn('grep', [productName], opts); p.stdout.pipe(p2.stdin); const p3 = spawn('awk', ['{print $1":"$2}'], opts); p2.stdout.pipe(p3.stdin); - const p4 = spawn('xargs', ['--no-run-if-empty', 'docker', 'rmi'], {env, stdio: [p3.stdout, 1, 2]}); + const p4 = spawn('xargs', ['docker', 'rmi'], {env, stdio: [p3.stdout, 1, 2]}); p4.on('close', (code) => { - if (code === 0) { + if (code == 0) { resolve(); } else { console.log('invalid error code, but continuing'); @@ -297,13 +197,12 @@ function yo(generator, options, cwd, args) { const yeoman = require('yeoman-environment'); // call yo internally const yeomanEnv = yeoman.createEnv([], {cwd, env}, quiet ? createQuietTerminalAdapter() : undefined); + yeomanEnv.register(require.resolve('generator-phovea/generators/' + generator), 'phovea:' + generator); const _args = Array.isArray(args) ? args.join(' ') : args || ''; return new Promise((resolve, reject) => { try { console.log(cwd, chalk.blue('running yo phovea:' + generator)); - yeomanEnv.lookup(() => { - yeomanEnv.run(`phovea:${generator} ${_args}`, options, resolve); - }); + yeomanEnv.run(`phovea:${generator} ${_args}`, options, resolve); } catch (e) { console.error('error', e, e.stack); reject(e); @@ -316,7 +215,6 @@ function cloneRepo(p, cwd) { p.name = p.name || fromRepoUrl(p.repo); p.repo = p.repo || `phovea/${p.name}`; p.branch = p.branch || 'master'; - return yo('clone-repo', { branch: p.branch, extras: '--depth 1', @@ -326,31 +224,39 @@ function cloneRepo(p, cwd) { } function resolvePluginType(p, dir) { - if (!fs.existsSync(`${dir}/${p.name}/.yo-rc.json`)) { - p.pluginType = 'lib'; - p.isHybridType = false; - return; - } return fs.readJSONAsync(`${dir}/${p.name}/.yo-rc.json`).then((json) => { p.pluginType = json['generator-phovea'].type; p.isHybridType = p.pluginType.includes('-'); }); } +function preBuild(p, dir) { + const hasAdditional = p.additional.length > 0; + let act = fs.emptyDirAsync(dir) + .then(() => cloneRepo(p, dir)) + .then(() => resolvePluginType(p, dir)); + if (hasAdditional) { + act = act + .then(() => Promise.all(p.additional.map((pi) => cloneRepo(pi, dir).then(resolvePluginType.bind(this, pi, dir))))); + } + return act; +} + function loadComposeFile(dir, p) { const composeFile = `${dir}/${p.name}/deploy/docker-compose.partial.yml`; if (fs.existsSync(composeFile)) { const yaml = require('yamljs'); return fs.readFileAsync(composeFile).then((content) => yaml.parse(content.toString())); + } else { + return Promise.resolve({}); } - return Promise.resolve({}); } function patchComposeFile(p, composeTemplate) { const service = {}; if (composeTemplate && composeTemplate.services) { const firstService = Object.keys(composeTemplate.services)[0]; - // copy data from first service + //copy data from first service Object.assign(service, composeTemplate.services[firstService]); delete service.build; } @@ -366,79 +272,104 @@ function patchComposeFile(p, composeTemplate) { return r; } -function patchDockerfile(p, dockerFile) { - if (!p.baseImage) { - 
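// A note on dockerSave() just above (its opening lines are elided by the
// hunk, so this is an inference from the two piped child processes): it
// implements the equivalent of
//   docker save <image> | gzip > build/<label>_image.tar.gz
// and resolves once gzip exits with status 0.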
return null; - } - return fs.readFileAsync(dockerFile).then((content) => { - content = content.toString(); - // patch the Dockerfile by replacing the FROM statement - const r = /^\s*FROM (.+)\s*$/igm; - const fromImage = r.exec(content)[1]; - console.log(`patching ${dockerFile} change from ${fromImage} -> ${p.baseImage}`); - content = content.replace(r, `FROM ${p.baseImage}`); - return fs.writeFileAsync(dockerFile, content); - }); + +function postBuild(p, dir, buildInSubDir) { + return Promise.resolve(null) + .then(() => docker(`${dir}${buildInSubDir ? '/' + p.name : ''}`, `build -t ${p.image} -f deploy/Dockerfile .`)) + .then(() => argv.skipSaveImage ? null : dockerSave(p.image, `build/${p.label}_image.tar.gz`)) + .then(() => Promise.all([loadComposeFile(dir, p).then(patchComposeFile.bind(null, p))].concat(p.additional.map((pi) => loadComposeFile(dir, pi))))) + .then(mergeCompose); } -function patchWorkspace(p) { - // prepend docker_script in the workspace - if (fs.existsSync('./docker_script.sh')) { - console.log('patch workspace and prepend docker_script.sh'); - let content = fs.readFileSync('./docker_script.sh').toString(); - if (fs.existsSync(p.tmpDir + '/docker_script.sh')) { - content += '\n' + fs.readFileSync(p.tmpDir + '/docker_script.sh').toString(); +function buildWebApp(p, dir) { + console.log(dir, chalk.blue('building web application:'), p.label); + const name = p.name; + const hasAdditional = p.additional.length > 0; + let act = preBuild(p, dir); + //let act = Promise.resolve(null); + if (hasAdditional) { + act = act + .then(() => yo('workspace', {noAdditionals: true}, dir)) + .then(() => npm(dir, 'install')); + //test all modules + if (hasAdditional && !argv.skipTests) { + act = act.then(() => Promise.all(p.additional.map((pi) => npm(dir, `run test${pi.isHybridType ? ':web' : ''}:${pi.name}`)))); } - fs.writeFileSync(p.tmpDir + '/docker_script.sh', content); + act = act + .then(() => npm(dir, `run dist${p.isHybridType ? ':web' : ''}:${p.name}`)); + } else { + act = act + .then(() => npm(dir + '/' + name, 'install')) + .then(() => npm(dir + '/' + name, `run dist${p.isHybridType ? 
':web' : ''}`)); } + return act + .then(() => fs.renameAsync(`${dir}/${p.name}/dist/${p.name}.tar.gz`, `./build/${p.label}.tar.gz`)) + .then(postBuild.bind(null, p, dir, true)); +} - if (argv.injectVersion) { - const pkgfile = `${p.tmpDir}/${p.name}/package.json`; - if (fs.existsSync(pkgfile)) { - const ppkg = require(pkgfile); - ppkg.version = pkg.version; - fs.writeJSONSync(pkgfile, ppkg); - } else { - console.warn('cannot inject version, main package.json not found'); - } +function buildServerApp(p, dir) { + console.log(dir, chalk.blue('building service package:'), p.label); + const name = p.name; + + let act = preBuild(p, dir); + act = act + .then(() => yo('workspace', {noAdditionals: true}, dir)); + + if (!argv.skipTests) { + act = act + .then(() => console.log(chalk.yellow('create test environment'))) + .then(() => spawn('pip', 'install --no-cache-dir -r requirements.txt', {cwd: dir})) + .then(() => spawn('pip', 'install --no-cache-dir -r requirements_dev.txt', {cwd: dir})); } - // inject extra phovea.js - if (fs.existsSync('./phovea.js')) { - console.log('patch workspace and add workspace phovea.js'); - let registry = fs.readFileSync(p.tmpDir + '/phovea_registry.js').toString(); - fs.copyFileSync('./phovea.js', p.tmpDir + '/phovea.js'); - - registry += `\n\n - import {register} from 'phovea_core/src/plugin'; - register('__product',require('./phovea.js')); - `; - fs.writeFileSync(p.tmpDir + '/phovea_registry.js', registry); + act = act + .then(() => npm(dir + '/' + name, `run build${p.isHybridType ? ':python' : ''}`)) + .then(() => Promise.all(p.additional.map((pi) => npm(dir + '/' + pi.name, `run build${pi.isHybridType ? ':python' : ''}`)))); + + //copy all together + act = act + .then(() => fs.ensureDirAsync(`${dir}/build/source`)) + .then(() => fs.copyAsync(`${dir}/${name}/build/source`, `${dir}/build/source/`)) + .then(() => Promise.all(p.additional.map((pi) => fs.copyAsync(`${dir}/${pi.name}/build/source`, `${dir}/build/source/`)))); + + //copy data packages + act = act.then(() => Promise.all(p.data.map((d) => downloadDataFile(d, `${dir}/build/source/_data`, dir)))); + //let act = Promise.resolve([]); + + //copy main deploy thing and create a docker out of it + return act + .then(() => fs.ensureDirAsync(`${dir}/deploy`)) + .then(() => fs.copyAsync(`${dir}/${name}/deploy`, `${dir}/deploy/`)) + .then(postBuild.bind(null, p, dir, false)); +} + +function buildImpl(d, dir) { + switch (d.type) { + case 'static': + case 'web': + return buildWebApp(d, dir); + case 'api': + d.name = d.name || 'phovea_server'; + return buildServerApp(d, dir); + case 'service': + return buildServerApp(d, dir); + default: + console.error(chalk.red('unknown product type: ' + d.type)); + return Promise.resolve(null); } } function mergeCompose(composePartials) { let dockerCompose = {}; - composePartials.forEach((c) => mergeWith(dockerCompose, c)); + const _ = require('lodash'); + const mergeArrayUnion = (a, b) => Array.isArray(a) ? 
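// A sketch of the npm script names assembled above; plugin names are
// illustrative:
//   hybrid plugin built inside a workspace -> npm run dist:web:<name>
//   plain web plugin in its own directory  -> npm run dist
//   python side of a server build          -> npm run build:python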
_.union(a, b) : undefined; + composePartials.forEach((c) => _.mergeWith(dockerCompose, c, mergeArrayUnion)); return dockerCompose; } -function buildComposePartials(descs) { - const validDescs = descs.filter((d) => !d.error); - - // merge a big compose file including all - return Promise.all(validDescs.map((p) => { - return Promise.all([loadComposeFile(p.tmpDir, p).then(patchComposeFile.bind(null, p))].concat(p.additional.map((pi) => loadComposeFile(p.tmpDir, pi)))) - .then((partials) => { - p.composePartial = mergeCompose(partials); - }); - })); -} - -function buildCompose(descs, dockerComposePatch) { +function buildCompose(descs, composePartials) { console.log('create docker-compose.yml'); - - const dockerCompose = mergeCompose(descs.map((d) => d.composePartial).filter(Boolean)); + const dockerCompose = mergeCompose(composePartials); const services = dockerCompose.services; // link the api server types to the web types and server to the api const web = descs.filter((d) => d.type === 'web').map((d) => d.label); @@ -455,23 +386,6 @@ function buildCompose(descs, dockerComposePatch) { services[w].links.push(`${s.label}:${s.name}`); }); }); - - if (services._host) { - // inline _host to apis - const host = services._host; - delete services._host; - api.forEach((s) => { - services[s] = mergeCompose([host, services[s]]); - }); - } - - Object.keys(dockerComposePatch.services).forEach((service) => { - if (services[service] !== undefined) { - console.log(`patch generated docker-compose file for ${service}`); - mergeWith(services[service], dockerComposePatch.services[service]); - } - }); - const yaml = require('yamljs'); return fs.writeFileAsync('build/docker-compose.yml', yaml.stringify(dockerCompose, 100, 2)) .then(() => dockerCompose); @@ -488,7 +402,7 @@ function pushImages(images) { if (!argv.noDefaultTags) { tags.push(...images.map((image) => ({image, tag: `${dockerRepository}/${image}`}))); } - if (argv.pushExtra) { // push additional custom prefix without the version + if (argv.pushExtra) { //push additional custom prefix without the version tags.push(...images.map((image) => ({ image, tag: `${dockerRepository}/${image.substring(0, image.lastIndexOf(':'))}:${argv.pushExtra}` @@ -501,214 +415,6 @@ function pushImages(images) { .then(() => Promise.all(tags.map((tag) => docker('.', `push ${tag.tag}`)))); } -function loadPatchFile() { - const existsYaml = fs.existsSync('./docker-compose-patch.yaml'); - if (!existsYaml && !fs.existsSync('./docker-compose-patch.yml')) { - return {services: {}}; - } - const content = fs.readFileSync(existsYaml ? './docker-compose-patch.yaml' : './docker-compose-patch.yml'); - const yaml = require('yamljs'); - const r = yaml.parse(content.toString()); - if (!r.services) { - r.services = {}; - } - return r; -} - -function fillDefaults(descs, dockerComposePatch) { - const singleService = descs.length === 1 && (argv.forceLabel === undefined); - - descs.forEach((d, i) => { - // default values - d.additional = d.additional || []; - d.data = d.data || []; - d.name = d.name || (d.repo ? fromRepoUrl(d.repo) : d.label); - d.label = d.label || d.name; - d.symlink = d.symlink || null; // default value - d.image = d.image || `${productName}${singleService ? 
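// A minimal sketch of the image naming restored above; the build id in the
// version is illustrative:
const demoVersion = '1.0.0-20200114-083209';
console.log(`gapminder:${demoVersion}`);                  // single service
console.log(`gapminder/gapminder_server:${demoVersion}`); // one of several services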
'' : `/${d.label}`}:${pkg.version}`; - // incorporate patch file - if (dockerComposePatch.services[d.label] && dockerComposePatch.services[d.label].image) { - // use a different base image to build the item - d.baseImage = dockerComposePatch.services[d.label].image; - delete dockerComposePatch.services[d.label].image; - } - // include hint in the tmp directory which one is it - d.tmpDir = `./tmp${i}_${d.name.replace(/\s+/, '').slice(0, 5)}`; - }); - - return descs; -} - -function asChain(steps, chain) { - if (chain.length === 0) { - return []; - } - const possibleSteps = Object.keys(steps); - - const callable = (c) => { - if (typeof c === 'function') { - return c; - } - - if (typeof c === 'string') { - // simple lookup - if (!possibleSteps.includes(c)) { - console.error('invalid step:', c); - throw new Error('invalid step: ' + c); - } - return callable(steps[c]); - } - - if (Array.isArray(c)) { // sequential sub started - const sub = c.map(callable); - return () => { - console.log('run sequential sub chain: ', JSON.stringify(c, null, ' ')); - let step = Promise.resolve(); - for (const s of sub) { - step = step.then(s); - } - return step; - }; - } - // parallel = object - const sub = Object.keys(c).map((ci) => callable(c[ci])); - return () => { - console.log('run parallel sub chain: ', JSON.stringify(c, null, ' ')); - return Promise.all(sub.map((d) => d())); // run sub lazy combined with all - }; - }; - return chain.map(callable); -} - -function runChain(chain, catchErrors) { - let start = null; - let step = new Promise((resolve) => { - start = resolve; - }); - - for (const c of chain) { - step = step.then(c); - } - - step.catch(catchErrors); - - return () => { - start(); // resolve first to start chain - return step; // return last result - }; -} - -function strObject(items) { - const obj = {}; - for (const item of items) { - obj[item] = item; - } - return obj; -} - -function buildDockerImage(p) { - const buildInSubDir = p.type === 'web' || p.type === 'static'; - let buildArgs = ''; - // pass through http_proxy, no_proxy, and https_proxy env variables - for (const key of Object.keys(process.env)) { - const lkey = key.toLowerCase(); - if (lkey === 'http_proxy' || lkey === 'https_proxy' || lkey === 'no_proxy') { - // pass through - buildArgs += ` --build-arg ${lkey}='${process.env[key]}'`; - } - } - - // patch the docker file with the with an optional given baseImage - return Promise.resolve(patchDockerfile(p, `${p.tmpDir}${buildInSubDir ? '/' + p.name : ''}/deploy/Dockerfile`)) - // create the container image - .then(() => docker(`${p.tmpDir}${buildInSubDir ? '/' + p.name : ''}`, `build -t ${p.image}${buildArgs} -f deploy/Dockerfile .`)) - // tag the container image - .then(() => argv.pushExtra ? docker(`${p.tmpDir}`, `tag ${p.image} ${p.image.substring(0, p.image.lastIndexOf(':'))}:${argv.pushExtra}`) : null); -} - -function createWorkspace(p) { - return yo('workspace', {noAdditionals: true, defaultApp: 'phovea'}, p.tmpDir) - .then(() => patchWorkspace(p)); -} - -function installWebDependencies(p) { - return npm(p.additional.length > 0 ? p.tmpDir : (`${p.tmpDir}/${p.name}`), 'install'); -} - -function cleanUpWebDependencies(p) { - return fs.emptyDirAsync(p.additional.length > 0 ? 
`${p.tmpDir}/node_modules` : (`${p.tmpDir}/${p.name}/node_modules`)); -} - -function resolvePluginTypes(p) { - if (p.pluginType) { - return Promise.resolve(); // already resolved - } - if (p.additional.length === 0) { - return resolvePluginType(p, p.tmpDir); - } - return Promise.all([resolvePluginType(p, p.tmpDir)].concat(p.additional.map((pi) => resolvePluginType(pi, p.tmpDir)))); -} - -function testWebAdditionals(p) { - return Promise.all(p.additional.map((pi) => npm(p.tmpDir, `run test${pi.isHybridType ? ':web' : ''}:${pi.name}`))); -} - -function buildWeb(p) { - const hasAdditional = p.additional.length > 0; - - let step; - if (hasAdditional) { - step = npm(p.tmpDir, `run dist${p.isHybridType ? ':web' : ''}:${p.name}`); - } else { - step = npm(`${p.tmpDir}/${p.name}`, `run dist${p.isHybridType ? ':web' : ''}`); - } - // move to target directory - return step.then(() => fs.renameAsync(`${p.tmpDir}/${p.name}/dist/${p.name}.tar.gz`, `./build/${p.label}.tar.gz`)); -} - -function installPythonTestDependencies(p) { - console.log(chalk.yellow('create test environment')); - return spawn('pip', 'install --no-cache-dir -r requirements.txt', {cwd: p.tmpDir}) - .then(() => spawn('pip', 'install --no-cache-dir -r requirements_dev.txt', {cwd: p.tmpDir})); -} - -function buildServer(p) { - let act = npm(`${p.tmpDir}/${p.name}`, `run build${p.isHybridType ? ':python' : ''}`); - for (const pi of p.additional) { - act = act.then(() => npm(`${p.tmpDir}/${pi.name}`, `run build${pi.isHybridType ? ':python' : ''}`)); - } - - // copy all together - act = act - .then(() => fs.ensureDirAsync(`${p.tmpDir}/build/source`)) - .then(() => fs.copyAsync(`${p.tmpDir}/${p.name}/build/source`, `${p.tmpDir}/build/source/`)) - .then(() => Promise.all(p.additional.map((pi) => fs.copyAsync(`${p.tmpDir}/${pi.name}/build/source`, `${p.tmpDir}/build/source/`)))); - - // copy main deploy thing and create a docker out of it - act = act - .then(() => fs.ensureDirAsync(`${p.tmpDir}/deploy`)) - .then(() => fs.copyAsync(`${p.tmpDir}/${p.name}/deploy`, `${p.tmpDir}/deploy/`)); - - return act; -} - -function downloadServerDataFiles(p) { - if (!argv.serial) { - return Promise.all(p.data.map((d) => downloadDataFile(d, `${p.tmpDir}/build/source/_data`, p.tmpDir))); - } - // serial - let act = Promise.resolve(); - for (const d of p.data) { - act = act.then(() => downloadDataFile(d, `${p.tmpDir}/build/source/_data`, p.tmpDir)); - } - return act; -} - -function cleanWorkspace(descs) { - console.log(chalk.yellow('clean workspace')); - return Promise.all([fs.emptyDirAsync('build')].concat(descs.map((d) => fs.emptyDirAsync(d.tmpDir)))); -} - if (require.main === module) { if (argv.skipTests) { // if skipTest option is set, skip tests @@ -716,182 +422,59 @@ if (require.main === module) { env.PHOVEA_SKIP_TESTS = true; } if (argv.quiet) { + // if skipTest option is set, skip tests console.log(chalk.blue('will try to keep my mouth shut...')); } - const dockerComposePatch = loadPatchFile(); - const descs = fillDefaults(require('./phovea_product.json'), dockerComposePatch); - - if (fs.existsSync('.yo-rc.json')) { - fs.renameSync('.yo-rc.json', '.yo-rc_tmp.json'); - } - fs.ensureDirSync('build'); - - const cleanUp = () => { - if (fs.existsSync('.yo-rc_tmp.json')) { - fs.renameSync('.yo-rc_tmp.json', '.yo-rc.json'); - } - }; - - const catchProductBuild = (p, act) => { - // no chaining to keep error - act.catch((error) => { - p.error = error; - console.error('ERROR building ', p.name, error); - }); - return act; - }; - - const steps = { - clean: () => 
cleanWorkspace(descs), - prune: dockerRemoveImages, - compose: () => buildComposePartials(descs).then(() => buildCompose(descs, dockerComposePatch)), - push: () => pushImages(descs.filter((d) => !d.error).map((d) => d.image)), - summary: () => { + const descs = require('./phovea_product.json'); + const singleService = descs.length === 1; + const productName = pkg.name.replace('_product', ''); + + + fs.emptyDirAsync('build') + .then(dockerRemoveImages.bind(this, productName)) + // move my own .yo-rc.json to avoid a conflict + .then(fs.renameAsync('.yo-rc.json', '.yo-rc_tmp.json')) + .then(() => { + const buildOne = (d, i) => { + d.additional = d.additional || []; //default values + d.data = d.data || []; + d.name = d.name || fromRepoUrl(d.repo); + d.label = d.label || d.name; + if (singleService) { + d.image = `${productName}:${pkg.version}`; + } else { + d.image = `${productName}/${d.label}:${pkg.version}`; + } + let wait = buildImpl(d, './tmp' + i); + wait.catch((error) => { + d.error = error; + console.error('ERROR building ', d, error); + }); + return wait; + }; + if (argv.serial) { + let r = Promise.resolve([]); + for (let i = 0; i < descs.length; ++i) { + r = r.then((arr) => buildOne(descs[i], i).then((f) => arr.concat(f))); + } + return r; + } else { + return Promise.all(descs.map(buildOne)); + } + }) + .then((composeFiles) => buildCompose(descs, composeFiles.filter((d) => !!d))) + .then(() => pushImages(descs.filter((d) => !d.error).map((d) => d.image))) + .then(() => fs.renameAsync('.yo-rc_tmp.json', '.yo-rc.json')) + .then(() => { console.log(chalk.bold('summary: ')); const maxLength = Math.max(...descs.map((d) => d.name.length)); descs.forEach((d) => console.log(` ${d.name}${'.'.repeat(3 + (maxLength - d.name.length))}` + (d.error ? chalk.red('ERROR') : chalk.green('SUCCESS')))); const anyErrors = descs.some((d) => d.error); - cleanUp(); if (anyErrors) { process.exit(1); } - } - }; - - const webTypes = ['static', 'web']; - const serverTypes = ['api', 'service']; - - const chainProducts = []; - for (let i = 0; i < descs.length; ++i) { - const p = descs[i]; - const suffix = p.name; - const hasAdditional = p.additional.length > 0; - const isWeb = webTypes.includes(p.type); - const isServer = serverTypes.includes(p.type); - - if (!isWeb && !isServer) { - console.error(chalk.red('unknown product type: ' + p.type)); - continue; - } - - fs.ensureDirSync(p.tmpDir); - - // clone repo - const subSteps = []; - steps[`clone:${suffix}`] = () => catchProductBuild(p, cloneRepo(p, p.tmpDir)); - subSteps.push(`clone:${suffix}`); - - if (hasAdditional) { - // clone extras - const cloneKeys = []; - for (const pi of p.additional) { - const key = `clone:${suffix}:${pi.name}`; - steps[key] = () => catchProductBuild(p, cloneRepo(pi, p.tmpDir)); - cloneKeys.push(key); - } - - if (argv.serial) { - subSteps.push(...cloneKeys); - } else { - subSteps.push(strObject(cloneKeys)); - } - } - - const needsWorskpace = (isWeb && hasAdditional) || isServer; - steps[`prepare:${suffix}`] = needsWorskpace ? () => catchProductBuild(p, createWorkspace(p)) : null; - - if (isWeb) { - steps[`install:${suffix}`] = () => catchProductBuild(p, installWebDependencies(p)); - } else { // server - steps[`install:${suffix}`] = argv.skipTests ? () => null : () => catchProductBuild(p, installPythonTestDependencies(p)); - } - steps[`test:${suffix}`] = isWeb && hasAdditional ? () => catchProductBuild(p, resolvePluginTypes(p).then(() => testWebAdditionals(p))) : () => null; - steps[`build:${suffix}`] = isWeb ? 
() => catchProductBuild(p, resolvePluginTypes(p).then(() => buildWeb(p))) : () => catchProductBuild(p, resolvePluginTypes(p).then(() => buildServer(p))); - steps[`data:${suffix}`] = () => catchProductBuild(p, downloadServerDataFiles(p)); - steps[`postbuild:${suffix}`] = isWeb ? () => catchProductBuild(p, cleanUpWebDependencies(p)) : () => null; - steps[`image:${suffix}`] = () => catchProductBuild(p, buildDockerImage(p)); - steps[`save:${suffix}`] = () => catchProductBuild(p, dockerSave(p.image, `build/${p.label}_image.tar.gz`)); - - subSteps.push(`prepare:${suffix}`); - subSteps.push(`install:${suffix}`); - if (!argv.skipTests) { - subSteps.push(`test:${suffix}`); - } - subSteps.push(`build:${suffix}`); - if (isServer && p.data.length > 0) { - subSteps.push(`data:${suffix}`); - } - if (isWeb) { - subSteps.push(`postbuild:${suffix}`); - } - subSteps.push(`image:${suffix}`); - if (!argv.skipSaveImage) { - subSteps.push(`save:${suffix}`); - } - - steps[`product:${suffix}`] = subSteps; - subSteps.name = `product:${suffix}`; - chainProducts.push(subSteps); - } - - // create some meta steps - { - const stepNames = Object.keys(steps); - for (const meta of ['clone', 'prepare', 'build', 'test', 'postbuild', 'image', 'product', 'install']) { - const sub = stepNames.filter((d) => d.startsWith(`${meta}:`)); - if (sub.length <= 0) { - continue; - } - steps[meta] = argv.serial ? sub : strObject(sub); - } - } - - const chain = ['clean']; - - if (!argv.skipCleanUp) { - chain.push('prune'); - } - - if (argv.serial) { - chain.push(...chainProducts); // serially - } else { - const par = {}; - chainProducts.forEach((c) => { - par[c.name] = c; - }); - chain.push(par); // as object = parallel - } - // result of the promise is an array of partial docker compose files - - chain.push('compose'); - if (argv.pushTo) { - chain.push('push'); - } - chain.push('summary'); - - // XX. 
catch all error handling - const catchErrors = (error) => { + }).catch((error) => { console.error('ERROR extra building ', error); - // rename back - cleanUp(); process.exit(1); - }; - - if (argv.help) { - showHelp(steps, chain); - cleanUp(); - process.exit(0); - } - - if (argv._.length > 0) { - // explicit chain replace computed one - chain.splice(0, chain.length, ...argv._); - } - - console.log(chalk.blue('executing chain:'), JSON.stringify(chain, null, ' ')); - const toExecute = asChain(steps, chain); - const launch = runChain(toExecute, catchErrors); - if (!argv.dryRun) { - launch(); - } + }); } From c47196f8c0b9bed017a2ac6ea9a77dcce77b0430 Mon Sep 17 00:00:00 2001 From: dvvanessastoiber Date: Tue, 14 Jan 2020 09:04:15 +0100 Subject: [PATCH 16/34] remove _.travis.yml_ --- .travis.yml | 36 ------------------------------------ 1 file changed, 36 deletions(-) delete mode 100644 .travis.yml diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index f9b94b0..0000000 --- a/.travis.yml +++ /dev/null @@ -1,36 +0,0 @@ -language: node_js - -node_js: -- 6 - -services: -- docker - -before_install: -- export DISPLAY=:99.0 -- sh -e /etc/init.d/xvfb start -- if [[ `npm -v` != 3* ]]; then npm i -g npm@3; fi - -before_script: -- pip install --user awscli -- export PATH=$PATH:$HOME/.local/bin - -# build and push -#- $(aws ecr get-login --region eu-central-1) -#script: node build.js --skipTests --skipSaveImage --pushTo=$AWS_ECR_PREFIX --pushLatest --pushDaily - -script: node build.js --skipSaveImage - -deploy: - provider: releases - api_key: - secure: TK9/P34Bi3WuppiDrBCwVcn41yCBwmILaU8hXTBzUPbT7TbeFIwsC6/4CtH85Z+ZrUve4S5pTmWRNf2dQDxWw3uYu7+bJuemV2J1LHG76mognj+TNEiYxfLQUt3Gql4W7C7FcI4Rlx5/uMN9wY1wro8TWUBMwT6jjSrUWIvK3GXoojd5bHvJx07XpjWl9wCon4D0ruZiFoM2mdeP23lbc2GckETi32oEKswnQXxkMACmxbPzoWbvkxH4aK8Bt2Rj2sl2TbPhVkN6DAkHGkGAvLI+2/aRfG27+oo3OKsaDjbuGABct8TfZccJ970CbQ8kbnCjYxstvqkg1JWjF0W67sX/flBZZOEUA5l0OLWo6HqMGMxm7/lEQhIdPMsRmvXL+HVOxkMrB2dda58QzxVwiZp+rRqUaeabPZp8Kl5xodGrVxsBvxe6zAbJ5jCtCSumG6+kLyKI00/kYlghqQNrgUw0ZsYJlQ34h3lo/24QpaeyDpQoCkGWQgtgqiXGpeKSu7bCnOqIqAy3nbT9Utwj7K8gIasTG5idosEAz/THMampNbGDuyxxc340sYGNMg9Bhm1g2ILWRdtV470p5hwBtIDTKi3/PAizEO26+Wh0zI47Sg3ao57avcbCsTmzbZUeA5J4bojmchhJCHX8su9cSCGh/2fJA/1eBIgEvOQ8LNE= - file: build/* - on: - tags: true - -notifications: - slack: - secure: E8/1UIdHSczUbN+6i6gd1d5LM4vmLdwLQ30tpyjvnM0wvfDce76oPxLJAy240WJ5ybXRZUtNrttpVpt4tEXCy8aLFCmxD7s77rVloH+q1J8R/ptTFWZGhFGEujk1awEmVbzcWxJkV9/JENQaeGBKxwv8/EQwWwEkAb7p/+AJb9owmH88b3wUZUGHBWtbMiyyaF4Rm1Wg1stJB8Z1Ga7PRF4cqufTgcDdsCPVv9gAY+VxOIGqX/Vfuc9UWpUH8vq8lHUE7Inn5QS78kuFfSgLWga3H6Mu/Gko1XNlWk0QWWQBUvEZ6ZC6Wuo68KzvUjJHDTnx8WyfHue2JNHIslcX+eJq2WHLeEgM24VeNkILCGo/H/60NGHiSjrIv/Y9h6bQ9FDjo6TUyE4nbdPYN1RN9FQ5UbI9Y4Gi753H9mqnHWlEywBOzHxdZCAuz9Wh03CCF/blsvJ+Obbyo6Jrfe+g44jyi9kQdBNQ78qG6v4EXws8FiYao6x3PpgIwFix42Cpr+soAh5FpA3C1zHSAyZZpXF65/lrDl5yPNofK7Wy0B9bw+0I6Z/u7ZKFNVZXvYPGYvtUVcsALGBdmYc61+LCta36Po0KZseWVAlJj6QnOJDYzv0wvV/zsuf9A5KpYFGiqV9Q7zmtiO5FYF5sBy+lE7O9tHVO4O18IRndhRQgxhs= - on_success: change - on_failure: always From 574ba81835fb638e950f016ad5f6299f51be0a0e Mon Sep 17 00:00:00 2001 From: Holger Stitz Date: Tue, 14 Jan 2020 13:50:01 +0100 Subject: [PATCH 17/34] Add `yeomanEnv.lookup()` in build.js See https://github.com/phovea/generator-phovea/pull/266 --- build.js | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/build.js b/build.js index d68016f..afd2f21 100644 --- a/build.js +++ b/build.js @@ -202,7 +202,9 @@ function yo(generator, options, cwd, args) { return new 
Promise((resolve, reject) => {
     try {
       console.log(cwd, chalk.blue('running yo phovea:' + generator));
-      yeomanEnv.run(`phovea:${generator} ${_args}`, options, resolve);
+      yeomanEnv.lookup(() => {
+        yeomanEnv.run(`phovea:${generator} ${_args}`, options, resolve);
+      });
     } catch (e) {
       console.error('error', e, e.stack);
       reject(e);

From d872f394de9669ea1d4f42e0e5a3622b9a2ebfa8 Mon Sep 17 00:00:00 2001
From: Holger Stitz
Date: Tue, 14 Jan 2020 14:03:57 +0100
Subject: [PATCH 18/34] Add `--serial` arg to build.js in config.yml

---
 .circleci/config.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index a0e689e..22c8b26 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -68,7 +68,7 @@ jobs:
             ;;
           esac
           echo "using tag: --${awsTag}--"
-          node build.js --skipSaveImage --noDefaultTags --pushExtra=${awsTag} --pushTo=922145058410.dkr.ecr.eu-central-1.amazonaws.com/caleydo
+          node build.js --serial --skipSaveImage --noDefaultTags --pushExtra=${awsTag} --pushTo=922145058410.dkr.ecr.eu-central-1.amazonaws.com/caleydo
       - store_artifacts:
           path: build
           destination: build

From 2047d9b7014ed4a2b299c863d22a6b65b5f7033c Mon Sep 17 00:00:00 2001
From: dvvanessastoiber
Date: Tue, 14 Jan 2020 14:16:21 +0100
Subject: [PATCH 19/34] switch to branch develop

---
 phovea_product.json | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/phovea_product.json b/phovea_product.json
index 97dfbb4..43030c9 100644
--- a/phovea_product.json
+++ b/phovea_product.json
@@ -3,7 +3,7 @@
     "type": "web",
     "label": "gapminder",
     "repo": "Caleydo/gapminder",
-    "branch": "master",
+    "branch": "develop",
     "additional": []
   },
   {
@@ -37,7 +37,7 @@
     {
       "type": "repo",
       "repo": "Caleydo/gapminder",
-      "branch": "master"
+      "branch": "develop"
     }
   ]
 }

From 28974f9770a5d4f8a2f987084a18fcb0260d6544 Mon Sep 17 00:00:00 2001
From: dvvanessastoiber
Date: Tue, 14 Jan 2020 14:17:30 +0100
Subject: [PATCH 20/34] switch to branch develop

---
 phovea_product.json | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/phovea_product.json b/phovea_product.json
index 97dfbb4..43030c9 100644
--- a/phovea_product.json
+++ b/phovea_product.json
@@ -3,7 +3,7 @@
     "type": "web",
     "label": "gapminder",
     "repo": "Caleydo/gapminder",
-    "branch": "master",
+    "branch": "develop",
     "additional": []
   },
   {
@@ -37,7 +37,7 @@
     {
       "type": "repo",
      "repo": "Caleydo/gapminder",
-      "branch": "master"
+      "branch": "develop"
     }
   ]
 }

From 8da2b0b2282a8ee89f765311b7bab490a330c97d Mon Sep 17 00:00:00 2001
From: dvvanessastoiber
Date: Tue, 14 Jan 2020 14:36:53 +0100
Subject: [PATCH 21/34] replace `/` in branch name with underscore

---
 .circleci/config.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 957056b..04b8491 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -64,7 +64,7 @@ jobs:
             awsTag="latest"
             ;;
           *)
-            awsTag="${CIRCLE_BRANCH}"
+            awsTag="${CIRCLE_BRANCH//\//_}"
            ;;
         esac
         echo "using tag: --${awsTag}--"
@@ -87,7 +87,7 @@ jobs:
           export AWS_DEFAULT_REGION=eu-central-1
           # cleanup name by removing the _product suffix
           baseName=${CIRCLE_PROJECT_REPONAME%_product}
-          awsFamily="${baseName}-${CIRCLE_BRANCH}"
+          awsFamily="${baseName}-${CIRCLE_BRANCH//\//_}"
           echo "awsFamily --${awsFamily}--"
           tasksExists=$(aws --output text ecs list-task-definitions --family-prefix ${awsFamily})
           echo "existsTaskDefinition?
--${tasksExists}--"

From a6e3c25e1abf95299d00c61f18e5f2cb5d4a4dd5 Mon Sep 17 00:00:00 2001
From: dvvanessastoiber
Date: Tue, 14 Jan 2020 14:45:55 +0100
Subject: [PATCH 22/34] add comments for change

---
 .circleci/config.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 04b8491..a5a65e0 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -64,7 +64,7 @@ jobs:
             awsTag="latest"
             ;;
           *)
-            awsTag="${CIRCLE_BRANCH//\//_}"
+            awsTag="${CIRCLE_BRANCH//\//_}" # replace `/` with `_` in branch name
             ;;
         esac
         echo "using tag: --${awsTag}--"
@@ -87,7 +87,7 @@ jobs:
           export AWS_DEFAULT_REGION=eu-central-1
           # cleanup name by removing the _product suffix
           baseName=${CIRCLE_PROJECT_REPONAME%_product}
-          awsFamily="${baseName}-${CIRCLE_BRANCH//\//_}"
+          awsFamily="${baseName}-${CIRCLE_BRANCH//\//_}" # replace `/` with `_` in branch name
           echo "awsFamily --${awsFamily}--"
           tasksExists=$(aws --output text ecs list-task-definitions --family-prefix ${awsFamily})
           echo "existsTaskDefinition? --${tasksExists}--"

From b667e92fbbabeb521cd7b3e9d18e808429c50948 Mon Sep 17 00:00:00 2001
From: dvvanessastoiber
Date: Tue, 14 Jan 2020 14:46:43 +0100
Subject: [PATCH 23/34] update _build.js_

---
 build.js | 745 +++++++++++++++++++++++++++++++++++++++++++------------
 1 file changed, 580 insertions(+), 165 deletions(-)

diff --git a/build.js b/build.js
index afd2f21..f6283d2 100644
--- a/build.js
+++ b/build.js
@@ -7,7 +7,9 @@ const path = require('path');
 const fs = Promise.promisifyAll(require('fs-extra'));
 const chalk = require('chalk');
 const pkg = require('./package.json');
+// see showHelp below
 const argv = require('yargs-parser')(process.argv.slice(2));
+
 const quiet = argv.quiet !== undefined;
 
 const now = new Date();
@@ -15,32 +17,116 @@ const prefix = (n) => n < 10 ? ('0' + n) : n.toString();
 const buildId = `${now.getUTCFullYear()}${prefix(now.getUTCMonth())}${prefix(now.getUTCDate())}-${prefix(now.getUTCHours())}${prefix(now.getUTCMinutes())}${prefix(now.getUTCSeconds())}`;
 pkg.version = pkg.version.replace('SNAPSHOT', buildId);
 const env = Object.assign({}, process.env);
+const productName = pkg.name.replace('_product', '');
+
+function showHelp(steps, chain) {
+  console.info(`node build.js -- step1 step2
+possible options:
+ * --quiet ... reduce log messages
+ * --serial ... build elements sequentially
+ * --skipTests ... skip tests
+ * --injectVersion ... injects the product version into the package.json of the built component
+ * --useSSH ... clone via ssh instead of https
+ * --skipCleanUp ... skip cleaning up old docker images
+ * --skipSaveImage ... skip saving the generated docker images
+ * --pushTo ... push docker images to the given registry
+ * --noDefaultTags ... don't push generated default tag :<version>
+ * --pushExtra ... push additional custom tag: e.g., --pushExtra=develop
+ * --forceLabel ... force to use the label even if only a single service exists
+ * --dryRun ... just compute the chain, no execution
+ * --help ... show this help message
+
+arguments: (starting with --!)
optional list of steps to execute in the given order (expert mode); by default the default chain is executed
+  `);
+
+  steps = Object.keys(steps);
+  const primary = steps.filter((d) => !d.includes(':')).sort((a, b) => a.localeCompare(b));
+  const secondary = steps.filter((d) => d.includes(':')).sort((a, b) => a.localeCompare(b));
+
+  console.info('possible primary steps:\n ', primary.join('\n '));
+  console.info('possible secondary steps:\n ', secondary.join('\n '));
+
+  console.info('default chain:\n', JSON.stringify(chain, null, ' '));
+}
 
+/**
+ * generates a repo url to clone depending on the argv.useSSH option
+ * @param {string} url the repo url either in git@ or https:// form
+ * @returns the clean repo url
+ */
 function toRepoUrl(url) {
+  if (url.startsWith('git@')) {
+    if (argv.useSSH) {
+      return url;
+    }
+    // have an ssh url need an http url
+    const m = url.match(/(https?:\/\/([^/]+)\/|git@(.+):)([\w\d-_/]+)(.git)?/);
+    return `https://${m[3]}/${m[4]}.git`;
+  }
+  if (url.startsWith('http')) {
+    if (!argv.useSSH) {
+      return url;
+    }
+    // have a http url need an ssh url
+    const m = url.match(/(https?:\/\/([^/]+)\/|git@(.+):)([\w\d-_/]+)(.git)?/);
+    return `git@${m[2]}:${m[4]}.git`;
+  }
+  if (!url.includes('/')) {
+    url = `Caleydo/${url}`;
+  }
   if (argv.useSSH) {
-    return `git@github.com:${url}.git`
+    return `git@github.com:${url}.git`;
   }
-  return url.startsWith('https://github.com/') ? url : `https://github.com/${url}.git`;
+  return `https://github.com/${url}.git`;
 }
 
+/**
+ * guesses the credentials environment variable based on the given repository hostname
+ * @param {string} repo
+ */
+function guessUserName(repo) {
+  // extract the host
+  const host = repo.match(/:\/\/([^/]+)/)[1];
+  const hostClean = host.replace(/\./g, '_').toUpperCase();
+  // e.g. GITHUB_COM_CREDENTIALS
+  const envVar = process.env[`${hostClean}_CREDENTIALS`];
+  if (envVar) {
+    return envVar;
+  }
+  return process.env.PHOVEA_GITHUB_CREDENTIALS;
+}
 
 function toRepoUrlWithUser(url) {
   const repo = toRepoUrl(url);
-  const username_and_password = process.env.PHOVEA_GITHUB_CREDENTIALS;
-  if (repo.includes('git@github.com') || !username_and_password) {
+  if (repo.startsWith('git@')) { // ssh
     return repo;
   }
-  return repo.replace('://', `://${username_and_password}@`);
+  const usernameAndPassword = guessUserName(repo);
+  if (!usernameAndPassword) { // no user given
+    return repo;
+  }
+  return repo.replace('://', `://${usernameAndPassword}@`);
 }
-
 function fromRepoUrl(url) {
   if (url.includes('.git')) {
-    return url.match(/\/(.*)\.git/)[0]
+    return url.match(/\/([^/]+)\.git/)[1];
   }
   return url.slice(url.lastIndexOf('/') + 1);
 }
 
+/**
+ * deep merge with array union
+ * @param {*} target
+ * @param {*} source
+ */
+function mergeWith(target, source) {
+  const _ = require('lodash');
+  const mergeArrayUnion = (a, b) => Array.isArray(a) ?
_.union(a, b) : undefined;
+  _.mergeWith(target, source, mergeArrayUnion);
+  return target;
+}
+
 function downloadDataUrl(url, dest) {
   if (!url.startsWith('http')) {
     url = `https://s3.eu-central-1.amazonaws.com/phovea-data-packages/${url}`;
@@ -49,7 +135,7 @@ function downloadDataUrl(url, dest) {
   console.log(chalk.blue('download file', url));
   return new Promise((resolve, reject) => {
     const file = fs.createWriteStream(dest);
-    const request = http.get(url, (response) => {
+    http.get(url, (response) => {
       response.pipe(file);
       file.on('finish', () => {
         file.close(resolve);
@@ -72,11 +158,13 @@ function downloadDataFile(desc, destDir, cwd) {
       url: desc
     };
   }
-  switch(desc.type) {
-    case 'url':
-      const destName = toDownloadName(desc.url);
-      return fs.ensureDirAsync(destDir).then(() => downloadDataUrl(desc.url, path.join(destDir, destName)));
-    case 'repo':
+  desc.type = desc.type || (desc.url ? 'url' : (desc.repo ? 'repo' : 'unknown'));
+  switch (desc.type) {
+    case 'url': {
+      desc.name = desc.name || toDownloadName(desc.url);
+      return fs.ensureDirAsync(destDir).then(() => downloadDataUrl(desc.url, `${destDir}/${desc.name}`));
+    }
+    case 'repo': {
       desc.name = desc.name || fromRepoUrl(desc.repo);
       let downloaded;
       if (fs.existsSync(path.join(cwd, desc.name))) {
@@ -85,6 +173,7 @@ function downloadDataFile(desc, destDir, cwd) {
         downloaded = cloneRepo(desc, cwd);
       }
       return downloaded.then(() => fs.copyAsync(`${cwd}/${desc.name}/data`, `${destDir}/${desc.name}`));
+    }
     default:
       console.error('unknown data type:', desc.type);
       return null;
@@ -96,25 +185,36 @@ function downloadDataFile(desc, destDir, cwd) {
  * @param cmd command as array
  * @param args arguments
  * @param opts options
+ * @returns a promise with the result code or a reject with the error string
  */
 function spawn(cmd, args, opts) {
   const spawn = require('child_process').spawn;
   const _ = require('lodash');
   return new Promise((resolve, reject) => {
-    const p = spawn(cmd, typeof args === 'string' ? args.split(' ') : args, _.merge({stdio: ['ignore', 1, 2]}, opts));
+    const p = spawn(cmd, typeof args === 'string' ? args.split(' ') : args, _.merge({stdio: argv.quiet ? ['ignore', 'pipe', 'pipe'] : ['ignore', 1, 2]}, opts));
+    const out = [];
+    if (p.stdout) {
+      p.stdout.on('data', (chunk) => out.push(chunk));
+    }
+    if (p.stderr) {
+      p.stderr.on('data', (chunk) => out.push(chunk));
+    }
     p.on('close', (code, signal) => {
       if (code === 0) {
        console.info(cmd, 'ok status code', code, signal);
         resolve(code);
       } else {
         console.error(cmd, 'status code', code, signal);
-        reject(`${cmd} failed with status code ${code} ${signal}`);
+        if (argv.quiet) {
+          // log the output that has been captured
+          console.log(out.join('\n'));
+        }
+        reject(new Error(`${cmd} failed with status code ${code} ${signal}`));
       }
     });
   });
 }
-
 /**
  * run npm with the given args
  * @param cwd working directory
@@ -151,23 +251,23 @@ function dockerSave(image, target) {
     p.stderr.on('data', (data) => console.error(chalk.red(data.toString())));
     p2.stderr.on('data', (data) => console.error(chalk.red(data.toString())));
   }
-    p2.on('close', (code) => code == 0 ? resolve() : reject(code));
+    p2.on('close', (code) => code === 0 ?
resolve() : reject(code)); }); } -function dockerRemoveImages(productName) { - console.log(chalk.blue(`docker images | grep ${productName} | awk '{print $1":"$2}') | xargs docker rmi`)); +function dockerRemoveImages() { + console.log(chalk.blue(`docker images | grep ${productName} | awk '{print $1":"$2}') | xargs --no-run-if-empty docker rmi`)); const spawn = require('child_process').spawn; const opts = {env}; - return new Promise((resolve, reject) => { + return new Promise((resolve) => { const p = spawn('docker', ['images'], opts); const p2 = spawn('grep', [productName], opts); p.stdout.pipe(p2.stdin); const p3 = spawn('awk', ['{print $1":"$2}'], opts); p2.stdout.pipe(p3.stdin); - const p4 = spawn('xargs', ['docker', 'rmi'], {env, stdio: [p3.stdout, 1, 2]}); + const p4 = spawn('xargs', ['--no-run-if-empty', 'docker', 'rmi'], {env, stdio: [p3.stdout, 1, 2]}); p4.on('close', (code) => { - if (code == 0) { + if (code === 0) { resolve(); } else { console.log('invalid error code, but continuing'); @@ -197,7 +297,6 @@ function yo(generator, options, cwd, args) { const yeoman = require('yeoman-environment'); // call yo internally const yeomanEnv = yeoman.createEnv([], {cwd, env}, quiet ? createQuietTerminalAdapter() : undefined); - yeomanEnv.register(require.resolve('generator-phovea/generators/' + generator), 'phovea:' + generator); const _args = Array.isArray(args) ? args.join(' ') : args || ''; return new Promise((resolve, reject) => { try { @@ -217,6 +316,7 @@ function cloneRepo(p, cwd) { p.name = p.name || fromRepoUrl(p.repo); p.repo = p.repo || `phovea/${p.name}`; p.branch = p.branch || 'master'; + return yo('clone-repo', { branch: p.branch, extras: '--depth 1', @@ -226,39 +326,31 @@ function cloneRepo(p, cwd) { } function resolvePluginType(p, dir) { + if (!fs.existsSync(`${dir}/${p.name}/.yo-rc.json`)) { + p.pluginType = 'lib'; + p.isHybridType = false; + return; + } return fs.readJSONAsync(`${dir}/${p.name}/.yo-rc.json`).then((json) => { p.pluginType = json['generator-phovea'].type; p.isHybridType = p.pluginType.includes('-'); }); } -function preBuild(p, dir) { - const hasAdditional = p.additional.length > 0; - let act = fs.emptyDirAsync(dir) - .then(() => cloneRepo(p, dir)) - .then(() => resolvePluginType(p, dir)); - if (hasAdditional) { - act = act - .then(() => Promise.all(p.additional.map((pi) => cloneRepo(pi, dir).then(resolvePluginType.bind(this, pi, dir))))); - } - return act; -} - function loadComposeFile(dir, p) { const composeFile = `${dir}/${p.name}/deploy/docker-compose.partial.yml`; if (fs.existsSync(composeFile)) { const yaml = require('yamljs'); return fs.readFileAsync(composeFile).then((content) => yaml.parse(content.toString())); - } else { - return Promise.resolve({}); } + return Promise.resolve({}); } function patchComposeFile(p, composeTemplate) { const service = {}; if (composeTemplate && composeTemplate.services) { const firstService = Object.keys(composeTemplate.services)[0]; - //copy data from first service + // copy data from first service Object.assign(service, composeTemplate.services[firstService]); delete service.build; } @@ -274,104 +366,79 @@ function patchComposeFile(p, composeTemplate) { return r; } - -function postBuild(p, dir, buildInSubDir) { - return Promise.resolve(null) - .then(() => docker(`${dir}${buildInSubDir ? '/' + p.name : ''}`, `build -t ${p.image} -f deploy/Dockerfile .`)) - .then(() => argv.skipSaveImage ? 
null : dockerSave(p.image, `build/${p.label}_image.tar.gz`)) - .then(() => Promise.all([loadComposeFile(dir, p).then(patchComposeFile.bind(null, p))].concat(p.additional.map((pi) => loadComposeFile(dir, pi))))) - .then(mergeCompose); +function patchDockerfile(p, dockerFile) { + if (!p.baseImage) { + return null; + } + return fs.readFileAsync(dockerFile).then((content) => { + content = content.toString(); + // patch the Dockerfile by replacing the FROM statement + const r = /^\s*FROM (.+)\s*$/igm; + const fromImage = r.exec(content)[1]; + console.log(`patching ${dockerFile} change from ${fromImage} -> ${p.baseImage}`); + content = content.replace(r, `FROM ${p.baseImage}`); + return fs.writeFileAsync(dockerFile, content); + }); } -function buildWebApp(p, dir) { - console.log(dir, chalk.blue('building web application:'), p.label); - const name = p.name; - const hasAdditional = p.additional.length > 0; - let act = preBuild(p, dir); - //let act = Promise.resolve(null); - if (hasAdditional) { - act = act - .then(() => yo('workspace', {noAdditionals: true}, dir)) - .then(() => npm(dir, 'install')); - //test all modules - if (hasAdditional && !argv.skipTests) { - act = act.then(() => Promise.all(p.additional.map((pi) => npm(dir, `run test${pi.isHybridType ? ':web' : ''}:${pi.name}`)))); +function patchWorkspace(p) { + // prepend docker_script in the workspace + if (fs.existsSync('./docker_script.sh')) { + console.log('patch workspace and prepend docker_script.sh'); + let content = fs.readFileSync('./docker_script.sh').toString(); + if (fs.existsSync(p.tmpDir + '/docker_script.sh')) { + content += '\n' + fs.readFileSync(p.tmpDir + '/docker_script.sh').toString(); } - act = act - .then(() => npm(dir, `run dist${p.isHybridType ? ':web' : ''}:${p.name}`)); - } else { - act = act - .then(() => npm(dir + '/' + name, 'install')) - .then(() => npm(dir + '/' + name, `run dist${p.isHybridType ? ':web' : ''}`)); + fs.writeFileSync(p.tmpDir + '/docker_script.sh', content); } - return act - .then(() => fs.renameAsync(`${dir}/${p.name}/dist/${p.name}.tar.gz`, `./build/${p.label}.tar.gz`)) - .then(postBuild.bind(null, p, dir, true)); -} -function buildServerApp(p, dir) { - console.log(dir, chalk.blue('building service package:'), p.label); - const name = p.name; - - let act = preBuild(p, dir); - act = act - .then(() => yo('workspace', {noAdditionals: true}, dir)); - - if (!argv.skipTests) { - act = act - .then(() => console.log(chalk.yellow('create test environment'))) - .then(() => spawn('pip', 'install --no-cache-dir -r requirements.txt', {cwd: dir})) - .then(() => spawn('pip', 'install --no-cache-dir -r requirements_dev.txt', {cwd: dir})); + if (argv.injectVersion) { + const pkgfile = `${p.tmpDir}/${p.name}/package.json`; + if (fs.existsSync(pkgfile)) { + const ppkg = require(pkgfile); + ppkg.version = pkg.version; + fs.writeJSONSync(pkgfile, ppkg); + } else { + console.warn('cannot inject version, main package.json not found'); + } } - act = act - .then(() => npm(dir + '/' + name, `run build${p.isHybridType ? ':python' : ''}`)) - .then(() => Promise.all(p.additional.map((pi) => npm(dir + '/' + pi.name, `run build${pi.isHybridType ? 
':python' : ''}`)))); - - //copy all together - act = act - .then(() => fs.ensureDirAsync(`${dir}/build/source`)) - .then(() => fs.copyAsync(`${dir}/${name}/build/source`, `${dir}/build/source/`)) - .then(() => Promise.all(p.additional.map((pi) => fs.copyAsync(`${dir}/${pi.name}/build/source`, `${dir}/build/source/`)))); - - //copy data packages - act = act.then(() => Promise.all(p.data.map((d) => downloadDataFile(d, `${dir}/build/source/_data`, dir)))); - //let act = Promise.resolve([]); - - //copy main deploy thing and create a docker out of it - return act - .then(() => fs.ensureDirAsync(`${dir}/deploy`)) - .then(() => fs.copyAsync(`${dir}/${name}/deploy`, `${dir}/deploy/`)) - .then(postBuild.bind(null, p, dir, false)); -} - -function buildImpl(d, dir) { - switch (d.type) { - case 'static': - case 'web': - return buildWebApp(d, dir); - case 'api': - d.name = d.name || 'phovea_server'; - return buildServerApp(d, dir); - case 'service': - return buildServerApp(d, dir); - default: - console.error(chalk.red('unknown product type: ' + d.type)); - return Promise.resolve(null); + // inject extra phovea.js + if (fs.existsSync('./phovea.js')) { + console.log('patch workspace and add workspace phovea.js'); + let registry = fs.readFileSync(p.tmpDir + '/phovea_registry.js').toString(); + fs.copyFileSync('./phovea.js', p.tmpDir + '/phovea.js'); + + registry += `\n\n + import {register} from 'phovea_core/src/plugin'; + register('__product',require('./phovea.js')); + `; + fs.writeFileSync(p.tmpDir + '/phovea_registry.js', registry); } } function mergeCompose(composePartials) { let dockerCompose = {}; - const _ = require('lodash'); - const mergeArrayUnion = (a, b) => Array.isArray(a) ? _.union(a, b) : undefined; - composePartials.forEach((c) => _.mergeWith(dockerCompose, c, mergeArrayUnion)); + composePartials.forEach((c) => mergeWith(dockerCompose, c)); return dockerCompose; } -function buildCompose(descs, composePartials) { +function buildComposePartials(descs) { + const validDescs = descs.filter((d) => !d.error); + + // merge a big compose file including all + return Promise.all(validDescs.map((p) => { + return Promise.all([loadComposeFile(p.tmpDir, p).then(patchComposeFile.bind(null, p))].concat(p.additional.map((pi) => loadComposeFile(p.tmpDir, pi)))) + .then((partials) => { + p.composePartial = mergeCompose(partials); + }); + })); +} + +function buildCompose(descs, dockerComposePatch) { console.log('create docker-compose.yml'); - const dockerCompose = mergeCompose(composePartials); + + const dockerCompose = mergeCompose(descs.map((d) => d.composePartial).filter(Boolean)); const services = dockerCompose.services; // link the api server types to the web types and server to the api const web = descs.filter((d) => d.type === 'web').map((d) => d.label); @@ -388,6 +455,23 @@ function buildCompose(descs, composePartials) { services[w].links.push(`${s.label}:${s.name}`); }); }); + + if (services._host) { + // inline _host to apis + const host = services._host; + delete services._host; + api.forEach((s) => { + services[s] = mergeCompose([host, services[s]]); + }); + } + + Object.keys(dockerComposePatch.services).forEach((service) => { + if (services[service] !== undefined) { + console.log(`patch generated docker-compose file for ${service}`); + mergeWith(services[service], dockerComposePatch.services[service]); + } + }); + const yaml = require('yamljs'); return fs.writeFileAsync('build/docker-compose.yml', yaml.stringify(dockerCompose, 100, 2)) .then(() => dockerCompose); @@ -404,7 +488,7 @@ function 
pushImages(images) { if (!argv.noDefaultTags) { tags.push(...images.map((image) => ({image, tag: `${dockerRepository}/${image}`}))); } - if (argv.pushExtra) { //push additional custom prefix without the version + if (argv.pushExtra) { // push additional custom prefix without the version tags.push(...images.map((image) => ({ image, tag: `${dockerRepository}/${image.substring(0, image.lastIndexOf(':'))}:${argv.pushExtra}` @@ -417,6 +501,214 @@ function pushImages(images) { .then(() => Promise.all(tags.map((tag) => docker('.', `push ${tag.tag}`)))); } +function loadPatchFile() { + const existsYaml = fs.existsSync('./docker-compose-patch.yaml'); + if (!existsYaml && !fs.existsSync('./docker-compose-patch.yml')) { + return {services: {}}; + } + const content = fs.readFileSync(existsYaml ? './docker-compose-patch.yaml' : './docker-compose-patch.yml'); + const yaml = require('yamljs'); + const r = yaml.parse(content.toString()); + if (!r.services) { + r.services = {}; + } + return r; +} + +function fillDefaults(descs, dockerComposePatch) { + const singleService = descs.length === 1 && (argv.forceLabel === undefined); + + descs.forEach((d, i) => { + // default values + d.additional = d.additional || []; + d.data = d.data || []; + d.name = d.name || (d.repo ? fromRepoUrl(d.repo) : d.label); + d.label = d.label || d.name; + d.symlink = d.symlink || null; // default value + d.image = d.image || `${productName}${singleService ? '' : `/${d.label}`}:${pkg.version}`; + // incorporate patch file + if (dockerComposePatch.services[d.label] && dockerComposePatch.services[d.label].image) { + // use a different base image to build the item + d.baseImage = dockerComposePatch.services[d.label].image; + delete dockerComposePatch.services[d.label].image; + } + // include hint in the tmp directory which one is it + d.tmpDir = `./tmp${i}_${d.name.replace(/\s+/, '').slice(0, 5)}`; + }); + + return descs; +} + +function asChain(steps, chain) { + if (chain.length === 0) { + return []; + } + const possibleSteps = Object.keys(steps); + + const callable = (c) => { + if (typeof c === 'function') { + return c; + } + + if (typeof c === 'string') { + // simple lookup + if (!possibleSteps.includes(c)) { + console.error('invalid step:', c); + throw new Error('invalid step: ' + c); + } + return callable(steps[c]); + } + + if (Array.isArray(c)) { // sequential sub started + const sub = c.map(callable); + return () => { + console.log('run sequential sub chain: ', JSON.stringify(c, null, ' ')); + let step = Promise.resolve(); + for (const s of sub) { + step = step.then(s); + } + return step; + }; + } + // parallel = object + const sub = Object.keys(c).map((ci) => callable(c[ci])); + return () => { + console.log('run parallel sub chain: ', JSON.stringify(c, null, ' ')); + return Promise.all(sub.map((d) => d())); // run sub lazy combined with all + }; + }; + return chain.map(callable); +} + +function runChain(chain, catchErrors) { + let start = null; + let step = new Promise((resolve) => { + start = resolve; + }); + + for (const c of chain) { + step = step.then(c); + } + + step.catch(catchErrors); + + return () => { + start(); // resolve first to start chain + return step; // return last result + }; +} + +function strObject(items) { + const obj = {}; + for (const item of items) { + obj[item] = item; + } + return obj; +} + +function buildDockerImage(p) { + const buildInSubDir = p.type === 'web' || p.type === 'static'; + let buildArgs = ''; + // pass through http_proxy, no_proxy, and https_proxy env variables + for (const key of 
Object.keys(process.env)) {
+    const lkey = key.toLowerCase();
+    if (lkey === 'http_proxy' || lkey === 'https_proxy' || lkey === 'no_proxy') {
+      // pass through
+      buildArgs += ` --build-arg ${lkey}='${process.env[key]}'`;
+    }
+  }
+
+  // patch the docker file with an optional given baseImage
+  return Promise.resolve(patchDockerfile(p, `${p.tmpDir}${buildInSubDir ? '/' + p.name : ''}/deploy/Dockerfile`))
+    // create the container image
+    .then(() => docker(`${p.tmpDir}${buildInSubDir ? '/' + p.name : ''}`, `build -t ${p.image}${buildArgs} -f deploy/Dockerfile .`))
+    // tag the container image
+    .then(() => argv.pushExtra ? docker(`${p.tmpDir}`, `tag ${p.image} ${p.image.substring(0, p.image.lastIndexOf(':'))}:${argv.pushExtra}`) : null);
+}
+
+function createWorkspace(p) {
+  return yo('workspace', {noAdditionals: true, defaultApp: 'phovea'}, p.tmpDir)
+    .then(() => patchWorkspace(p));
+}
+
+function installWebDependencies(p) {
+  return npm(p.additional.length > 0 ? p.tmpDir : (`${p.tmpDir}/${p.name}`), 'install');
+}
+
+function cleanUpWebDependencies(p) {
+  return fs.emptyDirAsync(p.additional.length > 0 ? `${p.tmpDir}/node_modules` : (`${p.tmpDir}/${p.name}/node_modules`));
+}
+
+function resolvePluginTypes(p) {
+  if (p.pluginType) {
+    return Promise.resolve(); // already resolved
+  }
+  if (p.additional.length === 0) {
+    return resolvePluginType(p, p.tmpDir);
+  }
+  return Promise.all([resolvePluginType(p, p.tmpDir)].concat(p.additional.map((pi) => resolvePluginType(pi, p.tmpDir))));
+}
+
+function testWebAdditionals(p) {
+  return Promise.all(p.additional.map((pi) => npm(p.tmpDir, `run test${pi.isHybridType ? ':web' : ''}:${pi.name}`)));
+}
+
+function buildWeb(p) {
+  const hasAdditional = p.additional.length > 0;
+
+  let step;
+  if (hasAdditional) {
+    step = npm(p.tmpDir, `run dist${p.isHybridType ? ':web' : ''}:${p.name}`);
+  } else {
+    step = npm(`${p.tmpDir}/${p.name}`, `run dist${p.isHybridType ? ':web' : ''}`);
+  }
+  // move to target directory
+  return step.then(() => fs.renameAsync(`${p.tmpDir}/${p.name}/dist/${p.name}.tar.gz`, `./build/${p.label}.tar.gz`));
+}
+
+function installPythonTestDependencies(p) {
+  console.log(chalk.yellow('create test environment'));
+  return spawn('pip', 'install --no-cache-dir -r requirements.txt', {cwd: p.tmpDir})
+    .then(() => spawn('pip', 'install --no-cache-dir -r requirements_dev.txt', {cwd: p.tmpDir}));
+}
+
+function buildServer(p) {
+  let act = npm(`${p.tmpDir}/${p.name}`, `run build${p.isHybridType ? ':python' : ''}`);
+  for (const pi of p.additional) {
+    act = act.then(() => npm(`${p.tmpDir}/${pi.name}`, `run build${pi.isHybridType ?
':python' : ''}`)); + } + + // copy all together + act = act + .then(() => fs.ensureDirAsync(`${p.tmpDir}/build/source`)) + .then(() => fs.copyAsync(`${p.tmpDir}/${p.name}/build/source`, `${p.tmpDir}/build/source/`)) + .then(() => Promise.all(p.additional.map((pi) => fs.copyAsync(`${p.tmpDir}/${pi.name}/build/source`, `${p.tmpDir}/build/source/`)))); + + // copy main deploy thing and create a docker out of it + act = act + .then(() => fs.ensureDirAsync(`${p.tmpDir}/deploy`)) + .then(() => fs.copyAsync(`${p.tmpDir}/${p.name}/deploy`, `${p.tmpDir}/deploy/`)); + + return act; +} + +function downloadServerDataFiles(p) { + if (!argv.serial) { + return Promise.all(p.data.map((d) => downloadDataFile(d, `${p.tmpDir}/build/source/_data`, p.tmpDir))); + } + // serial + let act = Promise.resolve(); + for (const d of p.data) { + act = act.then(() => downloadDataFile(d, `${p.tmpDir}/build/source/_data`, p.tmpDir)); + } + return act; +} + +function cleanWorkspace(descs) { + console.log(chalk.yellow('clean workspace')); + return Promise.all([fs.emptyDirAsync('build')].concat(descs.map((d) => fs.emptyDirAsync(d.tmpDir)))); +} + if (require.main === module) { if (argv.skipTests) { // if skipTest option is set, skip tests @@ -424,59 +716,182 @@ if (require.main === module) { env.PHOVEA_SKIP_TESTS = true; } if (argv.quiet) { - // if skipTest option is set, skip tests console.log(chalk.blue('will try to keep my mouth shut...')); } - const descs = require('./phovea_product.json'); - const singleService = descs.length === 1; - const productName = pkg.name.replace('_product', ''); - - - fs.emptyDirAsync('build') - .then(dockerRemoveImages.bind(this, productName)) - // move my own .yo-rc.json to avoid a conflict - .then(fs.renameAsync('.yo-rc.json', '.yo-rc_tmp.json')) - .then(() => { - const buildOne = (d, i) => { - d.additional = d.additional || []; //default values - d.data = d.data || []; - d.name = d.name || fromRepoUrl(d.repo); - d.label = d.label || d.name; - if (singleService) { - d.image = `${productName}:${pkg.version}`; - } else { - d.image = `${productName}/${d.label}:${pkg.version}`; - } - let wait = buildImpl(d, './tmp' + i); - wait.catch((error) => { - d.error = error; - console.error('ERROR building ', d, error); - }); - return wait; - }; - if (argv.serial) { - let r = Promise.resolve([]); - for (let i = 0; i < descs.length; ++i) { - r = r.then((arr) => buildOne(descs[i], i).then((f) => arr.concat(f))); - } - return r; - } else { - return Promise.all(descs.map(buildOne)); - } - }) - .then((composeFiles) => buildCompose(descs, composeFiles.filter((d) => !!d))) - .then(() => pushImages(descs.filter((d) => !d.error).map((d) => d.image))) - .then(() => fs.renameAsync('.yo-rc_tmp.json', '.yo-rc.json')) - .then(() => { + const dockerComposePatch = loadPatchFile(); + const descs = fillDefaults(require('./phovea_product.json'), dockerComposePatch); + + if (fs.existsSync('.yo-rc.json')) { + fs.renameSync('.yo-rc.json', '.yo-rc_tmp.json'); + } + fs.ensureDirSync('build'); + + const cleanUp = () => { + if (fs.existsSync('.yo-rc_tmp.json')) { + fs.renameSync('.yo-rc_tmp.json', '.yo-rc.json'); + } + }; + + const catchProductBuild = (p, act) => { + // no chaining to keep error + act.catch((error) => { + p.error = error; + console.error('ERROR building ', p.name, error); + }); + return act; + }; + + const steps = { + clean: () => cleanWorkspace(descs), + prune: dockerRemoveImages, + compose: () => buildComposePartials(descs).then(() => buildCompose(descs, dockerComposePatch)), + push: () => 
pushImages(descs.filter((d) => !d.error).map((d) => d.image)), + summary: () => { console.log(chalk.bold('summary: ')); const maxLength = Math.max(...descs.map((d) => d.name.length)); descs.forEach((d) => console.log(` ${d.name}${'.'.repeat(3 + (maxLength - d.name.length))}` + (d.error ? chalk.red('ERROR') : chalk.green('SUCCESS')))); const anyErrors = descs.some((d) => d.error); + cleanUp(); if (anyErrors) { process.exit(1); } - }).catch((error) => { + } + }; + + const webTypes = ['static', 'web']; + const serverTypes = ['api', 'service']; + + const chainProducts = []; + for (let i = 0; i < descs.length; ++i) { + const p = descs[i]; + const suffix = p.name; + const hasAdditional = p.additional.length > 0; + const isWeb = webTypes.includes(p.type); + const isServer = serverTypes.includes(p.type); + + if (!isWeb && !isServer) { + console.error(chalk.red('unknown product type: ' + p.type)); + continue; + } + + fs.ensureDirSync(p.tmpDir); + + // clone repo + const subSteps = []; + steps[`clone:${suffix}`] = () => catchProductBuild(p, cloneRepo(p, p.tmpDir)); + subSteps.push(`clone:${suffix}`); + + if (hasAdditional) { + // clone extras + const cloneKeys = []; + for (const pi of p.additional) { + const key = `clone:${suffix}:${pi.name}`; + steps[key] = () => catchProductBuild(p, cloneRepo(pi, p.tmpDir)); + cloneKeys.push(key); + } + + if (argv.serial) { + subSteps.push(...cloneKeys); + } else { + subSteps.push(strObject(cloneKeys)); + } + } + + const needsWorskpace = (isWeb && hasAdditional) || isServer; + steps[`prepare:${suffix}`] = needsWorskpace ? () => catchProductBuild(p, createWorkspace(p)) : null; + + if (isWeb) { + steps[`install:${suffix}`] = () => catchProductBuild(p, installWebDependencies(p)); + } else { // server + steps[`install:${suffix}`] = argv.skipTests ? () => null : () => catchProductBuild(p, installPythonTestDependencies(p)); + } + steps[`test:${suffix}`] = isWeb && hasAdditional ? () => catchProductBuild(p, resolvePluginTypes(p).then(() => testWebAdditionals(p))) : () => null; + steps[`build:${suffix}`] = isWeb ? () => catchProductBuild(p, resolvePluginTypes(p).then(() => buildWeb(p))) : () => catchProductBuild(p, resolvePluginTypes(p).then(() => buildServer(p))); + steps[`data:${suffix}`] = () => catchProductBuild(p, downloadServerDataFiles(p)); + steps[`postbuild:${suffix}`] = isWeb ? () => catchProductBuild(p, cleanUpWebDependencies(p)) : () => null; + steps[`image:${suffix}`] = () => catchProductBuild(p, buildDockerImage(p)); + steps[`save:${suffix}`] = () => catchProductBuild(p, dockerSave(p.image, `build/${p.label}_image.tar.gz`)); + + subSteps.push(`prepare:${suffix}`); + subSteps.push(`install:${suffix}`); + if (!argv.skipTests) { + subSteps.push(`test:${suffix}`); + } + subSteps.push(`build:${suffix}`); + if (isServer && p.data.length > 0) { + subSteps.push(`data:${suffix}`); + } + if (isWeb) { + subSteps.push(`postbuild:${suffix}`); + } + subSteps.push(`image:${suffix}`); + if (!argv.skipSaveImage) { + subSteps.push(`save:${suffix}`); + } + + steps[`product:${suffix}`] = subSteps; + subSteps.name = `product:${suffix}`; + chainProducts.push(subSteps); + } + + // create some meta steps + { + const stepNames = Object.keys(steps); + for (const meta of ['clone', 'prepare', 'build', 'test', 'postbuild', 'image', 'product', 'install']) { + const sub = stepNames.filter((d) => d.startsWith(`${meta}:`)); + if (sub.length <= 0) { + continue; + } + steps[meta] = argv.serial ? 
sub : strObject(sub);
+    }
+  }
+
+  const chain = ['clean'];
+
+  if (!argv.skipCleanUp) {
+    chain.push('prune');
+  }
+
+  if (argv.serial) {
+    chain.push(...chainProducts); // serially
+  } else {
+    const par = {};
+    chainProducts.forEach((c) => {
+      par[c.name] = c;
+    });
+    chain.push(par); // as object = parallel
+  }
+  // result of the promise is an array of partial docker compose files
+
+  chain.push('compose');
+  if (argv.pushTo) {
+    chain.push('push');
+  }
+  chain.push('summary');
+
+  // XX. catch all error handling
+  const catchErrors = (error) => {
     console.error('ERROR extra building ', error);
+    // rename back
+    cleanUp();
     process.exit(1);
-  });
+  };
+
+  if (argv.help) {
+    showHelp(steps, chain);
+    cleanUp();
+    process.exit(0);
+  }
+
+  if (argv._.length > 0) {
+    // explicit chain replace computed one
+    chain.splice(0, chain.length, ...argv._);
+  }
+
+  console.log(chalk.blue('executing chain:'), JSON.stringify(chain, null, ' '));
+  const toExecute = asChain(steps, chain);
+  const launch = runChain(toExecute, catchErrors);
+  if (!argv.dryRun) {
+    launch();
+  }
 }

From 94831d1b46e326334b40798e192b28aa80381b48 Mon Sep 17 00:00:00 2001
From: dvvanessastoiber
Date: Tue, 14 Jan 2020 14:59:27 +0100
Subject: [PATCH 24/34] add logging for testing

---
 build.js | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/build.js b/build.js
index f6283d2..e724e36 100644
--- a/build.js
+++ b/build.js
@@ -888,7 +888,8 @@ if (require.main === module) {
     chain.splice(0, chain.length, ...argv._);
   }
 
-  console.log(chalk.blue('executing chain:'), JSON.stringify(chain, null, ' '));
+  console.log(chalk.blue('executing chain:'), JSON.stringify(chain, null, ' '))
+  console.log(chalk.blue('list of possible steps:'), JSON.stringify(steps, null, ' '));
   const toExecute = asChain(steps, chain);
   const launch = runChain(toExecute, catchErrors);
   if (!argv.dryRun) {

From 3e056ce586db0a2e70ac9a5bdc1702961abeab78 Mon Sep 17 00:00:00 2001
From: dvvanessastoiber
Date: Tue, 14 Jan 2020 15:03:25 +0100
Subject: [PATCH 25/34] fix step in build.js

---
 build.js | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/build.js b/build.js
index e724e36..8bc558f 100644
--- a/build.js
+++ b/build.js
@@ -798,7 +798,9 @@ if (require.main === module) {
     }
 
     const needsWorskpace = (isWeb && hasAdditional) || isServer;
-    steps[`prepare:${suffix}`] = needsWorskpace ? () => catchProductBuild(p, createWorkspace(p)) : null;
+    if(needsWorskpace) {
+      steps[`prepare:${suffix}`] = () => catchProductBuild(p, createWorkspace(p));
+    }
 
     if (isWeb) {
       steps[`install:${suffix}`] = () => catchProductBuild(p, installWebDependencies(p));

From 49736c95364a9e95e7848dae10fc8e5845b1c487 Mon Sep 17 00:00:00 2001
From: Holger Stitz
Date: Wed, 15 Jan 2020 08:42:34 +0100
Subject: [PATCH 26/34] Fix prepare:gapminder step

The prepare step is only necessary if a web product has additional
dependencies or if the product is a server product. In our case the web
product `gapminder` does not have additional dependencies, so the
preparation step can be omitted.

This fix checks if a workspace is needed and skips adding the prepare
step to the chain otherwise.
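
In essence, both the registration (previous commit) and the queueing of
the prepare step are now guarded by the same flag. A minimal sketch of
the resulting logic in build.js, reusing the existing `needsWorskpace`
flag and the `catchProductBuild`/`createWorkspace` helpers:

    // only register and queue a prepare step when the product
    // actually needs a generated workspace
    if(needsWorskpace) {
      steps[`prepare:${suffix}`] = () => catchProductBuild(p, createWorkspace(p));
      subSteps.push(`prepare:${suffix}`);
    }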
--- build.js | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/build.js b/build.js index 8bc558f..4a504ad 100644 --- a/build.js +++ b/build.js @@ -814,7 +814,9 @@ if (require.main === module) { steps[`image:${suffix}`] = () => catchProductBuild(p, buildDockerImage(p)); steps[`save:${suffix}`] = () => catchProductBuild(p, dockerSave(p.image, `build/${p.label}_image.tar.gz`)); - subSteps.push(`prepare:${suffix}`); + if(needsWorskpace) { + subSteps.push(`prepare:${suffix}`); + } subSteps.push(`install:${suffix}`); if (!argv.skipTests) { subSteps.push(`test:${suffix}`); From eefdb8968123197a9045a36a30e4dabdedb0d44f Mon Sep 17 00:00:00 2001 From: Holger Stitz Date: Wed, 15 Jan 2020 09:30:18 +0100 Subject: [PATCH 27/34] Remove --serial flag from build.js --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index a5a65e0..fa3d6f9 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -68,7 +68,7 @@ jobs: ;; esac echo "using tag: --${awsTag}--" - node build.js --serial --skipSaveImage --noDefaultTags --pushExtra=${awsTag} --pushTo=922145058410.dkr.ecr.eu-central-1.amazonaws.com/caleydo + node build.js --skipSaveImage --noDefaultTags --pushExtra=${awsTag} --pushTo=922145058410.dkr.ecr.eu-central-1.amazonaws.com/caleydo - store_artifacts: path: build destination: build From ecdbf003f321a42a1728c098950b93f781f506b5 Mon Sep 17 00:00:00 2001 From: Holger Stitz Date: Wed, 15 Jan 2020 09:39:52 +0100 Subject: [PATCH 28/34] Remove unnecessary log --- build.js | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/build.js b/build.js index 4a504ad..e5ed314 100644 --- a/build.js +++ b/build.js @@ -892,8 +892,7 @@ if (require.main === module) { chain.splice(0, chain.length, ...argv._); } - console.log(chalk.blue('executing chain:'), JSON.stringify(chain, null, ' ')) - console.log(chalk.blue('list of possible steps:'), JSON.stringify(steps, null, ' ')); + console.log(chalk.blue('executing chain:'), JSON.stringify(chain, null, ' ')); const toExecute = asChain(steps, chain); const launch = runChain(toExecute, catchErrors); if (!argv.dryRun) { From d4aded0cf58f055712e6631018b9cb00783aca7d Mon Sep 17 00:00:00 2001 From: dvvanessastoiber Date: Wed, 15 Jan 2020 10:37:31 +0100 Subject: [PATCH 29/34] add _phovea_product.schema.json_ --- phovea_product.schema.json | 173 +++++++++++++++++++++++++++++++++++++ 1 file changed, 173 insertions(+) create mode 100644 phovea_product.schema.json diff --git a/phovea_product.schema.json b/phovea_product.schema.json new file mode 100644 index 0000000..3ed1003 --- /dev/null +++ b/phovea_product.schema.json @@ -0,0 +1,173 @@ +{ + "$id": "phovea_product", + "type": "array", + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "items": { + "$id": "phovea_product/items", + "type": "object", + "required": [ + "type", + "repo" + ], + "properties": { + "type": { + "$id": "phovea_product/items/properties/type", + "type": "string", + "title": "the type of product to build", + "default": "", + "examples": [ + "web", + "service", + "api" + ] + }, + "name": { + "$id": "phovea_product/items/properties/name", + "type": "string", + "title": "name of the repo", + "default": "guesses from the repository", + "examples": [ + "ordino", + "server" + ] + }, + "label": { + "$id": "phovea_product/items/properties/label", + "type": "string", + "title": "product label and docker image label", + "default": "=name", + "examples": [ + "ordino", + "server" + ] + }, 
+      "repo": {
+        "$id": "phovea_product/items/properties/repo",
+        "type": "string",
+        "title": "repository to use",
+        "description": "either a full git url or in the form <user>/<repo>",
+        "default": "",
+        "examples": [
+          "Caleydo/ordino"
+        ]
+      },
+      "symlink": {
+        "$id": "phovea_product/items/properties/symlink",
+        "type": "string",
+        "title": "location relative to the product directory which contains the cloned repository",
+        "description": "Note symbolic links will be created to ensure the proper structure",
+        "default": "",
+        "examples": [
+          "../myclone"
+        ]
+      },
+      "branch": {
+        "$id": "phovea_product/items/properties/branch",
+        "type": "string",
+        "title": "the branch, tag, or sha1 commit to use",
+        "default": "master",
+        "examples": [
+          "master",
+          "v1.2.3",
+          "dc7486a472a987a2f6a38cd18b9b069487f1a4c8",
+          "develop"
+        ]
+      },
+      "additional": {
+        "$id": "phovea_product/items/properties/additional",
+        "type": "array",
+        "description": "additional repositories that should be included in the build",
+        "items": {
+          "$id": "phovea_product/items/properties/additional/items",
+          "type": "object",
+          "required": [
+            "name",
+            "repo"
+          ],
+          "properties": {
+            "name": {
+              "$id": "phovea_product/items/properties/additional/items/properties/name",
+              "type": "string",
+              "title": "name of the repo",
+              "default": "",
+              "examples": [
+                "phovea_core"
+              ]
+            },
+            "repo": {
+              "$id": "phovea_product/items/properties/additional/items/properties/repo",
+              "type": "string",
+              "title": "repository to use",
+              "description": "either a full git url or in the form <user>/<repo>",
+              "default": "",
+              "examples": [
+                "Caleydo/ordino"
+              ]
+            },
+            "symlink": {
+              "$id": "phovea_product/items/properties/symlink",
+              "type": "string",
+              "title": "location relative to the product directory which contains the cloned repository",
+              "description": "Note symbolic links will be created to ensure the proper structure",
+              "default": "",
+              "examples": [
+                "../myclone"
+              ]
+            },
+            "branch": {
+              "$id": "phovea_product/items/properties/additional/items/properties/branch",
+              "type": "string",
+              "title": "the branch, tag, or sha1 commit to use",
+              "default": "master",
+              "examples": [
+                "master",
+                "v1.2.3",
+                "dc7486a472a987a2f6a38cd18b9b069487f1a4c8",
+                "develop"
+              ]
+            }
+          }
+        }
+      },
+      "data": {
+        "$id": "phovea_product/items/properties/data",
+        "type": "array",
+        "description": "a list of data files that should be included in /phovea/_data of the server build",
+        "items": {
+          "$id": "phovea_product/items/properties/data/items",
+          "type": "object",
+          "properties": {
+            "name": {
+              "$id": "phovea_product/items/properties/data/items/properties/name",
+              "type": "string",
+              "title": "name to store the file as",
+              "default": "derived from url or repo"
+            },
+            "url": {
+              "$id": "phovea_product/items/properties/data/items/properties/url",
+              "type": "string",
+              "title": "url to a file to download",
+              "description": "if the file doesn't start with http...
it is assumed that the file is relative to https://s3.eu-central-1.amazonaws.com/phovea-data-packages/",
+              "default": "",
+              "examples": [
+                "test.h5",
+                "https://test.com/test.txt"
+              ]
+            },
+            "repo": {
+              "$id": "phovea_product/items/properties/data/items/properties/repo",
+              "type": "string",
+              "title": "repository to clone that contains a data directory; the /data directory is cloned to /phovea/_data/",
+              "description": "either a full git url or in the form <user>/<repo>",
+              "default": "",
+              "examples": [
+                "Caleydo/ordino"
+              ]
+            }
+          }
+        }
+      }
+    }
+  }
+}

From e32b700f7415d452aacba81e9745ab0a60373b50 Mon Sep 17 00:00:00 2001
From: dvvanessastoiber
Date: Wed, 15 Jan 2020 12:43:45 +0100
Subject: [PATCH 30/34] switch branches for testing

---
 phovea_product.json | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/phovea_product.json b/phovea_product.json
index 43030c9..e934b02 100644
--- a/phovea_product.json
+++ b/phovea_product.json
@@ -10,7 +10,7 @@
     "type": "api",
     "label": "gapminder_server",
     "repo": "phovea/phovea_server",
-    "branch": "develop",
+    "branch": "stoiber/update_debian_url_in_Dockerfile",
     "additional": [
       {
         "name": "phovea_security_flask",

From 200deb6bdd3acb521a08786dbd859f5abd853bc4 Mon Sep 17 00:00:00 2001
From: dvvanessastoiber
Date: Thu, 16 Jan 2020 07:27:35 +0100
Subject: [PATCH 31/34] add additional step to show installed dependencies
 during build

---
 build.js | 14 +++++++++++++-
 1 file changed, 13 insertions(+), 1 deletion(-)

diff --git a/build.js b/build.js
index e5ed314..890b1bd 100644
--- a/build.js
+++ b/build.js
@@ -635,6 +635,10 @@ function installWebDependencies(p) {
   return npm(p.additional.length > 0 ? p.tmpDir : (`${p.tmpDir}/${p.name}`), 'install');
 }
 
+function showWebDependencies(p) {
+  return npm(p.additional.length > 0 ? p.tmpDir : (`${p.tmpDir}/${p.name}`), 'list --depth=1');
+}
+
 function cleanUpWebDependencies(p) {
   return fs.emptyDirAsync(p.additional.length > 0 ? `${p.tmpDir}/node_modules` : (`${p.tmpDir}/${p.name}/node_modules`));
 }
@@ -672,6 +676,10 @@ function installPythonTestDependencies(p) {
     .then(() => spawn('pip', 'install --no-cache-dir -r requirements_dev.txt', {cwd: p.tmpDir}));
 }
 
+function showPythonTestDependencies(p) {
+  return spawn('pip', 'list', {cwd: p.tmpDir});
+}
+
 function buildServer(p) {
   let act = npm(`${p.tmpDir}/${p.name}`, `run build${p.isHybridType ? ':python' : ''}`);
   for (const pi of p.additional) {
@@ -804,8 +812,10 @@ if (require.main === module) {
 
     if (isWeb) {
       steps[`install:${suffix}`] = () => catchProductBuild(p, installWebDependencies(p));
+      steps[`show:${suffix}`] = () => catchProductBuild(p, showWebDependencies(p));
     } else { // server
       steps[`install:${suffix}`] = argv.skipTests ? () => null : () => catchProductBuild(p, installPythonTestDependencies(p));
+      steps[`show:${suffix}`] = () => catchProductBuild(p, showPythonTestDependencies(p));
     }
     steps[`test:${suffix}`] = isWeb && hasAdditional ? () => catchProductBuild(p, resolvePluginTypes(p).then(() => testWebAdditionals(p))) : () => null;
     steps[`build:${suffix}`] = isWeb ?
     steps[`build:${suffix}`] = isWeb ? () => catchProductBuild(p, resolvePluginTypes(p).then(() => buildWeb(p))) : () => catchProductBuild(p, resolvePluginTypes(p).then(() => buildServer(p)));
@@ -818,6 +828,8 @@ if (require.main === module) {
       subSteps.push(`prepare:${suffix}`);
     }
     subSteps.push(`install:${suffix}`);
+    subSteps.push(`show:${suffix}`);
+
     if (!argv.skipTests) {
       subSteps.push(`test:${suffix}`);
     }
@@ -841,7 +853,7 @@ if (require.main === module) {
   // create some meta steps
   {
     const stepNames = Object.keys(steps);
-    for (const meta of ['clone', 'prepare', 'build', 'test', 'postbuild', 'image', 'product', 'install']) {
+    for (const meta of ['clone', 'prepare', 'build', 'test', 'postbuild', 'image', 'product', 'install', 'show']) {
       const sub = stepNames.filter((d) => d.startsWith(`${meta}:`));
       if (sub.length <= 0) {
         continue;

From 6db66d1da18faff4230837b40593c04fce144cef Mon Sep 17 00:00:00 2001
From: dvvanessastoiber
Date: Thu, 16 Jan 2020 08:41:06 +0100
Subject: [PATCH 32/34] revert to develop branch

---
 phovea_product.json | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/phovea_product.json b/phovea_product.json
index e934b02..43030c9 100644
--- a/phovea_product.json
+++ b/phovea_product.json
@@ -10,7 +10,7 @@
     "type": "api",
     "label": "gapminder_server",
     "repo": "phovea/phovea_server",
-    "branch": "stoiber/update_debian_url_in_Dockerfile",
+    "branch": "develop",
     "additional": [
       {
         "name": "phovea_security_flask",

From 117bf8058a576ccfbad3d1d00a7e5fea6a9d1dc1 Mon Sep 17 00:00:00 2001
From: Holger Stitz
Date: Thu, 16 Jan 2020 16:57:45 +0100
Subject: [PATCH 33/34] Activate nightly build for develop branch

---
 .circleci/config.yml | 20 ++++++++++----------
 1 file changed, 10 insertions(+), 10 deletions(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index fa3d6f9..621b6b7 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -103,16 +103,16 @@ jobs:
       fi
 workflows:
   version: 2
-#  build-nightly:
-#    triggers:
-#      - schedule:
-#          cron: "15 1 * * 1-5" # "At 01:15 on every day-of-week from Monday through Friday.", see: https://crontab.guru/#15_1_*_*_1-5
-#          filters:
-#            branches:
-#              only:
-#                - develop
-#    jobs:
-#      - build
+  build-nightly:
+    triggers:
+      - schedule:
+          cron: "15 1 * * 1-5" # "At 01:15 on every day-of-week from Monday through Friday.", see: https://crontab.guru/#15_1_*_*_1-5
+          filters:
+            branches:
+              only:
+                - develop
+    jobs:
+      - build
   build-branch:
     jobs:
       - build:

From 0fc743efc948978441e5efe1240fee3737dbb72b Mon Sep 17 00:00:00 2001
From: dvvanessastoiber
Date: Thu, 16 Jan 2020 17:10:23 +0100
Subject: [PATCH 34/34] Prepare release 2.0.0

---
 package.json        |  4 ++--
 phovea_product.json | 14 +++++++-------
 2 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/package.json b/package.json
index 829e1cf..0b123a3 100644
--- a/package.json
+++ b/package.json
@@ -2,7 +2,7 @@
   "name": "gapminder_product",
   "description": "",
   "homepage": "https://phovea.caleydo.org",
-  "version": "1.0.0-SNAPSHOT",
+  "version": "2.0.0",
   "author": {
     "name": "datavisyn",
     "email": "contact@datavisyn.io",
@@ -25,7 +25,7 @@
     "bluebird": "3.4.6",
     "chalk": "1.1.3",
     "fs-extra": "^1.0.0",
-    "generator-phovea": "github:phovea/generator-phovea#develop",
+    "generator-phovea": "github:phovea/generator-phovea#v3.1.0",
     "lodash": "4.17.14",
     "mkdirp": "0.5.1",
     "yamljs": "0.2.8",
diff --git a/phovea_product.json b/phovea_product.json
index 43030c9..f834b10 100644
--- a/phovea_product.json
+++ b/phovea_product.json
@@ -3,41 +3,41 @@
     "type": "web",
     "label": "gapminder",
     "repo": "Caleydo/gapminder",
-    "branch": "develop",
"branch": "v2.0.0", "additional": [] }, { "type": "api", "label": "gapminder_server", "repo": "phovea/phovea_server", - "branch": "develop", + "branch": "v4.0.1", "additional": [ { "name": "phovea_security_flask", "repo": "phovea/phovea_security_flask", - "branch": "develop" + "branch": "v4.0.0" }, { "name": "phovea_data_redis", "repo": "phovea/phovea_data_redis", - "branch": "develop" + "branch": "v4.0.0" }, { "name": "phovea_data_mongo", "repo": "phovea/phovea_data_mongo", - "branch": "develop" + "branch": "v4.0.0" }, { "name": "phovea_clue", "repo": "phovea/phovea_clue", - "branch": "develop" + "branch": "v4.0.0" } ], "data": [ { "type": "repo", "repo": "Caleydo/gapminder", - "branch": "develop" + "branch": "v2.0.0" } ] }