diff --git a/.dockerignore b/.dockerignore index 0df9afd3ade..3f55d4808ea 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,2 +1,5 @@ **/.* **/dist +**/target +**/*.aci +services/nginz/src/objs diff --git a/.gitignore b/.gitignore index 8046df8a760..e37a41c9425 100644 --- a/.gitignore +++ b/.gitignore @@ -16,6 +16,8 @@ Cargo.lock *.asc *.tmp *~ +.#* +*#*# .*.sw[a-z] .cabal-sandbox ID @@ -60,3 +62,5 @@ integration-aws.yaml DOCKER_ID* swagger-ui services/spar/spar.cabal +deploy/services-demo/resources/templates/* +deploy/services-demo/conf/nginz/zwagger-ui/* diff --git a/CHANGELOG.md b/CHANGELOG.md index 2910fe63c54..1db5a8c9193 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,30 @@ +# 2019-01-27 #596 + +## API changes + + * Track inviters of team members (#566) + * New partner role. (#569, #572, #573, #576, #579, #584, #577, #592) + * App-level websocket pongs. (#561) + +## Bug fixes + + * Spar re-login deleted sso users; fix handling of brig errors. (#588) + * Gundeck: lost push notifications with push-all enabled. (#554) + * Gundeck: do not push natively to devices if they are not on the whitelist. (#554) + * Gundeck: link gundeck unit tests with -threaded. (#554) + +## Internal changes + + * Get rid of async-pool (unliftio now provides the same functionality) (#568) + * Fix: log multi-line error messages on one line. (#595) + * Whitelist all wire.com email addresses (#578) + * SCIM -> Scim (#581) + * Changes to make the demo runnable from Docker (#571) + * Feature/docker image consistency (#570) + * add a readme, for how to build libzauth. (#591) + * better support debian style machines of different architecturs (#582, #587, #583, #585, #590, #580) + + # 2019-01-10 #567 ## API changes diff --git a/Makefile b/Makefile index 7d4ce9370a9..96c206e9221 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,6 @@ SHELL := /usr/bin/env bash LANG := en_US.UTF-8 -DOCKER_USER ?= wireserver +DOCKER_USER ?= quay.io/wire DOCKER_TAG ?= local default: fast @@ -89,33 +89,51 @@ i-%: .PHONY: docker-deps docker-deps: + # `docker-deps` needs to be built or pulled only once (unless native dependencies change) $(MAKE) -C build/alpine deps .PHONY: docker-builder docker-builder: + # `docker-builder` needs to be built or pulled only once (unless native dependencies change) $(MAKE) -C build/alpine builder .PHONY: docker-intermediate docker-intermediate: - docker build -t $(DOCKER_USER)/intermediate:$(DOCKER_TAG) -f build/alpine/Dockerfile.intermediate .; - docker tag $(DOCKER_USER)/intermediate:$(DOCKER_TAG) $(DOCKER_USER)/intermediate:latest; - if test -n "$$DOCKER_PUSH"; then docker login -u $(DOCKER_USERNAME) -p $(DOCKER_PASSWORD); docker push $(DOCKER_USER)/intermediate:$(DOCKER_TAG); docker push $(DOCKER_USER)/intermediate:latest; fi; + # `docker-intermediate` needs to be built whenever code changes - this essentially runs `stack clean && stack install` on the whole repo + docker build -t $(DOCKER_USER)/alpine-intermediate:$(DOCKER_TAG) -f build/alpine/Dockerfile.intermediate --build-arg intermediate=$(DOCKER_USER)/alpine-intermediate --build-arg deps=$(DOCKER_USER)/alpine-deps .; + docker tag $(DOCKER_USER)/alpine-intermediate:$(DOCKER_TAG) $(DOCKER_USER)/alpine-intermediate:latest; + if test -n "$$DOCKER_PUSH"; then docker login -u $(DOCKER_USERNAME) -p $(DOCKER_PASSWORD); docker push $(DOCKER_USER)/alpine-intermediate:$(DOCKER_TAG); docker push $(DOCKER_USER)/alpine-intermediate:latest; fi; .PHONY: docker-migrations docker-migrations: - docker build -t $(DOCKER_USER)/migrations:$(DOCKER_TAG) -f 
build/alpine/Dockerfile.migrations . + # `docker-migrations` needs to be built whenever docker-intermediate was rebuilt AND new schema migrations were added. + docker build -t $(DOCKER_USER)/migrations:$(DOCKER_TAG) -f build/alpine/Dockerfile.migrations --build-arg intermediate=$(DOCKER_USER)/alpine-intermediate --build-arg deps=$(DOCKER_USER)/alpine-deps . docker tag $(DOCKER_USER)/migrations:$(DOCKER_TAG) $(DOCKER_USER)/migrations:latest if test -n "$$DOCKER_PUSH"; then docker login -u $(DOCKER_USERNAME) -p $(DOCKER_PASSWORD); docker push $(DOCKER_USER)/migrations:$(DOCKER_TAG); docker push $(DOCKER_USER)/migrations:latest; fi; .PHONY: docker-exe-% docker-exe-%: - docker build -t $(DOCKER_USER)/"$*":$(DOCKER_TAG) -f build/alpine/Dockerfile.executable --build-arg executable="$*" . + docker image ls | grep $(DOCKER_USER)/alpine-deps > /dev/null || (echo "'make docker-deps' required."; exit 1) + docker image ls | grep $(DOCKER_USER)/alpine-intermediate > /dev/null || (echo "'make docker-intermediate' required."; exit 1) + docker build -t $(DOCKER_USER)/"$*":$(DOCKER_TAG) -f build/alpine/Dockerfile.executable --build-arg executable="$*" --build-arg intermediate=$(DOCKER_USER)/alpine-intermediate --build-arg deps=$(DOCKER_USER)/alpine-deps . docker tag $(DOCKER_USER)/"$*":$(DOCKER_TAG) $(DOCKER_USER)/"$*":latest if test -n "$$DOCKER_PUSH"; then docker login -u $(DOCKER_USERNAME) -p $(DOCKER_PASSWORD); docker push $(DOCKER_USER)/"$*":$(DOCKER_TAG); docker push $(DOCKER_USER)/"$*":latest; fi; -.PHONY: docker-service-% -docker-service-%: - $(MAKE) -C "services/$*" docker +.PHONY: docker-services +docker-services: + # make docker-services doesn't compile, only makes small images out of the `docker-intermediate` image + # to recompile, run `docker-intermediate` first. + docker image ls | grep $(DOCKER_USER)/alpine-deps > /dev/null || (echo "'make docker-deps' required."; exit 1) + docker image ls | grep $(DOCKER_USER)/alpine-intermediate > /dev/null || (echo "'make docker-intermediate' required."; exit 1) + # `make -C services/brig docker` == `make docker-exe-brig docker-exe-brig-integration docker-exe-brig-schema docker-exe-brig-index` + $(MAKE) -C services/brig docker + $(MAKE) -C services/gundeck docker + $(MAKE) -C services/galley docker + $(MAKE) -C services/cannon docker + $(MAKE) -C services/proxy docker + $(MAKE) -C services/spar docker + $(MAKE) docker-exe-zauth + $(MAKE) -C services/nginz docker DOCKER_DEV_NETWORK := --net=host DOCKER_DEV_VOLUMES := -v `pwd`:/src/wire-server diff --git a/README.md b/README.md index 0fc00c9a0db..af5c05aa752 100644 --- a/README.md +++ b/README.md @@ -29,6 +29,7 @@ See more in "[Open sourcing Wire server code](https://medium.com/@wireapp/open-s * [1. Compile sources natively.](#1-compile-sources-natively) * [2. Use docker](#2-use-docker) * [How to run integration tests](#how-to-run-integration-tests) + * [when you need more fine-grained control over your build-test loops](#when-you-need-more-fine-grained-control-over-your-build-test-loops) * [How to install and run `wire-server`](#how-to-install-and-run-wire-server) @@ -109,7 +110,12 @@ For building nginz, see [services/nginz/README.md](services/nginz/README.md) If you wish to build your own docker images, you need [docker version >= 17.05](https://www.docker.com/) and [`make`](https://www.gnu.org/software/make/).
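Besides the all-in-one commands shown next, the `docker-exe-%` pattern target together with the `DOCKER_USER`, `DOCKER_TAG` and `DOCKER_PUSH` variables can be used to build (and optionally push) a single service image. A minimal sketch of one possible per-service workflow, assuming only the target and variable names from the Makefile above; the registry organisation and credentials are placeholders:

```bash
# Build the shared layers, then extract only brig into a small runtime image.
# Target and variable names come from the Makefile in this diff; the values
# for DOCKER_USER/DOCKER_TAG and the credentials are illustrative placeholders.
export DOCKER_USER=quay.io/example-org   # defaults to quay.io/wire if unset
export DOCKER_TAG=my-branch              # defaults to "local" if unset

make docker-deps           # once, or whenever native dependencies change
make docker-intermediate   # recompiles all Haskell code (stack clean && stack install)
make docker-exe-brig       # only extracts the brig binary from the intermediate image

# The Makefile logs in and pushes the tagged image when DOCKER_PUSH is non-empty:
DOCKER_USERNAME=example-user DOCKER_PASSWORD=example-pass DOCKER_PUSH=1 make docker-exe-brig
```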
Then, ```bash -make docker-services +# optionally: +# make docker-builder # if you don't run this, it pulls the alpine-builder image from quay.io +make docker-deps docker-intermediate docker-services + +# subsequent times, after changing code, if you wish to re-create docker images, it's sufficient to +make docker-intermediate docker-services ``` will, eventually, have built a range of docker images. Make sure to [give Docker enough RAM](https://github.com/wireapp/wire-server/issues/562); if you see `make: *** [builder] Error 137`, it might be a sign that the build ran out of memory. You can also mix and match – e.g. pull the [`alpine-builder`](https://quay.io/repository/wire/alpine-builder?tab=tags) image and build the rest locally. diff --git a/build/alpine/Dockerfile b/build/alpine/Dockerfile deleted file mode 100644 index f66e57599d8..00000000000 --- a/build/alpine/Dockerfile +++ /dev/null @@ -1,32 +0,0 @@ -# Requires docker >= 17.05 (requires support for multi-stage builds) -# Requires to have created the wire-server-builder and wire-server-deps docker images (run `make` in this directory) -# Usage example: -# (from wire-server root directory) -# SERVICE=galley; docker build -f build/alpine/Dockerfile -t $SERVICE --build-arg service=$SERVICE . - -ARG builder=quay.io/wire/alpine-builder -ARG deps=quay.io/wire/alpine-deps - -#--- Builder stage --- -FROM ${builder} as builder - -ARG service -ARG target=install - -# ensure no stale files remain if they get deleted from the branch. -RUN find /src/wire-server/ -maxdepth 1 -mindepth 1 | grep -v .stack- | xargs rm -rf - -COPY . /src/wire-server/ - -RUN cd /src/wire-server/services/${service} && make ${target} - -#--- Minified stage --- -FROM ${deps} - -ARG executable -COPY --from=builder /src/wire-server/dist/${executable} /usr/bin/${executable} - -# ARGs are not available at runtime, create symlink at build time -# more info: https://stackoverflow.com/questions/40902445/using-variable-interpolation-in-string-in-docker -RUN ln -s /usr/bin/${executable} /usr/bin/service -ENTRYPOINT ["/usr/bin/dumb-init", "--", "/usr/bin/service"] diff --git a/build/alpine/Dockerfile.executable b/build/alpine/Dockerfile.executable index de5d6631de3..d00e2dc3140 100644 --- a/build/alpine/Dockerfile.executable +++ b/build/alpine/Dockerfile.executable @@ -6,7 +6,7 @@ # (from wire-server root directory) # export EXECUTABLE=galley-schema; docker build -t $EXECUTABLE -f build/alpine/Dockerfile.executable --build-arg executable=$EXECUTABLE . -ARG intermediate=quay.io/wire/intermediate +ARG intermediate=quay.io/wire/alpine-intermediate ARG deps=quay.io/wire/alpine-deps #--- Intermediate stage --- diff --git a/build/alpine/README.md b/build/alpine/README.md index 6635b1ce4db..f42a12ebf98 100644 --- a/build/alpine/README.md +++ b/build/alpine/README.md @@ -7,24 +7,27 @@ To create docker images, you need to install [docker version >= 17.05](https://w Both of the above need to be built first (only once) to be able to actually build a service docker image. -* `Dockerfile` depends on the two images above to compile code inside the `builder` image and then copy the resulting binary into the `deps` image to finally construct a service (e.g. `brig`) docker image that can be used for testing or deployment. The final image is ~30MB (compressed) in size. +* `Dockerfile.intermediate` - based on `Dockerfile.deps`/`Dockerfile.builder`, this is an intermediate image compiling all dynamically linked binaries (obtained when running `make install` in the top-level directory). 
+* `Dockerfile.executable` - based on `Dockerfile.deps`/`Dockerfile.intermediate`, this extracts a single executable from the intermediate image, yielding a small image (~30MB compressed) with a single dynamically linked binary. + ### Build the `builder` and `deps` docker images locally +(from within the `wire-server` directory) ```bash -cd build/alpine && make +make docker-builder +make docker-deps ``` -### Build a service, e.g. brig: +### Build a service docker image, e.g. brig: ```bash -cd services/brig && make docker +make docker-intermediate # recompiles all the haskell code +make docker-exe-brig # this only extracts one binary from the intermediate image above and makes it the default entrypoint. Nothing gets recompiled ``` ## Other dockerfiles -* `Dockerfile.intermediate` - based on `Dockerfile.deps`/`Dockerfile.builder`, this is an intermediate image compiling all dynamically linked binaries (obtained when running `make install` in the top-level directory). -* `Dockerfile.executable` - based on `Dockerfile.deps`/`Dockerfile.intermediate`, this extracts a single executable from the intermediate image, yielding a small image with a single dynamically linked binary. * `Dockerfile.migrations` - same as `Dockerfile.executable`, with a fixed set of database migration binaries. * `Dockerfile.prebuilder` - dependencies of `Dockerfile.builder` that are expected to change very rarely (GHC, system libraries). Currently we're able to use system GHC, but if we require a newer version of GHC than the one provided by Alpine, we could build GHC in `Dockerfile.prebuilder` (as it has been [done before][2018-11-28]). diff --git a/deploy/docker-ephemeral/docker-compose.yaml b/deploy/docker-ephemeral/docker-compose.yaml index 53fd0c3e324..c15e885aae3 100644 --- a/deploy/docker-ephemeral/docker-compose.yaml +++ b/deploy/docker-ephemeral/docker-compose.yaml @@ -1,16 +1,27 @@ version: '2' +networks: + demo_wire: + external: false + services: fake_dynamodb: + container_name: demo_wire_dynamodb image: cnadiminti/dynamodb-local:2018-04-11 ports: - 127.0.0.1:4567:8000 + networks: + - demo_wire fake_sqs: + container_name: demo_wire_sqs image: airdock/fake-sqs:0.3.1 ports: - 127.0.0.1:4568:4568 + networks: + - demo_wire fake_localstack: + container_name: demo_wire_localstack image: localstack/localstack:0.8.0 # NB: this is younger than 0.8.6! ports: - 127.0.0.1:4569:4579 # ses # needed for local integration tests @@ -19,14 +30,20 @@ services: - DEBUG=1 - DEFAULT_REGION=eu-west-1 - SERVICES=ses,sns + networks: + - demo_wire basic_smtp: # needed for demo setup + container_name: demo_wire_smtp # https://github.com/namshi/docker-smtp image: namshi/smtp ports: - 127.0.0.1:2500:25 + networks: + - demo_wire fake_s3: + container_name: demo_wire_s3 image: minio/minio:RELEASE.2018-05-25T19-49-13Z ports: - "127.0.0.1:4570:9000" @@ -34,6 +51,8 @@ services: MINIO_ACCESS_KEY: dummykey MINIO_SECRET_KEY: dummysecret # minio requires a secret of at least 8 chars command: server /tmp + networks: + - demo_wire # activemq: # image: rmohr/activemq:5.15.4 @@ -41,22 +60,31 @@ services: # - "61613:61613" redis: + container_name: demo_wire_redis image: redis:3.0.7-alpine ports: - "127.0.0.1:6379:6379" + networks: + - demo_wire elasticsearch: + container_name: demo_wire_elasticsearch image: elasticsearch:5.6 # https://hub.docker.com/_/elastic is deprecated, but 6.2.4 did not work without further changes. 
# image: docker.elastic.co/elasticsearch/elasticsearch:6.2.4 ports: - "127.0.0.1:9200:9200" - "127.0.0.1:9300:9300" + networks: + - demo_wire cassandra: + container_name: demo_wire_cassandra image: cassandra:3.11.2 ports: - "127.0.0.1:9042:9042" + networks: + - demo_wire db_migrations_brig_schema: image: quay.io/wire/brig-schema @@ -67,6 +95,8 @@ services: - ./:/scripts links: - cassandra + networks: + - demo_wire db_migrations_brig_index: image: quay.io/wire/brig-index @@ -77,6 +107,8 @@ services: - ./:/scripts links: - elasticsearch + networks: + - demo_wire db_migrations_galley: image: quay.io/wire/galley-schema @@ -87,6 +119,8 @@ services: - ./:/scripts links: - cassandra + networks: + - demo_wire db_migrations_gundeck: image: quay.io/wire/gundeck-schema @@ -97,6 +131,8 @@ services: - ./:/scripts links: - cassandra + networks: + - demo_wire db_migrations_spar: image: quay.io/wire/spar-schema @@ -107,6 +143,8 @@ services: - ./:/scripts links: - cassandra + networks: + - demo_wire aws_cli: image: mesosphere/aws-cli:1.14.5 @@ -124,3 +162,5 @@ services: entrypoint: /scripts/init.sh volumes: - ./:/scripts + networks: + - demo_wire diff --git a/deploy/services-demo/README.md b/deploy/services-demo/README.md index 406600aa674..d90c227ae11 100644 --- a/deploy/services-demo/README.md +++ b/deploy/services-demo/README.md @@ -12,7 +12,8 @@ deploy/docker-ephemeral/run.sh ``` # On terminal 2, start the services -deploy/services-demo/demo.sh +deploy/services-demo/demo.sh # if all services have been compiled natively +deploy/services-demo/demo.sh docker # in case Docker images were built instead ``` ### Structure of the services-demo folder diff --git a/deploy/services-demo/conf/brig.demo-docker.yaml b/deploy/services-demo/conf/brig.demo-docker.yaml new file mode 100644 index 00000000000..c3a7e14c21b --- /dev/null +++ b/deploy/services-demo/conf/brig.demo-docker.yaml @@ -0,0 +1,103 @@ +brig: + host: brig + port: 8082 + +cassandra: + endpoint: + host: cassandra + port: 9042 + keyspace: brig_test + +elasticsearch: + url: http://elasticsearch:9200 + index: directory_test + +cargohold: + host: cargohold + port: 8084 + +galley: + host: galley + port: 8085 + +gundeck: + host: gundeck + port: 8086 + +aws: + prekeyTable: integration-brig-prekeys + sqsEndpoint: http://sqs:4568 # https://sqs.eu-west-1.amazonaws.com + dynamoDBEndpoint: http://dynamodb:8000 # https://dynamodb.eu-west-1.amazonaws.com + +internalEvents: + queueType: sqs + queueName: integration-brig-events-internal + +emailSMS: + email: + smtpEndpoint: + host: smtp + port: 25 + smtpConnType: plain + general: + templateDir: resources/templates + emailSender: backend-demo@mail.wiredemo.example.com + smsSender: "" + + user: + activationUrl: http://brig:8080/activate?key=${key}&code=${code} + smsActivationUrl: http://brig:8080/v/${code} + passwordResetUrl: http://brig:8080/password-reset/${key}?code=${code} + invitationUrl: http://brig:8080/register?invitation_code=${code} + deletionUrl: http://brig:8080/users/delete?key=${key}&code=${code} + + provider: + homeUrl: https://provider.localhost/ + providerActivationUrl: http://brig:8080/provider/activate?key=${key}&code=${code} + approvalUrl: http://brig:8080/provider/approve?key=${key}&code=${code} + approvalTo: success@simulator.amazonses.com + providerPwResetUrl: http://brig:8080/provider/password-reset?key=${key}&code=${code} + + team: + tInvitationUrl: http://brig:8080/register?team=${team}&team_code=${code} + tActivationUrl: http://brig:8080/register?team=${team}&team_code=${code} + 
tCreatorWelcomeUrl: http://brig:8080/creator-welcome-website + tMemberWelcomeUrl: http://brig:8080/member-welcome-website + +zauth: + privateKeys: resources/zauth/privkeys.txt + publicKeys: resources/zauth/pubkeys.txt + authSettings: + keyIndex: 1 + userTokenTimeout: 4838400 # 56 days + sessionTokenTimeout: 604800 # 7 days + accessTokenTimeout: 900 # 15 minutes + providerTokenTimeout: 604800 # 7 days + +turn: + serversV2: resources/turn/servers-v2.txt + servers: resources/turn/servers.txt + secret: resources/turn/secret.txt + # ^ TODO: This secret should be the same used by the TURN servers + configTTL: 3600 + tokenTTL: 21600 + +optSettings: + setTwilio: resources/twilio-credentials.yaml + setNexmo: resources/nexmo-credentials.yaml + setActivationTimeout: 1209600 # 1 day + setTeamInvitationTimeout: 1814400 # 21 days + setUserMaxConnections: 1000 + setCookieDomain: brig + setCookieInsecure: false + setUserCookieRenewAge: 1209600 # 14 days + setUserCookieLimit: 32 + setUserCookieThrottle: + stdDev: 3000 # 50 minutes + retryAfter: 86400 # 1 day + setDefaultLocale: en + setMaxTeamSize: 128 + setMaxConvSize: 128 + +logLevel: Debug +logNetStrings: false diff --git a/deploy/services-demo/conf/cannon.demo-docker.yaml b/deploy/services-demo/conf/cannon.demo-docker.yaml new file mode 100644 index 00000000000..bdaa2be9e7e --- /dev/null +++ b/deploy/services-demo/conf/cannon.demo-docker.yaml @@ -0,0 +1,11 @@ +cannon: + host: cannon + port: 8083 + externalHost: cannon + +gundeck: + host: gundeck + port: 8086 + +logLevel: Info +logNetStrings: false diff --git a/deploy/services-demo/conf/cargohold.demo-docker.yaml b/deploy/services-demo/conf/cargohold.demo-docker.yaml new file mode 100644 index 00000000000..3a966f678f4 --- /dev/null +++ b/deploy/services-demo/conf/cargohold.demo-docker.yaml @@ -0,0 +1,13 @@ +cargohold: + host: cargohold + port: 8084 + +aws: + keyId: dummykey + secretKey: dummysecret + s3Bucket: dummy-bucket + s3Endpoint: http://s3:4570 # https://s3-eu-west-1.amazonaws.com:443 + +settings: + maxTotalBytes: 27262976 + downloadLinkTTL: 300 # Seconds diff --git a/deploy/services-demo/conf/galley.demo-docker.yaml b/deploy/services-demo/conf/galley.demo-docker.yaml new file mode 100644 index 00000000000..b0a57cb5259 --- /dev/null +++ b/deploy/services-demo/conf/galley.demo-docker.yaml @@ -0,0 +1,31 @@ +galley: + host: galley + port: 8085 + +cassandra: + endpoint: + host: cassandra + port: 9042 + keyspace: galley_test + +brig: + host: brig + port: 8082 + +gundeck: + host: gundeck + port: 8086 + +spar: + host: spar + port: 8088 + +settings: + httpPoolSize: 1024 + maxTeamSize: 128 + maxConvSize: 128 + intraListing: false + conversationCodeURI: https://cannon/join/ + +logLevel: Info +logNetStrings: false diff --git a/deploy/services-demo/conf/gundeck.demo-docker.yaml b/deploy/services-demo/conf/gundeck.demo-docker.yaml new file mode 100644 index 00000000000..3a35a4de5e3 --- /dev/null +++ b/deploy/services-demo/conf/gundeck.demo-docker.yaml @@ -0,0 +1,26 @@ +gundeck: + host: gundeck + port: 8086 + +cassandra: + endpoint: + host: cassandra + port: 9042 + keyspace: gundeck_test + +redis: + host: redis + port: 6379 + +aws: + queueName: integration-gundeck-events + region: eu-west-1 + account: "123456789012" # Default account nr used by localstack + arnEnv: integration + sqsEndpoint: http://sqs:4568 # https://sqs.eu-west-1.amazonaws.com + snsEndpoint: http://sns:4575 # https://sns.eu-west-1.amazonaws.com + +settings: + httpPoolSize: 1024 + notificationTTL: 24192200 + bulkPush: false diff --git 
a/deploy/services-demo/conf/nginz/nginx-docker.conf b/deploy/services-demo/conf/nginz/nginx-docker.conf new file mode 100644 index 00000000000..4c52e9a1e1c --- /dev/null +++ b/deploy/services-demo/conf/nginz/nginx-docker.conf @@ -0,0 +1,375 @@ +worker_processes 4; +worker_rlimit_nofile 1024; +pid /tmp/nginz.pid; +daemon off; + +# nb. start up errors (eg. misconfiguration) may still end up in /$(LOG_PATH)/error.log +error_log stderr warn; + +events { + worker_connections 1024; + multi_accept off; +} + +http { + # + # Some temporary paths (by default, will use the `prefix` path given when starting nginx) + # + + client_body_temp_path /tmp; + fastcgi_temp_path /tmp; + proxy_temp_path /tmp; + scgi_temp_path /tmp; + uwsgi_temp_path /tmp; + + # + # Sockets + # + + sendfile on; + tcp_nopush on; + tcp_nodelay on; + + # + # Timeouts + # + + client_body_timeout 60; + client_header_timeout 60; + keepalive_timeout 75; + send_timeout 60; + + ignore_invalid_headers off; + + types_hash_max_size 2048; + + server_names_hash_bucket_size 64; + server_name_in_redirect off; + + large_client_header_buffers 4 8k; + + + # + # Security + # + + server_tokens off; + + # + # Logging + # + + log_format custom_zeta '$remote_addr - $remote_user [$time_local] "$request" $status $body_bytes_sent "$http_referer" "$http_user_agent" "$http_x_forwarded_for" - $connection $request_time $upstream_response_time $upstream_cache_status $zauth_user $zauth_connection $request_id $proxy_protocol_addr'; + access_log /dev/stdout custom_zeta; + + # + # Monitoring + # + vhost_traffic_status_zone; + + # + # Gzip + # + + gzip on; + gzip_disable msie6; + gzip_vary on; + gzip_proxied any; + gzip_comp_level 6; + gzip_buffers 16 8k; + gzip_http_version 1.1; + gzip_min_length 1024; + gzip_types 'text/plain text/css application/json text/xml'; + + # + # Proxied Upstream Services + # + + include upstreams-docker; + + # + # Mapping for websocket connections + # + + map $http_upgrade $connection_upgrade { + websocket upgrade; + default ''; + } + + + + # + # Locations + # + + server { + listen 8080; + listen 8081; + + zauth_keystore resources/zauth/pubkeys.txt; + zauth_acl conf/nginz/zauth_acl.txt; + + location /status { + zauth off; + return 200; + } + + location /vts { + zauth off; + vhost_traffic_status_display; + vhost_traffic_status_display_format html; + } + + # + # Service Routing + # + + # Brig Endpoints + + rewrite ^/api-docs/users /users/api-docs?base_url=http://127.0.0.1:8080/ break; + + location /users/api-docs { + include common_response_no_zauth.conf; + proxy_pass http://brig; + } + + location /register { + include common_response_no_zauth.conf; + proxy_pass http://brig; + } + + location /access { + include common_response_no_zauth.conf; + proxy_pass http://brig; + } + + location /activate { + include common_response_no_zauth.conf; + proxy_pass http://brig; + } + + location /login { + include common_response_no_zauth.conf; + proxy_pass http://brig; + } + + location /self { + include common_response_with_zauth.conf; + proxy_pass http://brig; + } + + location /users { + include common_response_with_zauth.conf; + proxy_pass http://brig; + } + + location /search { + include common_response_with_zauth.conf; + proxy_pass http://brig; + } + + location /connections { + include common_response_with_zauth.conf; + proxy_pass http://brig; + } + + location /clients { + include common_response_with_zauth.conf; + proxy_pass http://brig; + } + + location /properties { + include common_response_with_zauth.conf; + proxy_pass http://brig; + } + + 
location /calls/config { + include common_response_with_zauth.conf; + proxy_pass http://brig; + } + # Cargohold Endpoints + + rewrite ^/api-docs/assets /assets/api-docs?base_url=http://127.0.0.1:8080/ break; + + location /assets/api-docs { + include common_response_no_zauth.conf; + proxy_pass http://cargohold; + } + + + location /assets { + include common_response_with_zauth.conf; + proxy_pass http://cargohold; + } + + location /bot/assets { + include common_response_with_zauth.conf; + proxy_pass http://cargohold; + } + + location /provider/assets { + include common_response_with_zauth.conf; + proxy_pass http://cargohold; + } + + # Galley Endpoints + + rewrite ^/api-docs/conversations /conversations/api-docs?base_url=http://127.0.0.1:8080/ break; + + location /conversations/api-docs { + include common_response_no_zauth.conf; + proxy_pass http://galley; + } + + + location /conversations { + include common_response_with_zauth.conf; + proxy_pass http://galley; + } + + location ~* ^/conversations/([^/]*)/otr/messages { + include common_response_with_zauth.conf; + proxy_pass http://galley; + } + + location /broadcast/otr/messages { + include common_response_with_zauth.conf; + proxy_pass http://galley; + } + + location /bot/conversation { + include common_response_with_zauth.conf; + proxy_pass http://galley; + } + + location /bot/messages { + include common_response_with_zauth.conf; + proxy_pass http://galley; + } + + location ~* ^/teams$ { + include common_response_with_zauth.conf; + proxy_pass http://galley; + } + + location ~* ^/teams/([^/]*)$ { + include common_response_with_zauth.conf; + proxy_pass http://galley; + } + + location ~* ^/teams/([^/]*)/members(.*) { + include common_response_with_zauth.conf; + proxy_pass http://galley; + } + + location ~* ^/teams/([^/]*)/conversations(.*) { + include common_response_with_zauth.conf; + proxy_pass http://galley; + } + + # Gundeck Endpoints + + rewrite ^/api-docs/push /push/api-docs?base_url=http://127.0.0.1:8080/ break; + + location /push/api-docs { + include common_response_no_zauth.conf; + proxy_pass http://gundeck; + } + + location /push { + include common_response_with_zauth.conf; + proxy_pass http://gundeck; + } + + location /presences { + include common_response_with_zauth.conf; + proxy_pass http://gundeck; + } + + location /notifications { + include common_response_with_zauth.conf; + proxy_pass http://gundeck; + } + + # Proxy Endpoints + + rewrite ^/api-docs/proxy /proxy/api-docs?base_url=http://127.0.0.1:8080/ break; + + location /proxy/api-docs { + include common_response_no_zauth.conf; + proxy_pass http://proxy; + } + + location /proxy { + include common_response_with_zauth.conf; + proxy_pass http://proxy; + } + + # Cannon Endpoints + + rewrite ^/api-docs/await /await/api-docs?base_url=http://127.0.0.1:8080/ break; + + location /await/api-docs { + include common_response_no_zauth.conf; + proxy_pass http://cannon; + } + + location /await { + include common_response_with_zauth.conf; + proxy_pass http://cannon; + + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection $connection_upgrade; + proxy_read_timeout 1h; + } + + # Spar Endpoints + + location /sso { + include common_response_no_zauth.conf; + proxy_pass http://spar; + } + + location /sso-initiate-bind { + include common_response_with_zauth.conf; + proxy_pass http://spar; + } + + location /identity-providers { + include common_response_with_zauth.conf; + proxy_pass http://spar; + } + + # + # Swagger Resource Listing + # + location /api-docs { + zauth off; + 
default_type application/json; + root conf/nginz/zwagger-ui; + index resources.json; + if ($request_method = 'OPTIONS') { + add_header 'Access-Control-Allow-Methods' "GET, POST, PUT, DELETE, OPTIONS"; + add_header 'Access-Control-Allow-Headers' "$http_access_control_request_headers, DNT,X-Mx-ReqToken,Keep-Alive,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type"; + add_header 'Content-Type' 'text/plain; charset=UTF-8'; + add_header 'Content-Length' 0; + return 204; + } + more_set_headers 'Access-Control-Allow-Origin: $http_origin'; + } + + # Swagger UI + + location /swagger-ui { + zauth off; + gzip off; + alias conf/nginz/zwagger-ui; + types { + application/javascript js; + text/css css; + text/html html; + image/png png; + } + } + } +} diff --git a/deploy/services-demo/conf/nginz/upstreams-docker b/deploy/services-demo/conf/nginz/upstreams-docker new file mode 100644 index 00000000000..2351a238e34 --- /dev/null +++ b/deploy/services-demo/conf/nginz/upstreams-docker @@ -0,0 +1,35 @@ +upstream cargohold { + least_conn; + keepalive 32; + server cargohold:8084 max_fails=3 weight=1; +} +upstream gundeck { + least_conn; + keepalive 32; + server gundeck:8086 max_fails=3 weight=1; +} +upstream cannon { + least_conn; + keepalive 32; + server cannon:8083 max_fails=3 weight=1; +} +upstream galley { + least_conn; + keepalive 32; + server galley:8085 max_fails=3 weight=1; +} +upstream proxy { + least_conn; + keepalive 32; + server proxy:8087 max_fails=3 weight=1; +} +upstream brig { + least_conn; + keepalive 32; + server brig:8082 max_fails=3 weight=1; +} +upstream spar { + least_conn; + keepalive 32; + server spar:8088 max_fails=3 weight=1; +} diff --git a/deploy/services-demo/conf/nginz/zwagger-ui b/deploy/services-demo/conf/nginz/zwagger-ui deleted file mode 120000 index 0dc2036924f..00000000000 --- a/deploy/services-demo/conf/nginz/zwagger-ui +++ /dev/null @@ -1 +0,0 @@ -../../../../services/nginz/zwagger-ui/ \ No newline at end of file diff --git a/deploy/services-demo/conf/spar.demo-docker.yaml b/deploy/services-demo/conf/spar.demo-docker.yaml new file mode 100644 index 00000000000..717c6a61c8d --- /dev/null +++ b/deploy/services-demo/conf/spar.demo-docker.yaml @@ -0,0 +1,36 @@ +saml: + version: SAML2.0 + logLevel: Debug + + spHost: spar + spPort: 8088 + spAppUri: http://localhost:8080/ # <--- change this to point to a reachable web app + spSsoUri: http://localhost:8080/sso # <--- change this to the URL by which spar can be reached from an external IdP + + contacts: + - type: ContactBilling + company: Example Company + givenName: Example + surname: Company + email: email:company@example.com + +brig: + host: brig + port: 8082 + +galley: + host: galley + port: 8085 + +cassandra: + endpoint: + host: cassandra + port: 9042 + keyspace: spar_test + +maxttlAuthreq: 28800 # 8h +maxttlAuthresp: 28800 # 8h + +maxScimTokens: 16 + +logNetStrings: False # log using netstrings encoding (see http://cr.yp.to/proto/netstrings.txt) diff --git a/deploy/services-demo/demo.sh b/deploy/services-demo/demo.sh index 3888d988d95..4fb4eea8ca8 100755 --- a/deploy/services-demo/demo.sh +++ b/deploy/services-demo/demo.sh @@ -4,10 +4,15 @@ set -eo pipefail -USAGE="$0 [test-executable args...]" -EXE=$1 +USAGE="$0 [docker]" +MODE="$1" +docker_deployment="false" +if [ "$MODE" = "docker" ]; then + docker_deployment="true" +fi TOP_LEVEL="$( cd "$( dirname "${BASH_SOURCE[0]}" )/../.." 
&& pwd )" SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +DOCKER_FILE="$SCRIPT_DIR/docker-compose.yaml" DIR="${TOP_LEVEL}/services" PARENT_PID=$$ rm -f /tmp/demo.* # remove previous temp files, if any @@ -32,10 +37,20 @@ function kill_gracefully() { kill $(list_descendants $PARENT_PID) &> /dev/null } +function run_zauth() { + if [ "$docker_deployment" = "false" ]; then + ${DIR}/../dist/zauth "$@" + else + docker run --entrypoint "/usr/bin/zauth" ${docker_zauth_image:-quay.io/wire/zauth} $@ + fi +} + trap "kill_gracefully; kill_all" INT TERM ERR function check_secrets() { - test -f ${DIR}/../dist/zauth || { echo "zauth is not compiled. How about you run 'cd ${TOP_LEVEL} && make services' first?"; exit 1; } + if [ "$docker_deployment" = "false" ]; then + test -f ${DIR}/../dist/zauth || { echo "zauth is not compiled. How about you run 'cd ${TOP_LEVEL} && make services' first?"; exit 1; } + fi if [[ ! -f ${SCRIPT_DIR}/resources/turn/secret.txt ]]; then echo "Generate a secret for the TURN servers (must match the turn.secret key in brig's config)..." @@ -46,7 +61,7 @@ function check_secrets() { if [[ ! -f ${SCRIPT_DIR}/resources/zauth/privkeys.txt || ! -f ${SCRIPT_DIR}/resources/zauth/pubkeys.txt ]]; then echo "Generate private and public keys (used both by brig and nginz)..." TMP_KEYS=$(mktemp "/tmp/demo.keys.XXXXXXXXXXX") - ${DIR}/../dist/zauth -m gen-keypair -i 1 > $TMP_KEYS + run_zauth -m gen-keypair -i 1 > $TMP_KEYS cat $TMP_KEYS | sed -n 's/public: \(.*\)/\1/p' > ${SCRIPT_DIR}/resources/zauth/pubkeys.txt cat $TMP_KEYS | sed -n 's/secret: \(.*\)/\1/p' > ${SCRIPT_DIR}/resources/zauth/privkeys.txt else @@ -59,15 +74,17 @@ function check_prerequisites() { && nc -z 127.0.0.1 9200 \ && nc -z 127.0.0.1 6379 \ || { echo "Databases not up. Maybe run 'deploy/docker-ephemeral/run.sh' in a separate terminal first?"; exit 1; } - test -f ${DIR}/../dist/brig \ - && test -f ${DIR}/../dist/galley \ - && test -f ${DIR}/../dist/cannon \ - && test -f ${DIR}/../dist/gundeck \ - && test -f ${DIR}/../dist/cargohold \ - && test -f ${DIR}/../dist/proxy \ - && test -f ${DIR}/../dist/spar \ - && test -f ${DIR}/../dist/nginx \ - || { echo "Not all services are compiled. How about you run 'cd ${TOP_LEVEL} && make services' first?"; exit 1; } + if [ "$docker_deployment" = "false" ]; then + test -f ${DIR}/../dist/brig \ + && test -f ${DIR}/../dist/galley \ + && test -f ${DIR}/../dist/cannon \ + && test -f ${DIR}/../dist/gundeck \ + && test -f ${DIR}/../dist/cargohold \ + && test -f ${DIR}/../dist/proxy \ + && test -f ${DIR}/../dist/spar \ + && test -f ${DIR}/../dist/nginx \ + || { echo "Not all services are compiled. 
How about you run 'cd ${TOP_LEVEL} && make services' first?"; exit 1; } + fi } blue=6 @@ -95,6 +112,22 @@ function run_nginz() { | sed -e "s/^/$(tput setaf ${colour})[nginz] /" -e "s/$/$(tput sgr0)/" & } +function copy_brig_templates() { + # Need to copy over the templates from Brig since symlinking does not + # work with Docker + mkdir -p "${SCRIPT_DIR}/resources/templates" + cp -r "${SCRIPT_DIR}/../../services/brig/deb/opt/brig/templates/"* "${SCRIPT_DIR}/resources/templates/" +} + +function copy_nginz_configs() { + # Need to copy over the configs from Nginz since symlinking does not + # work with Docker + # ensure swagger UI files are downloaded befory copying + make -C "${TOP_LEVEL}/services/nginz" zwagger-ui/swagger-ui + mkdir -p "${SCRIPT_DIR}/conf/nginz/zwagger-ui" + cp -r "${SCRIPT_DIR}/../../services/nginz/zwagger-ui/"* "${SCRIPT_DIR}/conf/nginz/zwagger-ui/" +} + # brig,gundeck,galley use the amazonka library's 'Discover', which expects AWS credentials # even if those are not used/can be dummy values with the fake sqs/ses/etc containers used (see deploy/docker-ephemeral/docker-compose.yaml) export AWS_REGION=${AWS_REGION:-eu-west-1} @@ -103,15 +136,21 @@ export AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-dummy} check_secrets check_prerequisites - -run_haskell_service brig ${green} -run_haskell_service galley ${yellow} -run_haskell_service gundeck ${blue} Info -run_haskell_service cannon ${orange} -run_haskell_service cargohold ${purpleish} Info -run_haskell_service proxy ${redish} Info -run_haskell_service spar ${orange} -run_nginz ${blueish} +copy_brig_templates +copy_nginz_configs + +if [ "$docker_deployment" = "false" ]; then + run_haskell_service brig ${green} + run_haskell_service galley ${yellow} + run_haskell_service gundeck ${blue} Info + run_haskell_service cannon ${orange} + run_haskell_service cargohold ${purpleish} Info + run_haskell_service proxy ${redish} Info + run_haskell_service spar ${orange} + run_nginz ${blueish} +else + docker-compose --file "$DOCKER_FILE" up +fi sleep 3 # wait a moment for services to start before continuing diff --git a/deploy/services-demo/docker-compose.yaml b/deploy/services-demo/docker-compose.yaml new file mode 100644 index 00000000000..5435a02de02 --- /dev/null +++ b/deploy/services-demo/docker-compose.yaml @@ -0,0 +1,160 @@ +networks: + docker-ephemeral_demo_wire: + external: true + +version: '2' +services: + brig: + image: quay.io/wire/brig + depends_on: + - cargohold + - galley + - gundeck + ports: + - 127.0.0.1:8082:8082 + volumes: + - ./:/configs + entrypoint: + - /usr/bin/brig + - -c + - /configs/conf/brig.demo-docker.yaml + working_dir: /configs + environment: + - AWS_REGION=eu-west-1 + - AWS_ACCESS_KEY_ID=dummy + - AWS_SECRET_ACCESS_KEY=dummy + external_links: + - demo_wire_cassandra:cassandra + - demo_wire_elasticsearch:elasticsearch + - demo_wire_dynamodb:dynamodb + - demo_wire_sqs:sqs + - demo_wire_smtp:smtp + networks: + - docker-ephemeral_demo_wire + + galley: + image: quay.io/wire/galley + ports: + - 127.0.0.1:8085:8085 + volumes: + - ./:/configs + entrypoint: + - /usr/bin/galley + - -c + - /configs/conf/galley.demo-docker.yaml + working_dir: /configs + environment: + - AWS_REGION=eu-west-1 + - AWS_ACCESS_KEY_ID=dummy + - AWS_SECRET_ACCESS_KEY=dummy + external_links: + - demo_wire_cassandra:cassandra + networks: + - docker-ephemeral_demo_wire + + gundeck: + image: quay.io/wire/gundeck + ports: + - 127.0.0.1:8086:8086 + volumes: + - ./:/configs + entrypoint: + - /usr/bin/gundeck + - -c + - 
/configs/conf/gundeck.demo-docker.yaml + working_dir: /configs + environment: + - AWS_REGION=eu-west-1 + - AWS_ACCESS_KEY_ID=dummy + - AWS_SECRET_ACCESS_KEY=dummy + external_links: + - demo_wire_redis:redis + - demo_wire_sqs:sqs + - demo_wire_localstack:sns + networks: + - docker-ephemeral_demo_wire + + cannon: + image: quay.io/wire/cannon + ports: + - 127.0.0.1:8083:8083 + volumes: + - ./:/configs + entrypoint: + - /usr/bin/cannon + - -c + - /configs/conf/cannon.demo-docker.yaml + working_dir: /configs + networks: + - docker-ephemeral_demo_wire + + cargohold: + image: quay.io/wire/cargohold + ports: + - 127.0.0.1:8084:8084 + volumes: + - ./:/configs + entrypoint: + - /usr/bin/cargohold + - -c + - /configs/conf/cargohold.demo-docker.yaml + working_dir: /configs + external_links: + - demo_wire_s3:s3 + networks: + - docker-ephemeral_demo_wire + + proxy: + image: quay.io/wire/proxy + ports: + - 127.0.0.1:8087:8087 + volumes: + - ./:/configs + entrypoint: + - /usr/bin/proxy + - -c + - /configs/conf/proxy.demo.yaml + working_dir: /configs + networks: + - docker-ephemeral_demo_wire + + spar: + image: quay.io/wire/spar + ports: + - 127.0.0.1:8088:8088 + volumes: + - ./:/configs + entrypoint: + - /usr/bin/spar + - -c + - /configs/conf/spar.demo-docker.yaml + working_dir: /configs + external_links: + - demo_wire_cassandra:cassandra + networks: + - docker-ephemeral_demo_wire + + nginz: + image: quay.io/wire/nginz + depends_on: + - cargohold + - gundeck + - cannon + - galley + - proxy + - brig + - spar + ports: + - 127.0.0.1:8080:8080 + - 127.0.0.1:8081:8081 + volumes: + - ./:/configs + entrypoint: + - /usr/sbin/nginx + - -p + - /configs + - -c + - /configs/conf/nginz/nginx-docker.conf + working_dir: /configs + networks: + - docker-ephemeral_demo_wire diff --git a/deploy/services-demo/resources/templates b/deploy/services-demo/resources/templates deleted file mode 120000 index 8b1a2c355e8..00000000000 --- a/deploy/services-demo/resources/templates +++ /dev/null @@ -1 +0,0 @@ -../../../services/brig/deb/opt/brig/templates \ No newline at end of file diff --git a/doc/Dependencies.md b/doc/Dependencies.md index 4d448a9b922..db7c06072ee 100644 --- a/doc/Dependencies.md +++ b/doc/Dependencies.md @@ -7,29 +7,25 @@ In addition to the information below, you can also consult the Dockerfiles for A * [alpine setup for Haskell services](../build/alpine/Dockerfile.builder) * [alpine setup for nginz](../services/nginz/Dockerfile) -### General Package dependencies (needed to compile Haskell services) +## General package dependencies (needed to compile Haskell services) -#### Fedora: +### Fedora: ```bash sudo dnf install -y pkgconfig haskell-platform libstdc++-devel libstdc++-static gcc-c++ libtool automake openssl-devel libsodium-devel ncurses-compat-libs libicu-devel GeoIP-devel libxml2-devel snappy-devel protobuf-compiler ``` -#### Debian: +### Ubuntu / Debian: -*Note: Debian is not recommended due to this issue when running local integration tests: [#327](https://github.com/wireapp/wire-server/issues/327)* +_Note_: Debian is not recommended due to this issue when running local integration tests: [#327](https://github.com/wireapp/wire-server/issues/327)*. This issue does not occur with Ubuntu. 
```bash -sudo apt install pkg-config libsodium-dev openssl-dev libtool automake build-essential libicu-dev libsnappy-dev libgeoip-dev protobuf-compiler libxml2-dev -y +sudo apt install pkg-config libsodium-dev openssl-dev libtool automake build-essential libicu-dev libsnappy-dev libgeoip-dev protobuf-compiler libxml2-dev zlib1g-dev -y ``` If `openssl-dev` does not work for you, try `libssl-dev`. -#### Ubuntu: - -Hopefully almost like Debian. - -#### Arch: +### Arch: ``` # You might also need 'sudo pacman -S base-devel' if you haven't @@ -37,7 +33,7 @@ Hopefully almost like Debian. sudo pacman -S geoip snappy icu openssl ``` -#### macOS: +### macOS: ```bash brew install pkg-config libsodium openssl automake icu4c geoip snappy protobuf @@ -55,7 +51,24 @@ extra-lib-dirs: - /usr/local/opt/icu4c/lib ``` -### Haskell Stack +## Haskell Stack + +When you're done, ensure `stack --version` is >= 1.6.5 + +You may wish to make executables installed by stack available, by e.g. adding the following to your shell profile: + +```bash +export PATH=~/.local/bin:$PATH +``` + +### Ubuntu / Debian Unstable +_Note_: Debian stretch packages too old of a version of haskell-stack. It is recommended to retrieve the version available from testing, or unstable, or to use stack to update stack.(https://github.com/commercialhaskell/stack/issues/3686)* + +```bash +sudo apt install haskell-stack -y +``` + +### Generic ```bash curl -sSL https://get.haskellstack.org/ | sh @@ -63,23 +76,43 @@ curl -sSL https://get.haskellstack.org/ | sh wget -qO- https://get.haskellstack.org/ | sh ``` -Ensure `stack --version` is >= 1.6.5 - -You may wish to make executables installed by stack available, by e.g. adding the following to your shell profile: +## Rust +### Ubuntu / Debian ```bash -export PATH=~/.local/bin:$PATH +sudo apt install rustc cargo -y ``` -### Rust +### Generic ```bash curl https://sh.rustup.rs -sSf | sh source $HOME/.cargo/env ``` +## makedeb + +This is a tool to create debian-style binary packages. It is optional, and is only used if you want to install debian-style packages on your debian or ubuntu system. + +_Note_: If you want to build debian-style packages of cryptobox-c and other wire utilities, execute this step. otherwise, make sure to execute the 'Generic' version of the cryptobox-c step. + +```bash +git clone https://github.com/wireapp/wire-server && cd wire-server/tools/makedeb +export VERSION=0 +make dist +dpkg -i ../../dist/makedeb*.deb +``` + +## cryptobox-c -### cryptobox-c +### Ubuntu / Debian +```bash +git clone https://github.com/wireapp/cryptobox-c && cd cryptobox-c +make dist +dpkg -i target/release/cryptobox*.deb +``` + +### Generic ```bash export TARGET_LIB="$HOME/.wire-dev/lib" export TARGET_INCLUDE="$HOME/.wire-dev/include" @@ -107,16 +140,16 @@ extra-lib-dirs: - /.wire-dev/lib ``` -### makedeb +## Docker -Create debian packages, optional, only used in `make dist` +_Note_: While it is possible to use non-docker solutions to set up and configure this software, we recommend using docker and our provided docker images to configure dependent services rapidly, and ensure a consistent environment for all potential developers. 
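Once docker and docker-compose are installed (distribution-specific instructions follow below), the dockerised pieces referenced throughout this diff fit together roughly as follows — a minimal sketch using the two scripts from `deploy/services-demo/README.md`; paths are relative to the repository root and nothing here is specific to any distribution:

```bash
# Terminal 1: start the ephemeral dependencies
# (cassandra, elasticsearch, redis, fake AWS services, local SMTP).
deploy/docker-ephemeral/run.sh

# Terminal 2: run the wire-server services from the pre-built docker images.
deploy/services-demo/demo.sh docker
```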
+### Ubuntu / Debian Testing/Unstable: ```bash -git clone https://github.com/wireapp/wire-server && cd wire-server/tools/makedeb -stack install +sudo apt install docker.io docker-compose ``` -### Docker +### Generic: * [Install docker](https://docker.com) * [Install docker-compose](https://docs.docker.com/compose/install/) diff --git a/libs/brig-types/brig-types.cabal b/libs/brig-types/brig-types.cabal index c66fdd5fcc7..e01f4533311 100644 --- a/libs/brig-types/brig-types.cabal +++ b/libs/brig-types/brig-types.cabal @@ -92,6 +92,7 @@ test-suite brig-types-tests , base , brig-types , bytestring + , containers , currency-codes , galley-types , hostname-validate diff --git a/libs/brig-types/src/Brig/Types/Swagger.hs b/libs/brig-types/src/Brig/Types/Swagger.hs index ee27b344fba..987d917edd2 100644 --- a/libs/brig-types/src/Brig/Types/Swagger.hs +++ b/libs/brig-types/src/Brig/Types/Swagger.hs @@ -5,7 +5,9 @@ module Brig.Types.Swagger where import Imports import Data.Swagger import Data.Swagger.Build.Api +import Galley.Types.Teams (defaultRole) +import qualified Data.Swagger.Model.Api as Model import qualified Galley.Types.Swagger as Galley import qualified Galley.Types.Teams.Swagger as Galley @@ -375,13 +377,25 @@ invitationRequest = defineModel "InvitationRequest" $ do description "Locale to use for the invitation." optional +role :: DataType +role = Model.Prim $ Model.Primitive + { Model.primType = Model.PrimString + , Model.defaultValue = Just defaultRole + , Model.enum = Just [minBound..] + , Model.minVal = Just minBound + , Model.maxVal = Just maxBound + } + invitation :: Model invitation = defineModel "Invitation" $ do description "An invitation to join Wire" property "inviter" bytes' $ description "User ID of the inviter" + property "role" role $ do + description "Role of the invited user" + optional property "id" bytes' $ - description "UUID used to refer the invitation" + description "UUID used to refer to the invitation" property "email" string' $ do description "Email of the invitee" optional diff --git a/libs/brig-types/src/Brig/Types/Team/Invitation.hs b/libs/brig-types/src/Brig/Types/Team/Invitation.hs index 3da0d3bed0e..8a59ffbfd81 100644 --- a/libs/brig-types/src/Brig/Types/Team/Invitation.hs +++ b/libs/brig-types/src/Brig/Types/Team/Invitation.hs @@ -7,18 +7,23 @@ import Brig.Types.Common import Data.Aeson import Data.Id import Data.Json.Util +import Galley.Types.Teams data InvitationRequest = InvitationRequest { irEmail :: !Email , irName :: !Name , irLocale :: !(Maybe Locale) + , irRole :: !(Maybe Role) } deriving (Eq, Show) data Invitation = Invitation { inTeam :: !TeamId + , inRole :: !Role , inInvitation :: !InvitationId , inIdentity :: !Email , inCreatedAt :: !UTCTimeMillis + , inCreatedBy :: !(Maybe UserId) -- ^ this is always 'Just' for new invitations, but for + -- migration it is allowed to be 'Nothing'. } deriving (Eq, Show) data InvitationList = InvitationList @@ -31,26 +36,35 @@ instance FromJSON InvitationRequest where InvitationRequest <$> o .: "email" <*> o .: "inviter_name" <*> o .:? "locale" + <*> o .:? 
"role" instance ToJSON InvitationRequest where - toJSON i = object [ "email" .= irEmail i - , "inviter_name" .= irName i - , "locale" .= irLocale i - ] + toJSON i = object $ + [ "email" .= irEmail i + , "inviter_name" .= irName i + , "locale" .= irLocale i + , "role" .= irRole i + ] instance FromJSON Invitation where parseJSON = withObject "invitation" $ \o -> Invitation <$> o .: "team" + -- clients, when leaving "role" empty, can leave the default role choice to us + <*> o .:? "role" .!= defaultRole <*> o .: "id" <*> o .: "email" <*> o .: "created_at" + <*> o .:? "created_by" instance ToJSON Invitation where - toJSON i = object [ "team" .= inTeam i - , "id" .= inInvitation i - , "email" .= inIdentity i - , "created_at" .= inCreatedAt i - ] + toJSON i = object $ + [ "team" .= inTeam i + , "role" .= inRole i + , "id" .= inInvitation i + , "email" .= inIdentity i + , "created_at" .= inCreatedAt i + , "created_by" .= inCreatedBy i + ] instance ToJSON InvitationList where toJSON (InvitationList l m) = object diff --git a/libs/brig-types/test/unit/Test/Brig/Types/Arbitrary.hs b/libs/brig-types/test/unit/Test/Brig/Types/Arbitrary.hs index 9925ed3e1de..86db84e6649 100644 --- a/libs/brig-types/test/unit/Test/Brig/Types/Arbitrary.hs +++ b/libs/brig-types/test/unit/Test/Brig/Types/Arbitrary.hs @@ -19,6 +19,7 @@ import Brig.Types.Activation import Brig.Types.Code import Brig.Types.Intra import Brig.Types.Provider (UpdateServiceWhitelist(..)) +import Brig.Types.Team.Invitation import Brig.Types.TURN import Brig.Types.TURN.Internal import Brig.Types.User @@ -42,7 +43,7 @@ import Test.QuickCheck import Test.QuickCheck.Instances () import Text.Hostname - +import qualified Data.Set as Set import qualified Data.Text as ST import qualified System.Random @@ -256,6 +257,9 @@ instance Arbitrary NewTeamUser where , NewTeamMemberSSO <$> arbitrary ] +instance Arbitrary TeamMember where + arbitrary = newTeamMember <$> arbitrary <*> arbitrary <*> arbitrary + instance Arbitrary PasswordChange where arbitrary = PasswordChange <$> arbitrary <*> arbitrary @@ -334,6 +338,26 @@ instance Arbitrary Country where instance Arbitrary UpdateServiceWhitelist where arbitrary = UpdateServiceWhitelist <$> arbitrary <*> arbitrary <*> arbitrary +instance Arbitrary InvitationList where + arbitrary = InvitationList <$> listOf arbitrary <*> arbitrary + +instance Arbitrary Invitation where + arbitrary = Invitation <$> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary + +instance Arbitrary Permissions where + arbitrary = maybe (error "instance Arbitrary Permissions") pure =<< do + selfperms <- arbitrary + copyperms <- Set.intersection selfperms <$> arbitrary + pure $ newPermissions selfperms copyperms + +instance Arbitrary Perm where + arbitrary = elements [minBound..] + +instance Arbitrary InvitationRequest where + arbitrary = InvitationRequest <$> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary + +instance Arbitrary Role where + arbitrary = elements [minBound..] 
---------------------------------------------------------------------- -- utilities diff --git a/libs/brig-types/test/unit/Test/Brig/Types/User.hs b/libs/brig-types/test/unit/Test/Brig/Types/User.hs index 1d9f51d671c..f0a0ba8e370 100644 --- a/libs/brig-types/test/unit/Test/Brig/Types/User.hs +++ b/libs/brig-types/test/unit/Test/Brig/Types/User.hs @@ -6,17 +6,21 @@ {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE ViewPatterns #-} +{-# OPTIONS_GHC -Wno-orphans #-} + module Test.Brig.Types.User where import Imports import Brig.Types.Activation import Brig.Types.Intra import Brig.Types.Provider (UpdateServiceWhitelist) +import Brig.Types.Team.Invitation import Brig.Types.User import Data.Aeson import Data.Aeson.Types import Data.Proxy import Data.Typeable (typeOf) +import Galley.Types.Teams import Test.Brig.Types.Arbitrary () import Test.QuickCheck import Test.Tasty @@ -76,6 +80,9 @@ roundtripTests = , run @EmailRemove Proxy , run @EmailUpdate Proxy , run @HandleUpdate Proxy + , run @InvitationList Proxy + , run @Invitation Proxy + , run @InvitationRequest Proxy , run @LocaleUpdate Proxy , run @NewPasswordReset Proxy , run @NewUser Proxy @@ -84,6 +91,7 @@ roundtripTests = , run @PhoneUpdate Proxy , run @ReAuthUser Proxy , run @SelfProfile Proxy + , run @TeamMember Proxy , run @UpdateServiceWhitelist Proxy , run @UserHandleInfo Proxy , run @UserIdentity Proxy diff --git a/libs/extended/extended.cabal b/libs/extended/extended.cabal index 67f62294535..d472f09bef1 100644 --- a/libs/extended/extended.cabal +++ b/libs/extended/extended.cabal @@ -22,12 +22,10 @@ library ghc-options: -Wall -O2 -fwarn-tabs exposed-modules: - UnliftIO.Async.Extended Options.Applicative.Extended build-depends: base - , async-pool , extra , imports , optparse-applicative diff --git a/libs/extended/src/UnliftIO/Async/Extended.hs b/libs/extended/src/UnliftIO/Async/Extended.hs deleted file mode 100644 index 39e171a5c8d..00000000000 --- a/libs/extended/src/UnliftIO/Async/Extended.hs +++ /dev/null @@ -1,55 +0,0 @@ -{-# LANGUAGE TypeOperators #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE ScopedTypeVariables #-} - --- | A version of "UnliftIO.Async" with extra utilities. -module UnliftIO.Async.Extended - ( module UnliftIO.Async - -- * Pooled functions (using at most T threads) - , forPooled - , mapMPooled - , replicatePooled - , sequencePooled - ) where - -import Imports -import UnliftIO -import UnliftIO.Async - -import qualified Control.Concurrent.Async.Pool as Pool - --- | A concurrent variant of 'for' that uses at most T threads. -forPooled - :: MonadUnliftIO m - => Int -> [a] -> (a -> m b) -> m [b] -forPooled t xs f = - withRunInIO $ \runInIO -> - Pool.withTaskGroup t $ \tg -> - Pool.mapConcurrently tg (runInIO . f) xs - --- | A concurrent variant of 'mapM' that uses at most T threads. -mapMPooled - :: MonadUnliftIO m - => Int -> (a -> m b) -> [a] -> m [b] -mapMPooled t f xs = - withRunInIO $ \runInIO -> - Pool.withTaskGroup t $ \tg -> - Pool.mapConcurrently tg (runInIO . f) xs - --- | 'Async.replicateConcurrently' that uses at most T threads. -replicatePooled - :: MonadUnliftIO m - => Int -- ^ How many threads to use - -> Int - -> m a - -> m [a] -replicatePooled t n = sequencePooled t . replicate n - --- | A concurrent variant of 'sequence' that uses at most T threads. 
-sequencePooled - :: MonadUnliftIO m - => Int -> [m a] -> m [a] -sequencePooled t xs = - withRunInIO $ \runInIO -> - Pool.withTaskGroup t $ \tg -> - Pool.mapConcurrently tg runInIO xs diff --git a/libs/galley-types/galley-types.cabal b/libs/galley-types/galley-types.cabal index 5ea75016e6d..7e50c55d029 100644 --- a/libs/galley-types/galley-types.cabal +++ b/libs/galley-types/galley-types.cabal @@ -10,6 +10,10 @@ build-type: Simple cabal-version: >= 1.10 description: API types of Galley. +flag cql + description: Enable cql instances + default: False + library default-language: Haskell2010 default-extensions: NoImplicitPrelude @@ -28,6 +32,10 @@ library Galley.Types.Teams.Intra Galley.Types.Teams.Swagger + if flag(cql) + cpp-options: "-DWITH_CQL" + build-depends: cql >= 3.0 + build-depends: aeson >= 0.6 , attoparsec >= 0.10 @@ -39,6 +47,8 @@ library , currency-codes >= 2.0 , data-default >= 0.5 , gundeck-types >= 1.15.13 + , errors + , exceptions >= 0.10.0 , lens >= 4.12 , protobuf >= 0.2 , swagger >= 0.1 @@ -50,3 +60,29 @@ library , uri-bytestring >= 0.2 , uuid >= 1.3 , imports + +test-suite galley-types-tests + type: exitcode-stdio-1.0 + hs-source-dirs: test/unit + main-is: Main.hs + + other-modules: + Test.Galley.Types + + build-depends: + base + , imports + , galley-types + , lens + , tasty + , tasty-hunit + , types-common + , containers + + ghc-options: + -Wall + -threaded + -with-rtsopts=-N + + default-language: Haskell2010 + default-extensions: NoImplicitPrelude diff --git a/libs/galley-types/src/Galley/Types/Teams.hs b/libs/galley-types/src/Galley/Types/Teams.hs index 6d595758f73..8f4fec49cc2 100644 --- a/libs/galley-types/src/Galley/Types/Teams.hs +++ b/libs/galley-types/src/Galley/Types/Teams.hs @@ -1,6 +1,8 @@ +{-# LANGUAGE CPP #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE LambdaCase #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE StandaloneDeriving #-} {-# LANGUAGE StrictData #-} @@ -26,9 +28,12 @@ module Galley.Types.Teams , TeamMember , newTeamMember + , newTeamMemberRaw , userId , permissions + , invitation , teamMemberJson + , canSeePermsOf , TeamMemberList , notTeamMember @@ -53,6 +58,7 @@ module Galley.Types.Teams , noPermissions , serviceWhitelistPermissions , hasPermission + , mayGrantPermission , isTeamOwner , self , copy @@ -63,6 +69,10 @@ module Galley.Types.Teams , intToPerm , intToPerms + , Role (..) + , defaultRole + , rolePermissions + , BindingNewTeam (..) , NonBindingNewTeam (..) 
, NewTeam @@ -101,7 +111,9 @@ module Galley.Types.Teams ) where import Imports +import Control.Exception (ErrorCall(ErrorCall)) import Control.Lens (makeLenses, view, (^.)) +import Control.Monad.Catch import Data.Aeson import Data.Aeson.Types (Parser, Pair) import Data.Bits (testBit, (.|.)) @@ -114,6 +126,10 @@ import Galley.Types.Teams.Internal import qualified Data.HashMap.Strict as HashMap import qualified Data.Set as Set +#ifdef WITH_CQL +import qualified Control.Error.Util as Err +import qualified Database.CQL.Protocol as Cql +#endif data Event = Event { _eventType :: EventType @@ -191,6 +207,7 @@ data TeamList = TeamList data TeamMember = TeamMember { _userId :: UserId , _permissions :: Permissions + , _invitation :: Maybe (UserId, UTCTimeMillis) } deriving (Eq, Ord, Show) newtype TeamMemberList = TeamMemberList @@ -216,8 +233,8 @@ data Perm = | DeleteConversation | AddTeamMember | RemoveTeamMember - | AddConversationMember - | RemoveConversationMember + | AddRemoveConvMember + | ModifyConvMetadata | GetBilling | SetBilling | SetTeamData @@ -225,10 +242,44 @@ data Perm = | SetMemberPermissions | GetTeamConversations | DeleteTeam + -- FUTUREWORK: make the verbs in the roles more consistent + -- (CRUD vs. Add,Remove vs; Get,Set vs. Create,Delete etc). -- If you ever think about adding a new permission flag, -- read Note [team roles] first. deriving (Eq, Ord, Show, Enum, Bounded) +data Role = RoleOwner | RoleAdmin | RoleMember | RoleExternalPartner + deriving (Eq, Ord, Show, Enum, Bounded) + +defaultRole :: Role +defaultRole = RoleMember + +rolePermissions :: Role -> Permissions +rolePermissions role = Permissions p p where p = rolePerms role + +rolePerms :: Role -> Set Perm +rolePerms RoleOwner = rolePerms RoleAdmin <> Set.fromList + [ GetBilling + , SetBilling + , DeleteTeam + ] +rolePerms RoleAdmin = rolePerms RoleMember <> Set.fromList + [ AddTeamMember + , RemoveTeamMember + , SetTeamData + , SetMemberPermissions + ] +rolePerms RoleMember = rolePerms RoleExternalPartner <> Set.fromList + [ DeleteConversation + , AddRemoveConvMember + , ModifyConvMetadata + , GetMemberPermissions + ] +rolePerms RoleExternalPartner = Set.fromList + [ CreateConversation + , GetTeamConversations + ] + newtype BindingNewTeam = BindingNewTeam (NewTeam ()) deriving (Eq, Show) @@ -258,9 +309,17 @@ newTeam tid uid nme ico bnd = Team tid uid nme ico Nothing bnd newTeamList :: [Team] -> Bool -> TeamList newTeamList = TeamList -newTeamMember :: UserId -> Permissions -> TeamMember +newTeamMember :: UserId -> Permissions -> Maybe (UserId, UTCTimeMillis) -> TeamMember newTeamMember = TeamMember +-- | This is called in "Galley.Data". It throws an exception if the input is inconsistent, meaning +-- that one of inviter and invitation timestamp is Nothing and the other is Just. This is justified +-- because it can only be caused by +newTeamMemberRaw :: MonadThrow m => UserId -> Permissions -> Maybe UserId -> Maybe UTCTimeMillis -> m TeamMember +newTeamMemberRaw uid perms (Just invu) (Just invt) = pure $ TeamMember uid perms (Just (invu, invt)) +newTeamMemberRaw uid perms Nothing Nothing = pure $ TeamMember uid perms Nothing +newTeamMemberRaw _ _ _ _ = throwM $ ErrorCall "TeamMember with incomplete metadata." 
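At the API level, the `Role` values above are what the new `"role"` fields in the invitation payloads carry, serialised as `"owner"`, `"admin"`, `"member"` and `"partner"` (see the `ToJSON`/`FromJSON Role` instances further down). A hedged usage sketch: the JSON field names come from the `InvitationRequest`/`Invitation` instances in this diff, while the URL, port and auth header are illustrative assumptions only:

```bash
# Invite someone into a team with the new "partner" role.
# Field names follow the InvitationRequest instance; the endpoint path and
# credentials are assumptions made for illustration.
curl -s -X POST "http://localhost:8080/teams/$TEAM_ID/invitations" \
  -H "Authorization: Bearer $ACCESS_TOKEN" \
  -H "Content-Type: application/json" \
  -d '{"email": "new.partner@example.com", "inviter_name": "Alice", "locale": "en", "role": "partner"}'

# Per the ToJSON Invitation instance, the stored invitation now also carries
# "role", "created_at" and "created_by" (the inviter), e.g.:
# {"team": "...", "role": "partner", "id": "...", "email": "...",
#  "created_at": "...", "created_by": "..."}
```

Omitting `"role"` is allowed (`irRole` is a `Maybe Role`), and when reading stored invitations the `FromJSON Invitation` instance falls back to `defaultRole`, i.e. `RoleMember`.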
+ newTeamMemberList :: [TeamMember] -> TeamMemberList newTeamMemberList = TeamMemberList @@ -333,13 +392,16 @@ noPermissions = Permissions mempty mempty serviceWhitelistPermissions :: Set Perm serviceWhitelistPermissions = Set.fromList [ AddTeamMember, RemoveTeamMember - , RemoveConversationMember + , AddRemoveConvMember , SetTeamData ] hasPermission :: TeamMember -> Perm -> Bool hasPermission tm p = p `Set.member` (tm^.permissions.self) +mayGrantPermission :: TeamMember -> Perm -> Bool +mayGrantPermission tm p = p `Set.member` (tm^.permissions.copy) + -- Note [team roles] -- ~~~~~~~~~~~~ -- @@ -347,7 +409,7 @@ hasPermission tm p = p `Set.member` (tm^.permissions.self) -- permissions: -- -- member = --- {Add/RemoveConversationMember, Create/DeleteConversation, +-- {AddRemoveConvMember, Create/DeleteConversation, -- GetMemberPermissions, GetTeamConversations} -- -- admin = member + @@ -385,8 +447,8 @@ permToInt CreateConversation = 0x0001 permToInt DeleteConversation = 0x0002 permToInt AddTeamMember = 0x0004 permToInt RemoveTeamMember = 0x0008 -permToInt AddConversationMember = 0x0010 -permToInt RemoveConversationMember = 0x0020 +permToInt AddRemoveConvMember = 0x0010 +permToInt ModifyConvMetadata = 0x0020 permToInt GetBilling = 0x0040 permToInt SetBilling = 0x0080 permToInt SetTeamData = 0x0100 @@ -400,8 +462,8 @@ intToPerm 0x0001 = Just CreateConversation intToPerm 0x0002 = Just DeleteConversation intToPerm 0x0004 = Just AddTeamMember intToPerm 0x0008 = Just RemoveTeamMember -intToPerm 0x0010 = Just AddConversationMember -intToPerm 0x0020 = Just RemoveConversationMember +intToPerm 0x0010 = Just AddRemoveConvMember +intToPerm 0x0020 = Just ModifyConvMetadata intToPerm 0x0040 = Just GetBilling intToPerm 0x0080 = Just SetBilling intToPerm 0x0100 = Just SetTeamData @@ -430,18 +492,48 @@ instance FromJSON TeamList where TeamList <$> o .: "teams" <*> o .: "has_more" -teamMemberJson :: Bool -> TeamMember -> Value -teamMemberJson False m = object [ "user" .= _userId m ] -teamMemberJson True m = object [ "user" .= _userId m, "permissions" .= _permissions m ] - -teamMemberListJson :: Bool -> TeamMemberList -> Value -teamMemberListJson withPerm l = - object [ "members" .= map (teamMemberJson withPerm) (_teamMembers l) ] +instance ToJSON TeamMember where + toJSON = teamMemberJson (const True) + +-- | Show 'Permissions' conditionally. The condition takes the member that will receive the result +-- into account. See 'canSeePermsOf'. +teamMemberJson :: (TeamMember -> Bool) -> TeamMember -> Value +teamMemberJson withPerms m = object $ + [ "user" .= _userId m ] <> + [ "permissions" .= _permissions m | withPerms m ] <> + [ "created_by" .= (fst <$> _invitation m) ] <> + [ "created_at" .= (snd <$> _invitation m) ] + +-- | Use this to construct the condition expected by 'teamMemberJson', 'teamMemberListJson' +canSeePermsOf :: TeamMember -> TeamMember -> Bool +canSeePermsOf seeer seeee = + seeer `hasPermission` GetMemberPermissions || seeer == seeee + +parseTeamMember :: Value -> Parser TeamMember +parseTeamMember = withObject "team-member" $ \o -> + TeamMember <$> o .: "user" + <*> o .: "permissions" + <*> parseInvited o + where + parseInvited :: Object -> Parser (Maybe (UserId, UTCTimeMillis)) + parseInvited o = do + invby <- o .:? "created_by" + invat <- o .:? 
"created_at" + case (invby, invat) of + (Just b, Just a) -> pure $ Just (b, a) + (Nothing, Nothing) -> pure $ Nothing + _ -> fail "created_by, created_at" + +instance ToJSON TeamMemberList where + toJSON = teamMemberListJson (const True) + +-- | Show a list of team members using 'teamMemberJson'. +teamMemberListJson :: (TeamMember -> Bool) -> TeamMemberList -> Value +teamMemberListJson withPerms l = + object [ "members" .= map (teamMemberJson withPerms) (_teamMembers l) ] instance FromJSON TeamMember where - parseJSON = withObject "team-member" $ \o -> - TeamMember <$> o .: "user" - <*> o .: "permissions" + parseJSON = parseTeamMember instance FromJSON TeamMemberList where parseJSON = withObject "team member list" $ \o -> @@ -478,6 +570,24 @@ instance FromJSON Permissions where Nothing -> fail "invalid permissions" Just ps -> pure ps +instance ToJSON Role where + toJSON RoleOwner = "owner" + toJSON RoleAdmin = "admin" + toJSON RoleMember = "member" + toJSON RoleExternalPartner = "partner" + +instance FromJSON Role where + parseJSON = withText "Role" $ \case + "owner" -> pure RoleOwner + "admin" -> pure RoleAdmin + "member" -> pure RoleMember + "partner" -> pure RoleExternalPartner + "collaborator" -> pure RoleExternalPartner + -- 'collaborator' was used for a short period of time on staging. if you are + -- wondering about this, it's probably safe to remove. + -- ~fisx, Wed Jan 23 16:38:52 CET 2019 + bad -> fail $ "not a role: " <> show bad + newTeamJson :: NewTeam a -> [Pair] newTeamJson (NewTeam n i ik _) = "name" .= fromRange n @@ -491,14 +601,14 @@ instance ToJSON BindingNewTeam where instance ToJSON NonBindingNewTeam where toJSON (NonBindingNewTeam t) = object - $ "members" .= (map (teamMemberJson True) . fromRange <$> _newTeamMembers t) + $ "members" .= (fromRange <$> _newTeamMembers t) # newTeamJson t deriving instance FromJSON BindingNewTeam deriving instance FromJSON NonBindingNewTeam instance ToJSON NewTeamMember where - toJSON t = object ["member" .= teamMemberJson True (_ntmNewTeamMember t)] + toJSON t = object ["member" .= _ntmNewTeamMember t] instance FromJSON NewTeamMember where parseJSON = withObject "add team member" $ \o -> @@ -625,3 +735,36 @@ instance ToJSON TeamDeleteData where toJSON tdd = object [ "password" .= _tdAuthPassword tdd ] + +#ifdef WITH_CQL +instance Cql.Cql Role where + ctype = Cql.Tagged Cql.IntColumn + + toCql RoleOwner = Cql.CqlInt 1 + toCql RoleAdmin = Cql.CqlInt 2 + toCql RoleMember = Cql.CqlInt 3 + toCql RoleExternalPartner = Cql.CqlInt 4 + + fromCql (Cql.CqlInt i) = case i of + 1 -> return RoleOwner + 2 -> return RoleAdmin + 3 -> return RoleMember + 4 -> return RoleExternalPartner + n -> fail $ "Unexpected Role value: " ++ show n + fromCql _ = fail "Role value: int expected" + +instance Cql.Cql Permissions where + ctype = Cql.Tagged $ Cql.UdtColumn "permissions" [("self", Cql.BigIntColumn), ("copy", Cql.BigIntColumn)] + + toCql p = + let f = Cql.CqlBigInt . fromIntegral . permsToInt in + Cql.CqlUdt [("self", f (p^.self)), ("copy", f (p^.copy))] + + fromCql (Cql.CqlUdt p) = do + let f = intToPerms . 
fromIntegral :: Int64 -> Set.Set Perm + s <- Err.note "missing 'self' permissions" ("self" `lookup` p) >>= Cql.fromCql + d <- Err.note "missing 'copy' permissions" ("copy" `lookup` p) >>= Cql.fromCql + r <- Err.note "invalid permissions" (newPermissions (f s) (f d)) + pure r + fromCql _ = fail "permissions: udt expected" +#endif diff --git a/libs/galley-types/src/Galley/Types/Teams/Swagger.hs b/libs/galley-types/src/Galley/Types/Teams/Swagger.hs index 99aea68b008..c704808bbb2 100644 --- a/libs/galley-types/src/Galley/Types/Teams/Swagger.hs +++ b/libs/galley-types/src/Galley/Types/Teams/Swagger.hs @@ -89,9 +89,13 @@ teamMember = defineModel "TeamMember" $ do description "team member data" property "user" bytes' $ description "user ID" - property "permissions" (ref permissions) $ + property "permissions" (ref permissions) $ do description "The permissions this user has in the given team \ \ (only visible with permission `GetMemberPermissions`)." + optional -- not optional in the type, but in the json instance. (in + -- servant, we could probably just add a helper type for this.) + -- TODO: even without servant, it would be nicer to introduce + -- a type with optional permissions. permissions :: Model permissions = defineModel "Permissions" $ do diff --git a/libs/galley-types/test/unit/Main.hs b/libs/galley-types/test/unit/Main.hs new file mode 100644 index 00000000000..905451893be --- /dev/null +++ b/libs/galley-types/test/unit/Main.hs @@ -0,0 +1,10 @@ +module Main (main) where + +import Imports +import qualified Test.Galley.Types +import Test.Tasty + +main :: IO () +main = defaultMain $ testGroup "Tests" + [ Test.Galley.Types.tests + ] diff --git a/libs/galley-types/test/unit/Test/Galley/Types.hs b/libs/galley-types/test/unit/Test/Galley/Types.hs new file mode 100644 index 00000000000..cb5fcba0893 --- /dev/null +++ b/libs/galley-types/test/unit/Test/Galley/Types.hs @@ -0,0 +1,24 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE TypeApplications #-} + +module Test.Galley.Types where + +import Imports +import Control.Lens +import Galley.Types.Teams +import Test.Tasty +import Test.Tasty.HUnit +import Data.Set hiding (drop) + +tests :: TestTree +tests = testGroup "Tests" + [ testCase "owner has all permissions" $ + rolePermissions RoleOwner @=? fullPermissions + , testCase "smaller roles (further to the left/top in the type def) are strictly more powerful" $ + -- we may not want to maintain this property in the future when adding more roles, but for + -- now it's true, and it's nice to have that written down somewhere. + forM_ [(r1, r2) | r1 <- [minBound..], r2 <- drop 1 [r1..]] $ \(r1, r2) -> do + assertBool "owner.self" ((rolePermissions r2 ^. self) `isSubsetOf` (rolePermissions r1 ^. self)) + assertBool "owner.copy" ((rolePermissions r2 ^. copy) `isSubsetOf` (rolePermissions r1 ^. copy)) + ] diff --git a/libs/gundeck-types/src/Gundeck/Types/Presence.hs b/libs/gundeck-types/src/Gundeck/Types/Presence.hs index 42fba263b48..98a6fc896c6 100644 --- a/libs/gundeck-types/src/Gundeck/Types/Presence.hs +++ b/libs/gundeck-types/src/Gundeck/Types/Presence.hs @@ -14,14 +14,19 @@ import Gundeck.Types.Common as Common import qualified Data.ByteString.Lazy as Lazy +-- | This is created in gundeck by cannon every time the client opens a new websocket connection. +-- (That's why we always have a 'ConnId' from the most recent connection by that client.) 
data Presence = Presence
    { userId    :: !UserId
    , connId    :: !ConnId
    , resource  :: !URI -- ^ cannon instance hosting the presence
-    , clientId  :: !(Maybe ClientId)
+    , clientId  :: !(Maybe ClientId) -- ^ This is 'Nothing' if either (a) the presence is older
+                                     -- than mandatory end-to-end encryption, or (b) the client is
+                                     -- operating the team settings pages without the need for
+                                     -- end-to-end crypto.
    , createdAt :: !Milliseconds
-    , __field   :: !Lazy.ByteString -- temp. addition to ease migration
-    } deriving (Eq, Show)
+    , __field   :: !Lazy.ByteString -- ^ REFACTOR: temp. addition to ease migration
+    } deriving (Eq, Ord, Show)

instance ToJSON Presence where
    toJSON p = object
diff --git a/libs/gundeck-types/src/Gundeck/Types/Push/V2.hs b/libs/gundeck-types/src/Gundeck/Types/Push/V2.hs
index cd4a23cc397..07d6795590e 100644
--- a/libs/gundeck-types/src/Gundeck/Types/Push/V2.hs
+++ b/libs/gundeck-types/src/Gundeck/Types/Push/V2.hs
@@ -3,9 +3,10 @@
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
+{-# LANGUAGE TypeApplications #-}

module Gundeck.Types.Push.V2
-    ( Push
+    ( Push (..)
    , newPush
    , pushRecipients
    , pushOrigin
@@ -20,7 +21,8 @@ module Gundeck.Types.Push.V2
    , singletonRecipient
    , singletonPayload
-    , Recipient
+    , Recipient (..)
+    , RecipientClients (..)
    , recipient
    , recipientId
    , recipientRoute
@@ -72,8 +74,10 @@ import qualified Data.Set as Set

data Route = RouteAny
-    | RouteDirect
-    | RouteNative
+    | RouteDirect -- ^ 'RouteDirect' messages are different from transient messages: they do not
+                  -- trigger native pushes if the web socket is unavailable, but they are stored in
+                  -- cassandra for later pickup.
+    | RouteNative -- ^ REFACTOR: this can probably be removed.
    deriving (Eq, Ord, Show)

instance FromJSON Route where
@@ -93,7 +97,7 @@ instance ToJSON Route where
data Recipient = Recipient
    { _recipientId :: !UserId
    , _recipientRoute :: !Route
-    , _recipientClients :: ![ClientId]
+    , _recipientClients :: !RecipientClients
    } deriving (Show)

instance Eq Recipient where
@@ -102,24 +106,40 @@ instance Ord Recipient where
    compare r r' = compare (_recipientId r) (_recipientId r')

+data RecipientClients
+    = RecipientClientsAll -- ^ All clients of some user
+    | RecipientClientsSome (List1 ClientId) -- ^ An explicit list of clients
+    deriving (Eq, Show)
+
makeLenses ''Recipient

recipient :: UserId -> Route -> Recipient
-recipient u r = Recipient u r []
+recipient u r = Recipient u r RecipientClientsAll

instance FromJSON Recipient where
    parseJSON = withObject "Recipient" $ \p ->
        Recipient <$> p .: "user_id"
                  <*> p .: "route"
-                 <*> p .:? "clients" .!= []
+                 <*> p .:? "clients" .!= RecipientClientsAll

instance ToJSON Recipient where
    toJSON (Recipient u r c) = object
        $ "user_id" .= u
        # "route" .= r
-        # "clients" .= (if null c then Nothing else Just c)
+        # "clients" .= c
        # []

+-- "All clients" is encoded in the API as an empty list.
+instance FromJSON RecipientClients where
+    parseJSON x = parseJSON @[ClientId] x >>= \case
+        [] -> pure RecipientClientsAll
+        c:cs -> pure (RecipientClientsSome (list1 c cs))
+
+instance ToJSON RecipientClients where
+    toJSON = toJSON . \case
+        RecipientClientsAll -> []
+        RecipientClientsSome cs -> toList cs
+
-----------------------------------------------------------------------------
-- ApsData
@@ -177,6 +197,17 @@ instance FromJSON ApsData where
-----------------------------------------------------------------------------
-- Priority

+-- | REFACTOR: do we ever use LowPriority?
+-- to test, (a) remove the constructor and see what goes
+-- wrong; (b) log use of 'LowPriority' by clients in production and watch it a few days. if it is
+-- not used anywhere, consider removing the entire type, or just the unused constructor.
+--
+-- @neongreen writes: [...] nobody seems to ever set `native_priority` in the client code. Exhibits
+-- A1 and A2:
+--
+-- *
+-- *
+--
+-- see also: 'Galley.Types.Proto.Priority'.
data Priority = LowPriority | HighPriority
    deriving (Eq, Show, Ord, Enum)
@@ -197,18 +228,20 @@ data Push = Push
    { _pushRecipients :: Range 1 1024 (Set Recipient)
      -- ^ Recipients
      --
-      -- TODO: should be @Set (Recipient, Maybe (NonEmptySet ConnId))@, and '_pushConnections'
-      -- should go away. Rationale: the current setup only works under the assumption that no
-      -- 'ConnId' is used by two 'Recipient's. This is *probably* correct, but not in any contract.
-      -- Coincidentally, where are we using '_pushConnections' to limit pushes to individual
-      -- devices? Is it possible we can remove '_pushConnections' without touching
-      -- '_pushRecipients'?
-      --
-      -- REFACTOR: is it possible that 'pushOrigin' has been refactored away in #531?
+      -- REFACTOR: '_pushRecipients' should be @Set (Recipient, Maybe (NonEmptySet ConnId))@, and
+      -- '_pushConnections' should go away. Rationale: the current setup only works under the
+      -- assumption that no 'ConnId' is used by two 'Recipient's. This is *probably* correct, but
+      -- not in any contract. (Changing this may require a new version module, since we need to
+      -- support both the old and the new data type simultaneously during upgrade.)
    , _pushOrigin :: !UserId
      -- ^ Originating user
+      --
+      -- REFACTOR: where is this required, and for what? or can it be removed? (see also: #531)
    , _pushConnections :: !(Set ConnId)
-      -- ^ Destination connections, if a directed push is desired.
+      -- ^ Destination connections. If empty, ignore. Otherwise, filter the connections derived
+      -- from '_pushRecipients' and only push to those contained in this set.
+      --
+      -- REFACTOR: change this to @_pushConnectionWhitelist :: Maybe (Set ConnId)@.
    , _pushOriginConnection :: !(Maybe ConnId)
      -- ^ Originating connection, if any.
    , _pushTransient :: !Bool
@@ -222,7 +255,7 @@ data Push = Push
      -- REFACTOR: this makes no sense any more since native push notifications have no more payload.
      -- https://github.com/wireapp/wire-server/pull/546
    , _pushNativeAps :: !(Maybe ApsData)
-      -- ^ APNs-specific metadata.
+      -- ^ APNs-specific metadata. REFACTOR: can this be removed?
    , _pushNativePriority :: !Priority
      -- ^ Native push priority.
    , _pushPayload :: !(List1 Object)
diff --git a/libs/libzauth/README.md b/libs/libzauth/README.md
new file mode 100644
index 00000000000..0d60c96ccd8
--- /dev/null
+++ b/libs/libzauth/README.md
@@ -0,0 +1,23 @@
+# libzauth
+
+## Compile and install natively
+
+To build libzauth natively, you will need to have the usual C compiler toolchains installed.
+ +### Ubuntu / Debian + +#### Building / Installing + +```bash +make dist +sudo dpkg -i libzauth*.deb +sudo ldconfig +``` + +### Generic + +#### Building / Installing + +```bash +make install +``` diff --git a/libs/libzauth/libzauth-c/Makefile b/libs/libzauth/libzauth-c/Makefile index 60aaa43a54c..d75be640112 100644 --- a/libs/libzauth/libzauth-c/Makefile +++ b/libs/libzauth/libzauth-c/Makefile @@ -1,12 +1,14 @@ LANG := en_US.UTF-8 SHELL := /usr/bin/env bash VERSION := "3.0.0" -ARCH := amd64 +ARCH := $(shell if [ -f "`which dpkg-architecture`" ]; then dpkg-architecture -qDEB_HOST_ARCH; else [ -f "`which dpkg`" ] && dpkg --print-architecture; fi ) BUILD ?= 1 OS := $(shell uname -s | tr '[:upper:]' '[:lower:]') # If we can install libzauth globally, we'll install it there, otherwise # it'll go into ~/.wire-dev (unless it's overridden) -PREFIX ?= $(shell [ -w /usr/local ] && echo /usr/local || echo "$(HOME)/.wire-dev") +PREFIX_INSTALL ?= $(shell [ -w /usr/local ] && echo /usr/local || echo "$(HOME)/.wire-dev") +# If we are building a debian package, just use /usr/local. +PREFIX_PACKAGE ?= /usr/local ifeq ($(OS), darwin) LIB_TYPE := dylib @@ -19,7 +21,7 @@ all: build clean: cargo clean rm -rf test/target - rm -rf deb$(PREFIX)/ + rm -rf deb$(PREFIX_PACKAGE)/ build: cargo build @@ -28,27 +30,27 @@ build-release: cargo build --release install: build-release - mkdir -p $(PREFIX)/include - mkdir -p $(PREFIX)/lib/pkgconfig - cp src/zauth.h $(PREFIX)/include/ + mkdir -p $(PREFIX_INSTALL)/include + mkdir -p $(PREFIX_INSTALL)/lib/pkgconfig + cp src/zauth.h $(PREFIX_INSTALL)/include/ sed -e "s~<>~$(VERSION)~" \ - -e "s~<>~$(PREFIX)~" \ - src/libzauth.pc > $(PREFIX)/lib/pkgconfig/libzauth.pc - cp target/release/libzauth.$(LIB_TYPE) $(PREFIX)/lib/ + -e "s~<>~$(PREFIX_INSTALL)~" \ + src/libzauth.pc > $(PREFIX_INSTALL)/lib/pkgconfig/libzauth.pc + cp target/release/libzauth.$(LIB_TYPE) $(PREFIX_INSTALL)/lib/ uninstall: - rm -f $(PREFIX)/include/zauth.h - rm -f $(PREFIX)/lib/libzauth.$(LIB_TYPE) - rm -f $(PREFIX)/lib/pkgconfig/libzauth.pc + rm -f $(PREFIX_INSTALL)/include/zauth.h + rm -f $(PREFIX_INSTALL)/lib/libzauth.$(LIB_TYPE) + rm -f $(PREFIX_INSTALL)/lib/pkgconfig/libzauth.pc dist: build-release - mkdir -p deb$(PREFIX)/include - mkdir -p deb$(PREFIX)/lib/pkgconfig - cp src/zauth.h deb$(PREFIX)/include/ + mkdir -p deb$(PREFIX_PACKAGE)/include + mkdir -p deb$(PREFIX_PACKAGE)/lib/pkgconfig + cp src/zauth.h deb$(PREFIX_PACKAGE)/include/ sed -e "s~<>~$(VERSION)~" \ - -e "s~<>~$(PREFIX)~" \ - src/libzauth.pc > deb$(PREFIX)/lib/pkgconfig/libzauth.pc - cp target/release/libzauth.$(LIB_TYPE) deb$(PREFIX)/lib/ + -e "s~<>~$(PREFIX_PACKAGE)~" \ + src/libzauth.pc > deb$(PREFIX_PACKAGE)/lib/pkgconfig/libzauth.pc + cp target/release/libzauth.$(LIB_TYPE) deb$(PREFIX_PACKAGE)/lib/ ifeq ($(OS), linux) makedeb --name=libzauth \ --version=$(VERSION) \ diff --git a/libs/libzauth/libzauth-c/deb/DEBIAN/control b/libs/libzauth/libzauth-c/deb/DEBIAN/control index 295f4c87c03..e332a9282fa 100644 --- a/libs/libzauth/libzauth-c/deb/DEBIAN/control +++ b/libs/libzauth/libzauth-c/deb/DEBIAN/control @@ -1,5 +1,5 @@ Package: libzauth Version: <>+<> Maintainer: Wire Swiss GmbH -Architecture: amd64 +Architecture: <> Description: zauth token parsing and verification diff --git a/libs/libzauth/libzauth-c/src/lib.rs b/libs/libzauth/libzauth-c/src/lib.rs index e7265cae340..7a688af6d84 100644 --- a/libs/libzauth/libzauth-c/src/lib.rs +++ b/libs/libzauth/libzauth-c/src/lib.rs @@ -1,7 +1,7 @@ extern crate libc; extern crate zauth; -use 
libc::{c_long, size_t, uint8_t}; +use libc::{size_t, uint8_t}; use std::char; use std::fs::File; use std::io::{self, BufReader, Read}; @@ -130,10 +130,11 @@ pub extern fn zauth_token_type(t: &ZauthToken) -> ZauthTokenType { From::from(t.0.token_type) } -#[no_mangle] -pub extern fn zauth_token_time(t: &ZauthToken) -> c_long { - t.0.timestamp -} +// Commented out, looks unused, and causing portability issues with ia32. +//#[no_mangle] +//pub extern fn zauth_token_time(t: &ZauthToken) -> c_long { +// t.0.timestamp +//} #[no_mangle] pub extern fn zauth_token_version(t: &ZauthToken) -> uint8_t { diff --git a/libs/types-common/src/Data/Id.hs b/libs/types-common/src/Data/Id.hs index 8fa7298f4dc..97e1908f0e9 100644 --- a/libs/types-common/src/Data/Id.hs +++ b/libs/types-common/src/Data/Id.hs @@ -16,6 +16,7 @@ import Data.ByteString.Builder (byteString) import Data.ByteString.Conversion import Data.Default (Default(..)) import Data.Hashable (Hashable) +import Data.String.Conversions (cs) import Data.Text.Encoding (decodeUtf8, encodeUtf8) import Data.Text.Lazy (toStrict) import Data.Text.Lazy.Builder @@ -34,7 +35,6 @@ import qualified Data.ByteString.Lazy as L import Data.ProtocolBuffers.Internal #endif -import qualified Data.ByteString.Char8 as B import qualified Data.Text as T import qualified Data.UUID as UUID @@ -66,6 +66,8 @@ newtype Id a = Id { toUUID :: UUID } deriving (Eq, Ord, Generic, NFData) +-- REFACTOR: non-derived, custom show instances break pretty-show and violate the law +-- that @show . read == id@. can we derive Show here? instance Show (Id a) where show = toString . toUUID @@ -135,10 +137,11 @@ instance Arbitrary (Id a) where -- ConnId ---------------------------------------------------------------------- --- | Handle for a device. Used mostly by Cannon and Gundeck to identify a websocket connection. --- Historically, it is older than 'ClientId' and precedes end-to-end encryption, but it may be --- replaced by 'ClientId' at some point in the future. Unique only together with a 'UserId', stored --- in Redis, lives as long as the device is connected. +-- | Handle for a device. Derived from the access token (see 'Data.ZAuth.Token.Access'). Unique +-- only together with a 'UserId'. Historically, it is older than 'ClientId' and precedes end-to-end +-- encryption, but there are still situations in which 'ClientId' is not applicable (See also: +-- 'Presence'). Used by Cannon and Gundeck to identify a websocket connection, but also in other +-- places. newtype ConnId = ConnId { fromConnId :: ByteString } deriving ( Eq @@ -165,23 +168,26 @@ instance FromJSON ConnId where -- lives as long as the device is registered. See also: 'ConnId'. newtype ClientId = ClientId { client :: Text - } deriving (Eq, Ord, Show, ToByteString, Hashable, NFData, ToJSON) - -instance FromByteString ClientId where - parser = do - x <- takeByteString - unless (B.length x <= 20 && B.all isHexDigit x) $ - fail "Invalid client ID" - either fail (return . ClientId) (runParser parser x) + } deriving (Eq, Ord, Show, ToByteString, Hashable, NFData, ToJSON, ToJSONKey) newClientId :: Word64 -> ClientId newClientId = ClientId . toStrict . toLazyText . 
hexadecimal +clientIdFromByteString :: Text -> Either String ClientId +clientIdFromByteString txt = if T.length txt <= 20 && T.all isHexDigit txt + then Right $ ClientId txt + else Left "Invalid ClientId" + +instance FromByteString ClientId where + parser = do + bs <- takeByteString + either fail pure $ clientIdFromByteString (cs bs) + instance FromJSON ClientId where - parseJSON = withText "ClientId" $ \x -> do - unless (T.length x <= 20 && T.all isHexDigit x) $ - fail "Invalid ClientId" - return (ClientId x) + parseJSON = withText "ClientId" $ either fail pure . clientIdFromByteString + +instance FromJSONKey ClientId where + fromJSONKey = FromJSONKeyTextParser $ either fail pure . clientIdFromByteString #ifdef WITH_CQL deriving instance Cql ClientId diff --git a/libs/types-common/src/Data/Json/Util.hs b/libs/types-common/src/Data/Json/Util.hs index edd2d99c5f6..224914cae09 100644 --- a/libs/types-common/src/Data/Json/Util.hs +++ b/libs/types-common/src/Data/Json/Util.hs @@ -54,7 +54,7 @@ infixr 5 # -- Construct values using 'toUTCTimeMillis'; deconstruct with 'fromUTCTimeMillis'. -- Unlike with 'UTCTime', 'Show' renders ISO string. newtype UTCTimeMillis = UTCTimeMillis { fromUTCTimeMillis :: UTCTime } - deriving (Eq) + deriving (Eq, Ord) {-# INLINE toUTCTimeMillis #-} toUTCTimeMillis :: HasCallStack => UTCTime -> UTCTimeMillis diff --git a/libs/types-common/types-common.cabal b/libs/types-common/types-common.cabal index b623212fef3..1188dc21e46 100644 --- a/libs/types-common/types-common.cabal +++ b/libs/types-common/types-common.cabal @@ -73,6 +73,7 @@ library , safe >= 0.3 , scientific >= 0.3.4 , singletons >= 2.0 + , string-conversions , swagger >= 0.3 , tagged >= 0.8 , tasty >= 0.11 diff --git a/libs/wai-utilities/src/Network/Wai/Utilities/Server.hs b/libs/wai-utilities/src/Network/Wai/Utilities/Server.hs index ec641c5734a..e45415344e3 100644 --- a/libs/wai-utilities/src/Network/Wai/Utilities/Server.hs +++ b/libs/wai-utilities/src/Network/Wai/Utilities/Server.hs @@ -195,8 +195,9 @@ catchErrors l m app req k = errorResponse ex = do er <- runHandlers ex errorHandlers when (statusCode (Error.code er) >= 500) $ - logIO l Log.Error (Just req) (show ex) + logIO l Log.Error (Just req) (oneline <$> show ex) onError l m req k er + oneline c = if isSpace c then ' ' else c {-# INLINEABLE catchErrors #-} -- | Standard handlers for turning exceptions into appropriate diff --git a/libs/zauth/src/Data/ZAuth/Token.hs b/libs/zauth/src/Data/ZAuth/Token.hs index 8685389fc25..0d7a4283c50 100644 --- a/libs/zauth/src/Data/ZAuth/Token.hs +++ b/libs/zauth/src/Data/ZAuth/Token.hs @@ -80,7 +80,7 @@ data Header = Header data Access = Access { _userId :: !UUID - , _connection :: !Word64 + , _connection :: !Word64 -- ^ 'ConnId' is derived from this. } deriving (Eq, Show) data User = User diff --git a/services/brig/Dockerfile b/services/brig/Dockerfile deleted file mode 100644 index 0ee00c9bf23..00000000000 --- a/services/brig/Dockerfile +++ /dev/null @@ -1,26 +0,0 @@ -# Requires docker >= 17.05 (requires support for multi-stage builds) -# Requires to have created the wire-server-builder and wire-server-deps docker images - -ARG builder=quay.io/wire/alpine-builder -ARG deps=quay.io/wire/alpine-deps - -#--- Builder stage --- -FROM ${builder} as builder - -ARG target=install - -COPY . 
/src/wire-server/ - -RUN cd /src/wire-server/services/brig && make ${target} - -#--- Minified stage --- -FROM ${deps} - -ARG executable -COPY --from=builder /src/wire-server/dist/${executable} /usr/bin/${executable} -COPY --from=builder /src/wire-server/services/brig/deb/opt/brig/templates/ /usr/share/wire/templates/ - -# ARGs are not available at runtime, create symlink at build time -# more info: https://stackoverflow.com/questions/40902445/using-variable-interpolation-in-string-in-docker -RUN ln -s /usr/bin/${executable} /usr/bin/service -ENTRYPOINT ["/usr/bin/dumb-init", "--", "/usr/bin/service"] diff --git a/services/brig/Makefile b/services/brig/Makefile index 48b67282ccd..c8ae9bebf9d 100644 --- a/services/brig/Makefile +++ b/services/brig/Makefile @@ -13,6 +13,8 @@ DEB_IT := dist/$(NAME)-integration_$(VERSION)+$(BUILD)_amd64.deb DEB_SCHEMA := dist/$(NAME)-schema_$(VERSION)+$(BUILD)_amd64.deb DEB_INDEX := dist/$(NAME)-index_$(VERSION)+$(BUILD)_amd64.deb EXECUTABLES := $(NAME) $(NAME)-integration $(NAME)-schema $(NAME)-index +DOCKER_USER ?= quay.io/wire +DOCKER_TAG ?= local guard-%: @ if [ "${${*}}" = "" ]; then \ @@ -127,10 +129,12 @@ index-reset: install .PHONY: docker docker: $(foreach executable,$(EXECUTABLES),\ - docker build -t $(executable) \ - -f Dockerfile \ + docker build -t $(DOCKER_USER)/$(executable):$(DOCKER_TAG) \ + -f ../../build/alpine/Dockerfile.executable \ --build-arg executable=$(executable) \ - ../.. \ + ../.. && \ + docker tag $(DOCKER_USER)/$(executable):$(DOCKER_TAG) $(DOCKER_USER)/$(executable):latest && \ + if test -n "$$DOCKER_PUSH"; then docker login -u $(DOCKER_USERNAME) -p $(DOCKER_PASSWORD); docker push $(DOCKER_USER)/$(executable):$(DOCKER_TAG); docker push $(DOCKER_USER)/$(executable):latest; fi \ ;) .PHONY: time diff --git a/services/brig/brig.cabal b/services/brig/brig.cabal index d5bb3f43155..5a38d14468b 100644 --- a/services/brig/brig.cabal +++ b/services/brig/brig.cabal @@ -148,7 +148,6 @@ library , iproute >= 1.5 , lens >= 3.8 , lens-aeson >= 1.0 - , lifted-async >= 0.9.3 , lifted-base >= 0.2 , mime , mime-mail >= 0.4 @@ -292,6 +291,8 @@ executable brig-schema V51 V52 V53 + V54 + V55 build-depends: base @@ -396,7 +397,6 @@ executable brig-integration , imports , lens >= 3.9 , lens-aeson >= 1.0 - , lifted-async >= 0.9.3 , mime >= 0.4 , mtl >= 2.1 , network diff --git a/services/brig/schema/src/Main.hs b/services/brig/schema/src/Main.hs index c02325ee1b8..7f2777196f3 100644 --- a/services/brig/schema/src/Main.hs +++ b/services/brig/schema/src/Main.hs @@ -50,6 +50,8 @@ import qualified V50 import qualified V51 import qualified V52 import qualified V53 +import qualified V54 +import qualified V55 main :: IO () main = do @@ -100,4 +102,6 @@ main = do , V51.migration , V52.migration , V53.migration + , V54.migration + , V55.migration ] `finally` close l diff --git a/services/brig/schema/src/V54.hs b/services/brig/schema/src/V54.hs new file mode 100644 index 00000000000..f0d3d42abb6 --- /dev/null +++ b/services/brig/schema/src/V54.hs @@ -0,0 +1,12 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE QuasiQuotes #-} + +module V54 (migration) where + +import Imports +import Cassandra.Schema +import Text.RawString.QQ + +migration :: Migration +migration = Migration 54 "Add metadata to team invitations" $ do + schema' [r| alter table team_invitation add created_by uuid; |] diff --git a/services/brig/schema/src/V55.hs b/services/brig/schema/src/V55.hs new file mode 100644 index 00000000000..9c8a008cdb7 --- /dev/null +++ b/services/brig/schema/src/V55.hs @@ 
-0,0 +1,12 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE QuasiQuotes #-} + +module V55 (migration) where + +import Imports +import Cassandra.Schema +import Text.RawString.QQ + +migration :: Migration +migration = Migration 55 "Add optional role to team invitations" $ do + schema' [r| alter table team_invitation add role int; |] diff --git a/services/brig/src/Brig/API/User.hs b/services/brig/src/Brig/API/User.hs index ea6e4e28631..6c8099a6809 100644 --- a/services/brig/src/Brig/API/User.hs +++ b/services/brig/src/Brig/API/User.hs @@ -73,6 +73,7 @@ import Brig.Password import Brig.Types import Brig.Types.Code (Timeout (..)) import Brig.Types.Intra +import Brig.Types.Team.Invitation (inCreatedAt, inCreatedBy) import Brig.User.Auth.Cookie (revokeAllCookies) import Brig.User.Email import Brig.User.Event @@ -256,7 +257,9 @@ createUser new@NewUser{..} = do ok <- lift $ Data.claimKey uk uid unless ok $ throwE $ DuplicateUserKey uk - added <- lift $ Intra.addTeamMember uid (Team.iiTeam ii) + let minvmeta :: (Maybe (UserId, UTCTimeMillis), Team.Role) + minvmeta = ((, inCreatedAt inv) <$> inCreatedBy inv, Team.inRole inv) + added <- lift $ Intra.addTeamMember uid (Team.iiTeam ii) minvmeta unless added $ throwE TooManyTeamMembers lift $ do @@ -270,7 +273,7 @@ createUser new@NewUser{..} = do addUserToTeamSSO :: UserAccount -> TeamId -> UserIdentity -> ExceptT CreateUserError AppIO CreateUserTeam addUserToTeamSSO account tid ident = do let uid = userId (accountUser account) - added <- lift $ Intra.addTeamMember uid tid + added <- lift $ Intra.addTeamMember uid tid (Nothing, Team.defaultRole) unless added $ throwE TooManyTeamMembers lift $ do diff --git a/services/brig/src/Brig/App.hs b/services/brig/src/Brig/App.hs index 963962850bc..0a98f81790c 100644 --- a/services/brig/src/Brig/App.hs +++ b/services/brig/src/Brig/App.hs @@ -118,7 +118,7 @@ import qualified System.Logger as Log import qualified System.Logger.Class as LC schemaVersion :: Int32 -schemaVersion = 53 +schemaVersion = 55 ------------------------------------------------------------------------------- -- Environment diff --git a/services/brig/src/Brig/Data/Connection.hs b/services/brig/src/Brig/Data/Connection.hs index d2d91755a7b..45f61db608a 100644 --- a/services/brig/src/Brig/Data/Connection.hs +++ b/services/brig/src/Brig/Data/Connection.hs @@ -23,7 +23,7 @@ import Brig.Data.Types as T import Brig.Types import Brig.Types.Intra import Cassandra -import UnliftIO.Async.Extended (mapMPooled) +import UnliftIO.Async (pooledMapConcurrentlyN_) import Data.Conduit ((.|), runConduit) import Data.Id import Data.Json.Util (UTCTimeMillis, toUTCTimeMillis) @@ -108,7 +108,7 @@ countConnections u r = do deleteConnections :: UserId -> AppIO () deleteConnections u = do runConduit $ paginateC contactsSelect (paramsP Quorum (Identity u) 100) x1 - .| C.mapM_ (void . mapMPooled 16 delete) + .| C.mapM_ (pooledMapConcurrentlyN_ 16 delete) retry x1 . 
write connectionClear $ params Quorum (Identity u) where delete (other, _status) = write connectionDelete $ params Quorum (other, u) diff --git a/services/brig/src/Brig/IO/Intra.hs b/services/brig/src/Brig/IO/Intra.hs index 1b7eca83434..5c927e98edd 100644 --- a/services/brig/src/Brig/IO/Intra.hs +++ b/services/brig/src/Brig/IO/Intra.hs @@ -51,7 +51,6 @@ import Bilge.RPC import Brig.App import Brig.Data.Connection (lookupContactList) import Brig.Data.User (lookupUsers) -import Brig.API.Error (incorrectPermissions) import Brig.API.Types import Brig.RPC import Brig.Types @@ -59,7 +58,6 @@ import Brig.Types.Intra import Brig.User.Event import Control.Lens (view, (.~), (?~), (^.)) import Control.Lens.Prism (_Just) -import Control.Monad.Catch import Control.Retry import Data.Aeson hiding (json) import Data.ByteString.Conversion @@ -68,6 +66,7 @@ import Data.Json.Util ((#)) import Data.List1 (List1, list1, singleton) import Data.List.Split (chunksOf) import Data.Range +import Data.Json.Util (UTCTimeMillis) import Galley.Types (Connect (..), Conversation) import Gundeck.Types.Push.V2 import Network.HTTP.Types.Method @@ -530,31 +529,22 @@ rmClient u c = do ------------------------------------------------------------------------------- -- Team Management -addTeamMember :: UserId -> TeamId -> AppIO Bool -addTeamMember u tid = do +addTeamMember :: UserId -> TeamId -> (Maybe (UserId, UTCTimeMillis), Team.Role) -> AppIO Bool +addTeamMember u tid (minvmeta, role) = do debug $ remote "galley" . msg (val "Adding member to team") - permissions <- maybe (throwM incorrectPermissions) - return - (Team.newPermissions perms perms) - rs <- galleyRequest POST (req permissions) + rs <- galleyRequest POST req return $ case Bilge.statusCode rs of 200 -> True _ -> False where - perms = Set.fromList [ Team.CreateConversation - , Team.DeleteConversation - , Team.AddConversationMember - , Team.RemoveConversationMember - , Team.GetTeamConversations - , Team.GetMemberPermissions - ] - t prm = Team.newNewTeamMember $ Team.newTeamMember u prm - req p = paths ["i", "teams", toByteString' tid, "members"] + prm = Team.rolePermissions role + bdy = Team.newNewTeamMember $ Team.newTeamMember u prm minvmeta + req = paths ["i", "teams", toByteString' tid, "members"] . header "Content-Type" "application/json" . zUser u . expect [status200, status403] - . lbytes (encode $ t p) + . lbytes (encode bdy) createTeam :: UserId -> Team.BindingNewTeam -> TeamId -> AppIO CreateUserTeam createTeam u t@(Team.BindingNewTeam bt) teamid = do diff --git a/services/brig/src/Brig/Provider/API.hs b/services/brig/src/Brig/Provider/API.hs index 197c6072d5b..f3aaeefa9b6 100644 --- a/services/brig/src/Brig/Provider/API.hs +++ b/services/brig/src/Brig/Provider/API.hs @@ -31,7 +31,7 @@ import Brig.Types.Client import Brig.Types.User (publicProfile, User (..), Pict (..)) import Brig.Types.Provider import Brig.Types.Search -import UnliftIO.Async.Extended (mapMPooled) +import UnliftIO.Async (pooledMapConcurrentlyN_) import Control.Lens (view, (^.)) import Control.Error (throwE) import Control.Exception.Enclosed (handleAny) @@ -599,7 +599,7 @@ finishDeleteService pid sid = do let tags = unsafeRange (serviceTags svc) name = serviceName svc runConduit $ User.lookupServiceUsers pid sid - .| C.mapM_ (void . 
mapMPooled 16 kick) + .| C.mapM_ (pooledMapConcurrentlyN_ 16 kick) RPC.removeServiceConn pid sid DB.deleteService pid sid name tags where @@ -699,7 +699,7 @@ updateServiceWhitelist (uid ::: con ::: tid ::: req) = do -- conversations lift $ runConduit $ User.lookupServiceUsersForTeam pid sid tid - .| C.mapM_ (void . mapMPooled 16 (\(bid, cid) -> + .| C.mapM_ (pooledMapConcurrentlyN_ 16 (\(bid, cid) -> deleteBot uid (Just con) bid cid)) DB.deleteServiceWhitelist (Just tid) pid sid return (setStatus status200 empty) diff --git a/services/brig/src/Brig/Team/API.hs b/services/brig/src/Brig/Team/API.hs index f914596faa9..28f293f6c37 100644 --- a/services/brig/src/Brig/Team/API.hs +++ b/services/brig/src/Brig/Team/API.hs @@ -1,8 +1,9 @@ -{-# LANGUAGE LambdaCase #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE LambdaCase #-} {-# LANGUAGE MultiWayIf #-} {-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE TypeOperators #-} module Brig.Team.API where @@ -16,7 +17,7 @@ import Brig.Data.UserKey (userEmailKey) import Brig.Email import Brig.Options (setMaxTeamSize, setTeamInvitationTimeout) import Brig.Team.Email -import Brig.Team.Util (ensurePermissions) +import Brig.Team.Util (ensurePermissions, ensurePermissionToAddUser) import Brig.Types.Team.Invitation import Brig.Types.User (InvitationCode, emailIdentity) import Brig.Types.Intra (AccountStatus (..)) @@ -162,10 +163,12 @@ getInvitationCode (_ ::: t ::: r) = do createInvitation :: JSON ::: UserId ::: TeamId ::: Request -> Handler Response createInvitation (_ ::: uid ::: tid ::: req) = do - body <- parseJsonBody req + body :: InvitationRequest <- parseJsonBody req idt <- maybe (throwStd noIdentity) return =<< lift (fetchUserIdentity uid) from <- maybe (throwStd noEmail) return (emailIdentity idt) - ensurePermissions uid tid [Team.AddTeamMember] + let inviteePerms = Team.rolePermissions inviteeRole + inviteeRole = fromMaybe Team.defaultRole . irRole $ body + ensurePermissionToAddUser uid tid inviteePerms email <- either (const $ throwStd invalidEmail) return (validateEmail (irEmail body)) let uk = userEmailKey email blacklisted <- lift $ Blacklist.exists uk @@ -178,12 +181,12 @@ createInvitation (_ ::: uid ::: tid ::: req) = do user <- lift $ Data.lookupKey uk case user of Just _ -> throwStd emailExists - Nothing -> doInvite email from (irLocale body) + Nothing -> doInvite inviteeRole email from (irLocale body) where - doInvite to from lc = lift $ do + doInvite role to from lc = lift $ do now <- liftIO =<< view currentTime timeout <- setTeamInvitationTimeout <$> view settings - (newInv, code) <- DB.insertInvitation tid to now timeout + (newInv, code) <- DB.insertInvitation tid role to now (Just uid) timeout void $ sendInvitationMail to tid from code lc return . setStatus status201 . 
loc (inInvitation newInv) $ json newInv diff --git a/services/brig/src/Brig/Team/DB.hs b/services/brig/src/Brig/Team/DB.hs index 30c56b0f14c..598fd2d4c20 100644 --- a/services/brig/src/Brig/Team/DB.hs +++ b/services/brig/src/Brig/Team/DB.hs @@ -1,7 +1,8 @@ {-# LANGUAGE DataKinds #-} -{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE ViewPatterns #-} module Brig.Team.DB @@ -29,8 +30,9 @@ import Brig.Options import Brig.Types.Common import Brig.Types.User import Brig.Types.Team.Invitation +import Galley.Types.Teams (Role) import Cassandra -import UnliftIO.Async.Extended (mapMPooled) +import UnliftIO.Async (pooledMapConcurrentlyN_) import Data.Id import Data.Conduit ((.|), runConduit) import Data.Json.Util (UTCTimeMillis, toUTCTimeMillis) @@ -41,6 +43,7 @@ import Data.Time.Clock import OpenSSL.Random (randBytes) import qualified Data.Conduit.List as C +import qualified Galley.Types.Teams as Team mkInvitationCode :: IO InvitationCode mkInvitationCode = InvitationCode . encodeBase64Url <$> randBytes 24 @@ -56,33 +59,35 @@ data InvitationInfo = InvitationInfo insertInvitation :: MonadClient m => TeamId + -> Role -> Email -> UTCTime + -> Maybe UserId -> Timeout -- ^ The timeout for the invitation code. -> m (Invitation, InvitationCode) -insertInvitation t email (toUTCTimeMillis -> now) timeout = do +insertInvitation t role email (toUTCTimeMillis -> now) minviter timeout = do iid <- liftIO mkInvitationId code <- liftIO mkInvitationCode - let inv = Invitation t iid email now + let inv = Invitation t role iid email now minviter retry x5 $ batch $ do setType BatchLogged setConsistency Quorum - addPrepQuery cqlInvitation (t, iid, code, email, now, round timeout) + addPrepQuery cqlInvitation (t, role, iid, code, email, now, minviter, round timeout) addPrepQuery cqlInvitationInfo (code, t, iid, round timeout) return (inv, code) where cqlInvitationInfo :: PrepQuery W (InvitationCode, TeamId, InvitationId, Int32) () cqlInvitationInfo = "INSERT INTO team_invitation_info (code, team, id) VALUES (?, ?, ?) USING TTL ?" - cqlInvitation :: PrepQuery W (TeamId, InvitationId, InvitationCode, Email, UTCTimeMillis, Int32) () - cqlInvitation = "INSERT INTO team_invitation (team, id, code, email, created_at) VALUES (?, ?, ?, ?, ?) USING TTL ?" + cqlInvitation :: PrepQuery W (TeamId, Role, InvitationId, InvitationCode, Email, UTCTimeMillis, Maybe UserId, Int32) () + cqlInvitation = "INSERT INTO team_invitation (team, role, id, code, email, created_at, created_by) VALUES (?, ?, ?, ?, ?, ?, ?) USING TTL ?" lookupInvitation :: MonadClient m => TeamId -> InvitationId -> m (Maybe Invitation) lookupInvitation t r = fmap toInvitation <$> retry x1 (query1 cqlInvitation (params Quorum (t, r))) where - cqlInvitation :: PrepQuery R (TeamId, InvitationId) (TeamId, InvitationId, Email, UTCTime) - cqlInvitation = "SELECT team, id, email, created_at FROM team_invitation WHERE team = ? AND id = ?" + cqlInvitation :: PrepQuery R (TeamId, InvitationId) (TeamId, Maybe Role, InvitationId, Email, UTCTimeMillis, Maybe UserId) + cqlInvitation = "SELECT team, role, id, email, created_at, created_by FROM team_invitation WHERE team = ? AND id = ?" 
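Rows written before these migrations have no `role` column, so the queries above read it back as `Maybe Role`, and the read path falls back to the default (this is what `toInvitation` does further down). A tiny sketch of that rule, not part of the patch, assuming the `Role` and `defaultRole` definitions added to `Galley.Types.Teams` earlier in this diff:

```haskell
-- Sketch only: normalise a nullable role column read from Cassandra.
import Data.Maybe (fromMaybe)
import Galley.Types.Teams (Role (..), defaultRole)

normaliseRole :: Maybe Role -> Role
normaliseRole = fromMaybe defaultRole
-- e.g. normaliseRole Nothing          == RoleMember   (the default)
--      normaliseRole (Just RoleAdmin) == RoleAdmin
```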
lookupInvitationByCode :: MonadClient m => InvitationCode -> m (Maybe Invitation) lookupInvitationByCode i = lookupInvitationInfo i >>= \case @@ -107,11 +112,11 @@ lookupInvitations team start (fromRange -> size) = do toResult more invs = cassandraResultPage $ emptyPage { result = invs , hasMore = more } - cqlSelect :: PrepQuery R (Identity TeamId) (TeamId, InvitationId, Email, UTCTime) - cqlSelect = "SELECT team, id, email, created_at FROM team_invitation WHERE team = ? ORDER BY id ASC" + cqlSelect :: PrepQuery R (Identity TeamId) (TeamId, Maybe Role, InvitationId, Email, UTCTimeMillis, Maybe UserId) + cqlSelect = "SELECT team, role, id, email, created_at, created_by FROM team_invitation WHERE team = ? ORDER BY id ASC" - cqlSelectFrom :: PrepQuery R (TeamId, InvitationId) (TeamId, InvitationId, Email, UTCTime) - cqlSelectFrom = "SELECT team, id, email, created_at FROM team_invitation WHERE team = ? AND id > ? ORDER BY id ASC" + cqlSelectFrom :: PrepQuery R (TeamId, InvitationId) (TeamId, Maybe Role, InvitationId, Email, UTCTimeMillis, Maybe UserId) + cqlSelectFrom = "SELECT team, role, id, email, created_at, created_by FROM team_invitation WHERE team = ? AND id > ? ORDER BY id ASC" deleteInvitation :: MonadClient m => TeamId -> InvitationId -> m () deleteInvitation t i = do @@ -135,7 +140,7 @@ deleteInvitations :: (MonadClient m, MonadUnliftIO m) => TeamId -> m () deleteInvitations t = liftClient $ runConduit $ paginateC cqlSelect (paramsP Quorum (Identity t) 100) x1 - .| C.mapM_ (void . mapMPooled 16 (deleteInvitation t . runIdentity)) + .| C.mapM_ (pooledMapConcurrentlyN_ 16 (deleteInvitation t . runIdentity)) where cqlSelect :: PrepQuery R (Identity TeamId) (Identity InvitationId) cqlSelect = "SELECT id FROM team_invitation WHERE team = ? ORDER BY id ASC" @@ -158,6 +163,7 @@ countInvitations t = fromMaybe 0 . fmap runIdentity <$> cqlSelect :: PrepQuery R (Identity TeamId) (Identity Int64) cqlSelect = "SELECT count(*) FROM team_invitation WHERE team = ?" --- Helper -toInvitation :: (TeamId, InvitationId, Email, UTCTime) -> Invitation -toInvitation (t, i, e, toUTCTimeMillis -> tm) = Invitation t i e tm +-- | brig used to not store the role, so for migration we allow this to be empty and fill in the +-- default here. +toInvitation :: (TeamId, Maybe Role, InvitationId, Email, UTCTimeMillis, Maybe UserId) -> Invitation +toInvitation (t, r, i, e, tm, minviter) = Invitation t (fromMaybe Team.defaultRole r) i e tm minviter diff --git a/services/brig/src/Brig/Team/Util.hs b/services/brig/src/Brig/Team/Util.hs index ba183fc3a12..734a4969de1 100644 --- a/services/brig/src/Brig/Team/Util.hs +++ b/services/brig/src/Brig/Team/Util.hs @@ -36,3 +36,18 @@ ensurePermissions u t perms = do check :: Maybe TeamMember -> Bool check (Just m) = and $ hasPermission m <$> perms check Nothing = False + +-- | Privilege escalation detection (make sure no `RoleMember` user creates a `RoleOwner`). +-- +-- There is some code duplication with 'Galley.API.Teams.ensureNotElevated'. +ensurePermissionToAddUser :: UserId -> TeamId -> Permissions -> ExceptT Error AppIO () +ensurePermissionToAddUser u t inviteePerms = do + minviter <- lift $ Intra.getTeamMember u t + unless (check minviter) $ + throwStd insufficientTeamPermissions + where + check :: Maybe TeamMember -> Bool + check (Just inviter) = + hasPermission inviter AddTeamMember && + and (mayGrantPermission inviter <$> Set.toList (inviteePerms ^. 
self)) + check Nothing = False diff --git a/services/brig/src/Brig/Whitelist.hs b/services/brig/src/Brig/Whitelist.hs index 4c62bd98e48..17e2c062a3e 100644 --- a/services/brig/src/Brig/Whitelist.hs +++ b/services/brig/src/Brig/Whitelist.hs @@ -26,8 +26,9 @@ data Whitelist = Whitelist instance FromJSON Whitelist verify :: (MonadIO m, MonadMask m, MonadHttp m) => Whitelist -> Either Email Phone -> m Bool -verify (Whitelist url user pass) key = - recovering x3 httpHandlers . const $ do +verify (Whitelist url user pass) key = if isKnownDomain key + then return True + else recovering x3 httpHandlers . const $ do rq <- parseRequest $ unpack url rsp <- get' rq $ req (encodeUtf8 user) (encodeUtf8 pass) case statusCode rsp of @@ -36,6 +37,9 @@ verify (Whitelist url user pass) key = _ -> throwM $ HttpExceptionRequest rq (StatusCodeException (rsp { responseBody = () }) mempty) where + isKnownDomain (Left e) = emailDomain e == "wire.com" + isKnownDomain _ = False + urlEmail = queryItem "email" . encodeUtf8 . fromEmail urlPhone = queryItem "mobile" . encodeUtf8 . fromPhone diff --git a/services/brig/test/integration/API/Team.hs b/services/brig/test/integration/API/Team.hs index dc9b9b8254c..cc99edcc655 100644 --- a/services/brig/test/integration/API/Team.hs +++ b/services/brig/test/integration/API/Team.hs @@ -1,5 +1,6 @@ {-# LANGUAGE DataKinds #-} {-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE TupleSections #-} {-# LANGUAGE TypeApplications #-} {-# LANGUAGE ViewPatterns #-} @@ -16,12 +17,14 @@ import Brig.Types.Team.Invitation import Brig.Types.User.Auth import Brig.Types.Intra import Control.Arrow ((&&&)) -import UnliftIO.Async.Extended - (mapConcurrently_, replicateConcurrently, forPooled, replicatePooled) +import UnliftIO.Async + (mapConcurrently_, replicateConcurrently, pooledForConcurrentlyN_) import Control.Lens ((^.), view) import Data.Aeson import Data.ByteString.Conversion import Data.Id hiding (client) +import Data.Json.Util (toUTCTimeMillis) +import Data.Time (getCurrentTime, addUTCTime) import Network.HTTP.Client (Manager) import Test.Tasty hiding (Timeout) import Test.Tasty.HUnit @@ -44,12 +47,13 @@ newtype TeamSizeLimit = TeamSizeLimit Word16 tests :: Maybe Opt.Opts -> Manager -> Brig -> Cannon -> Galley -> AWS.Env -> IO TestTree tests conf m b c g aws = do tl <- optOrEnv (TeamSizeLimit . Opt.setMaxTeamSize . Opt.optSettings) conf (TeamSizeLimit . read) "TEAM_MAX_SIZE" - it <- optOrEnv (Opt.setTeamInvitationTimeout . Opt.optSettings) conf read "TEAM_INVITATION_TIMEOUT" + it <- optOrEnv (Opt.setTeamInvitationTimeout . 
Opt.optSettings) conf read "TEAM_INVITATION_TIMEOUT" return $ testGroup "team" [ testGroup "invitation" [ test m "post /teams/:tid/invitations - 201" $ testInvitationEmail b g , test m "post /teams/:tid/invitations - 403 no permission" $ testInvitationNoPermission b g , test m "post /teams/:tid/invitations - 403 too many pending" $ testInvitationTooManyPending b g tl + , test m "post /teams/:tid/invitations - roles" $ testInvitationRoles b g , test' aws m "post /register - 201 accepted" $ testInvitationEmailAccepted b g , test' aws m "post /register user & team - 201 accepted" $ testCreateTeam b g aws , test' aws m "post /register user & team - 201 preverified" $ testCreateTeamPreverified b g aws @@ -69,7 +73,7 @@ tests conf m b c g aws = do , test m "post /connections - 403 (same binding team)" $ testConnectionSameTeam b g ] , testGroup "search" - [ test m "post /register members are unsearchable" $ testNonSearchableDefault b g + [ test m "post /register members are unsearchable" $ testNonSearchableDefault b g ] , testGroup "sso" [ test m "post /i/users - 201 internal-SSO" $ testCreateUserInternalSSO b g @@ -87,7 +91,7 @@ testUpdateEvents brig galley cannon = do inviteeEmail <- randomEmail -- invite and register Bob - let invite = InvitationRequest inviteeEmail (Name "Bob") Nothing + let invite = InvitationRequest inviteeEmail (Name "Bob") Nothing Nothing inv <- decodeBody =<< postInvitation brig tid alice invite Just inviteeCode <- getInvitationCode brig tid (inInvitation inv) rsp2 <- post (brig . path "/register" @@ -118,15 +122,15 @@ testInvitationEmail :: Brig -> Galley -> Http () testInvitationEmail brig galley = do (inviter, tid) <- createUserWithTeam brig galley invitee <- randomEmail - let invite = InvitationRequest invitee (Name "Bob") Nothing + let invite = InvitationRequest invitee (Name "Bob") Nothing Nothing void $ postInvitation brig tid inviter invite testInvitationTooManyPending :: Brig -> Galley -> TeamSizeLimit -> Http () testInvitationTooManyPending brig galley (TeamSizeLimit limit) = do (inviter, tid) <- createUserWithTeam brig galley emails <- replicateConcurrently (fromIntegral limit) randomEmail - let invite e = InvitationRequest e (Name "Bob") Nothing - void $ forPooled 16 emails $ \email -> + let invite e = InvitationRequest e (Name "Bob") Nothing Nothing + pooledForConcurrentlyN_ 16 emails $ \email -> postInvitation brig tid inviter (invite email) e <- randomEmail -- TODO: If this test takes longer to run than `team-invitation-timeout`, then some of the @@ -135,12 +139,55 @@ testInvitationTooManyPending brig galley (TeamSizeLimit limit) = do const 403 === statusCode const (Just "too-many-team-invitations") === fmap Error.label . decodeBody +-- | Admins can invite external partners, but not owners. +testInvitationRoles :: HasCallStack => Brig -> Galley -> Http () +testInvitationRoles brig galley = do + (owner, tid) <- createUserWithTeam brig galley + + let registerInvite :: Invitation -> Email -> Http UserId + registerInvite inv invemail = do + Just inviteeCode <- getInvitationCode brig tid (inInvitation inv) + rsp <- post (brig . path "/register" + . contentJson + . body (accept invemail inviteeCode)) + decodeBody rsp + pure invitee + + -- owner creates a member alice. + alice :: UserId <- do + aliceEmail <- randomEmail + let invite = InvitationRequest aliceEmail (Name "Alice") Nothing (Just Team.RoleAdmin) + inv :: Invitation <- decodeBody =<< postInvitation brig tid owner invite + registerInvite inv aliceEmail + + -- alice creates a external partner bob. 
success! bob only has externalPartner perms. + do + bobEmail <- randomEmail + let invite = InvitationRequest bobEmail (Name "Bob") Nothing (Just Team.RoleExternalPartner) + inv :: Invitation <- decodeBody =<< (postInvitation brig tid alice invite Galley -> Http () testInvitationEmailAccepted brig galley = do (inviter, tid) <- createUserWithTeam brig galley inviteeEmail <- randomEmail - let invite = InvitationRequest inviteeEmail (Name "Bob") Nothing + let invite = InvitationRequest inviteeEmail (Name "Bob") Nothing Nothing inv <- decodeBody =<< postInvitation brig tid inviter invite + let invmeta = Just (inviter, inCreatedAt inv) Just inviteeCode <- getInvitationCode brig tid (inInvitation inv) rsp2 <- post (brig . path "/register" . contentJson @@ -153,7 +200,8 @@ testInvitationEmailAccepted brig galley = do login brig (defEmailLogin email2) PersistentCookie !!! const 200 === statusCode -- Verify that the user is part of the team mem <- getTeamMember invitee tid galley - liftIO $ assertBool "Member not part of the team" (invitee == mem ^. Team.userId) + liftIO $ assertEqual "Member not part of the team" invitee (mem ^. Team.userId) + liftIO $ assertEqual "Member has no/wrong invitation metadata" invmeta (mem ^. Team.invitation) conns <- listConnections invitee brig liftIO $ assertBool "User should have no connections" (null (clConnections conns) && not (clHasMore conns)) @@ -171,7 +219,7 @@ testCreateTeam brig galley aws = do liftIO $ assertBool "Member not part of the team" (uid == mem ^. Team.userId) -- Verify that the user cannot send invitations before activating their account inviteeEmail <- randomEmail - let invite = InvitationRequest inviteeEmail (Name "Bob") Nothing + let invite = InvitationRequest inviteeEmail (Name "Bob") Nothing Nothing postInvitation brig (team^.Team.teamId) uid invite !!! const 403 === statusCode -- Verify that the team is still in status "pending" team2 <- getTeam galley (team^.Team.teamId) @@ -207,7 +255,7 @@ testCreateTeamPreverified brig galley aws = do liftIO $ assertEqual "Team should already be active" Team.Active (Team.tdStatus team2) -- Verify that the user can already send invitations before activating their account inviteeEmail <- randomEmail - let invite = InvitationRequest inviteeEmail (Name "Bob") Nothing + let invite = InvitationRequest inviteeEmail (Name "Bob") Nothing Nothing postInvitation brig (team^.Team.teamId) uid invite !!! const 201 === statusCode testInvitationNoPermission :: Brig -> Galley -> Http () @@ -215,7 +263,7 @@ testInvitationNoPermission brig galley = do (_, tid) <- createUserWithTeam brig galley alice <- userId <$> randomUser brig email <- randomEmail - let invite = InvitationRequest email (Name "Bob") Nothing + let invite = InvitationRequest email (Name "Bob") Nothing Nothing postInvitation brig tid alice invite !!! do const 403 === statusCode const (Just "insufficient-permissions") === fmap Error.label . decodeBody @@ -245,6 +293,7 @@ testInvitationCodeExists :: Brig -> Galley -> Http () testInvitationCodeExists brig galley = do email <- randomEmail (uid, tid) <- createUserWithTeam brig galley + let invite email_ = InvitationRequest email_ (Name "Bob") Nothing Nothing rsp <- postInvitation brig tid uid (invite email) decodeBody rsp @@ -261,8 +310,6 @@ testInvitationCodeExists brig galley = do post (brig . path "/register" . contentJson . body (accept email2 invCode)) !!! do const 400 === statusCode const (Just "invalid-invitation-code") === fmap Error.label . 
decodeBody - where - invite email = InvitationRequest email (Name "Bob") Nothing testInvitationInvalidCode :: Brig -> Http () testInvitationInvalidCode brig = do @@ -316,11 +363,11 @@ testInvitationMutuallyExclusive brig = do testInvitationTooManyMembers :: Brig -> Galley -> TeamSizeLimit -> Http () testInvitationTooManyMembers brig galley (TeamSizeLimit limit) = do (creator, tid) <- createUserWithTeam brig galley - void $ replicatePooled 16 (fromIntegral limit - 1) $ + pooledForConcurrentlyN_ 16 [1..limit-1] $ \_ -> createTeamMember brig galley creator tid Team.fullPermissions em <- randomEmail - let invite = InvitationRequest em (Name "Bob") Nothing + let invite = InvitationRequest em (Name "Bob") Nothing Nothing inv <- decodeBody =<< postInvitation brig tid creator invite Just inviteeCode <- getInvitationCode brig tid (inInvitation inv) post (brig . path "/register" @@ -329,35 +376,51 @@ testInvitationTooManyMembers brig galley (TeamSizeLimit limit) = do const 403 === statusCode const (Just "too-many-team-members") === fmap Error.label . decodeBody -testInvitationPaging :: Brig -> Galley -> Http () +testInvitationPaging :: HasCallStack => Brig -> Galley -> Http () testInvitationPaging brig galley = do - (u, tid) <- createUserWithTeam brig galley - replicateM_ total $ do - email <- randomEmail - postInvitation brig tid u (invite email) !!! const 201 === statusCode - foldM_ (next u tid 2) (0, Nothing) [2,2,1,0] - foldM_ (next u tid total) (0, Nothing) [total,0] - where - total = 5 + before <- liftIO $ toUTCTimeMillis . addUTCTime (-1) <$> getCurrentTime + (uid, tid) <- createUserWithTeam brig galley - next :: UserId -> TeamId -> Int -> (Int, Maybe InvitationId) -> Int -> Http (Int, Maybe InvitationId) - next u t step (count, start) n = do - let count' = count + step - let range = queryRange (toByteString' <$> start) (Just step) - r <- get (brig . paths ["teams", toByteString' t, "invitations"] . zUser u . range) invs) - liftIO $ assertEqual "has more" (Just (count' < total)) more - return . (count',) $ invs >>= fmap inInvitation . listToMaybe . reverse + let total = 5 + invite email = InvitationRequest email (Name "Bob") Nothing Nothing - invite email = InvitationRequest email (Name "Bob") Nothing + emails <- replicateM total $ do + email <- randomEmail + postInvitation brig tid uid (invite email) !!! const 201 === statusCode + pure email + after <- liftIO $ toUTCTimeMillis . addUTCTime 1 <$> getCurrentTime + + let next :: HasCallStack => Int -> (Int, Maybe InvitationId) -> Int -> Http (Int, Maybe InvitationId) + next step (count, start) actualPageLen = do + let count' = count + step + let range = queryRange (toByteString' <$> start) (Just step) + r <- get (brig . paths ["teams", toByteString' tid, "invitations"] . zUser uid . range) decodeBody r + liftIO $ assertEqual "page size" actualPageLen (length invs) + liftIO $ assertEqual "has more" (count' < total) more + liftIO $ validateInv `mapM_` invs + return (count', fmap inInvitation . listToMaybe . reverse $ invs) + + validateInv :: Invitation -> Assertion + validateInv inv = do + assertEqual "tid" tid (inTeam inv) + assertBool "email" (inIdentity inv `elem` emails) + -- (the output list is not ordered chronologically and emails are unique, so we just + -- check whether the email is one of the valid ones.) 
+ assertBool "timestamp" (inCreatedAt inv > before && inCreatedAt inv < after) + assertEqual "uid" (Just uid) (inCreatedBy inv) + -- not checked: @inInvitation inv :: InvitationId@ + + foldM_ (next 2) (0, Nothing) [2,2,1,0] + foldM_ (next total) (0, Nothing) [total,0] + foldM_ (next (total + 1)) (0, Nothing) [total,0] testInvitationInfo :: Brig -> Galley -> Http () testInvitationInfo brig galley = do email <- randomEmail (uid, tid) <- createUserWithTeam brig galley - let invite = InvitationRequest email (Name "Bob") Nothing + let invite = InvitationRequest email (Name "Bob") Nothing Nothing inv <- decodeBody =<< postInvitation brig tid uid invite Just invCode <- getInvitationCode brig tid (inInvitation inv) @@ -376,7 +439,7 @@ testInvitationInfoExpired :: Brig -> Galley -> Opt.Timeout -> Http () testInvitationInfoExpired brig galley timeout = do email <- randomEmail (uid, tid) <- createUserWithTeam brig galley - let invite = InvitationRequest email (Name "Bob") Nothing + let invite = InvitationRequest email (Name "Bob") Nothing Nothing inv <- decodeBody =<< postInvitation brig tid uid invite -- Note: This value must be larger than the option passed as `team-invitation-timeout` awaitExpiry (round timeout + 5) tid (inInvitation inv) @@ -403,7 +466,7 @@ testSuspendTeam brig galley = do (inviter, tid) <- createUserWithTeam brig galley -- invite and register invitee - let invite = InvitationRequest inviteeEmail (Name "Bob") Nothing + let invite = InvitationRequest inviteeEmail (Name "Bob") Nothing Nothing inv <- decodeBody =<< postInvitation brig tid inviter invite Just inviteeCode <- getInvitationCode brig tid (inInvitation inv) rsp2 <- post (brig . path "/register" @@ -413,7 +476,7 @@ testSuspendTeam brig galley = do let Just (invitee, Just email) = (userId &&& userEmail) <$> decodeBody rsp2 -- invite invitee2 (don't register) - let invite2 = InvitationRequest inviteeEmail2 (Name "Bob") Nothing + let invite2 = InvitationRequest inviteeEmail2 (Name "Bob") Nothing Nothing inv2 <- decodeBody =<< postInvitation brig tid inviter invite2 Just _ <- getInvitationCode brig tid (inInvitation inv2) diff --git a/services/brig/test/integration/API/Team/Util.hs b/services/brig/test/integration/API/Team/Util.hs index 3ed00a091bf..6a74c815e4a 100644 --- a/services/brig/test/integration/API/Team/Util.hs +++ b/services/brig/test/integration/API/Team/Util.hs @@ -77,7 +77,7 @@ createTeamMember brig galley owner tid perm = do inviteAndRegisterUser :: UserId -> TeamId -> Brig -> Http User inviteAndRegisterUser u tid brig = do inviteeEmail <- randomEmail - let invite = InvitationRequest inviteeEmail (Name "Bob") Nothing + let invite = InvitationRequest inviteeEmail (Name "Bob") Nothing Nothing inv <- decodeBody =<< postInvitation brig tid u invite Just inviteeCode <- getInvitationCode brig tid (inInvitation inv) rspInvitee <- post (brig . path "/register" @@ -85,9 +85,9 @@ inviteAndRegisterUser u tid brig = do . body (accept inviteeEmail inviteeCode)) getSelfProfile brig (userId invitee) - liftIO $ assertBool "Team ID in self profile and team table do not match" (selfTeam == Just tid) + liftIO $ assertEqual "Team ID in self profile and team table do not match" selfTeam (Just tid) return invitee updatePermissions :: UserId -> TeamId -> (UserId, Team.Permissions) -> Galley -> Http () @@ -99,7 +99,7 @@ updatePermissions from tid (to, perm) galley = . Bilge.json changeMember ) !!! 
const 200 === statusCode where - changeMember = Team.newNewTeamMember $ Team.newTeamMember to perm + changeMember = Team.newNewTeamMember $ Team.newTeamMember to perm Nothing createTeamConv :: HasCallStack => Galley -> TeamId -> UserId -> [UserId] -> Maybe Milliseconds -> Http ConvId createTeamConv g tid u us mtimer = do diff --git a/services/brig/test/integration/API/User/Auth.hs b/services/brig/test/integration/API/User/Auth.hs index f81b3d763e3..356c9e342ad 100644 --- a/services/brig/test/integration/API/User/Auth.hs +++ b/services/brig/test/integration/API/User/Auth.hs @@ -10,7 +10,7 @@ import Brig.Types.Intra import Brig.Types.User import Brig.Types.User.Auth import Brig.ZAuth (ZAuth, runZAuth) -import UnliftIO.Async.Extended hiding (wait) +import UnliftIO.Async hiding (wait) import Control.Lens ((^?), set) import Data.Aeson import Data.Aeson.Lens @@ -219,7 +219,8 @@ testThrottleLogins conf b = do u <- randomUser b let Just e = userEmail u -- Login exactly that amount of times, as fast as possible - void $ replicatePooled 8 l (login b (defEmailLogin e) SessionCookie) + pooledForConcurrentlyN_ 8 [1..l] $ \_ -> + login b (defEmailLogin e) SessionCookie -- Login once more. This should fail! x <- login b (defEmailLogin e) SessionCookie return () - _ -> reset counter s 0 >> loop + perhapsPingMsg -> do + reset counter s 0 + when (isAppLevelPing perhapsPingMsg) sendAppLevelPong + loop adjustPingFreq p = case fromByteString (toStrict p) of Just i | i > 0 && i < maxPingInterval -> reset pingFreq s (i # Second) _ -> return () + -- control messages are internal to the browser that manages the websockets + -- . + -- since the browser may silently lose a websocket connection, wire clients are allowed send + -- 'DataMessage' pings as well, and we respond with a 'DataMessage' pong to allow them to + -- reliably decide whether the connection is still alive. + isAppLevelPing = \case + (DataMessage _ _ _ (Text "ping" _)) -> True + (DataMessage _ _ _ (Binary "ping")) -> True + _ -> False + sendAppLevelPong = sendMsgIO "pong" ws + rejectOnError :: PendingConnection -> HandshakeException -> IO a rejectOnError p x = do let f lb mg = toStrict . encode $ Error status400 lb mg diff --git a/services/cannon/src/Cannon/WS.hs b/services/cannon/src/Cannon/WS.hs index 01455a338bf..3e35e5b33bb 100644 --- a/services/cannon/src/Cannon/WS.hs +++ b/services/cannon/src/Cannon/WS.hs @@ -15,6 +15,7 @@ module Cannon.WS , isRemoteRegistered , registerRemote , sendMsg + , sendMsgIO , Clock , mkClock @@ -203,7 +204,11 @@ sendMsg :: L.ByteString -> Key -> Websocket -> WS () sendMsg m k c = do let kb = key2bytes k trace $ client kb . 
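-- Sketch of the client side of the app-level ping/pong introduced above: instead of a
-- websocket control frame, the client sends a "ping" data message and waits for the
-- "pong" data message that cannon now returns.  This assumes the websockets package;
-- the helper name and the timeout value are illustrative only, not part of this patch.
{-# LANGUAGE OverloadedStrings #-}
import Data.Text (Text)
import System.Timeout (timeout)
import qualified Network.WebSockets as WS

-- | Returns False if no app-level pong arrives within 5 seconds.
appLevelPing :: WS.Connection -> IO Bool
appLevelPing conn = do
    WS.sendTextData conn ("ping" :: Text)
    resp <- timeout 5000000 (WS.receiveData conn)
    pure (resp == Just ("pong" :: Text))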
msg (val "sendMsg: \"" +++ L.take 128 m +++ val "...\"") - liftIO $ recoverAll retry3x $ const $ sendBinaryData (connection c) m + liftIO $ sendMsgIO m c + +sendMsgIO :: L.ByteString -> Websocket -> IO () +sendMsgIO m c = do + recoverAll retry3x $ const $ sendBinaryData (connection c) m close :: Key -> Websocket -> WS () close k c = do diff --git a/services/cargohold/Makefile b/services/cargohold/Makefile index 676b58300cd..1f863fb8c30 100644 --- a/services/cargohold/Makefile +++ b/services/cargohold/Makefile @@ -10,6 +10,8 @@ DEB := dist/$(NAME)_$(VERSION)+$(BUILD)_amd64.deb DEB_IT := dist/$(NAME)-integration_$(VERSION)+$(BUILD)_amd64.deb SDIST := dist/$(NAME)-$(VERSION).tar.gz EXECUTABLES := $(NAME) $(NAME)-integration +DOCKER_USER ?= quay.io/wire +DOCKER_TAG ?= local guard-%: @ if [ "${${*}}" = "" ]; then \ @@ -91,11 +93,12 @@ integration-%: fast .PHONY: docker docker: $(foreach executable,$(EXECUTABLES),\ - docker build -t $(executable) \ - -f ../../build/alpine/Dockerfile \ - --build-arg service=$(NAME) \ + docker build -t $(DOCKER_USER)/$(executable):$(DOCKER_TAG) \ + -f ../../build/alpine/Dockerfile.executable \ --build-arg executable=$(executable) \ - ../.. \ + ../.. && \ + docker tag $(DOCKER_USER)/$(executable):$(DOCKER_TAG) $(DOCKER_USER)/$(executable):latest && \ + if test -n "$$DOCKER_PUSH"; then docker login -u $(DOCKER_USERNAME) -p $(DOCKER_PASSWORD); docker push $(DOCKER_USER)/$(executable):$(DOCKER_TAG); docker push $(DOCKER_USER)/$(executable):latest; fi \ ;) .PHONY: time diff --git a/services/galley/Makefile b/services/galley/Makefile index b4b31d5ea15..80f6780a3d9 100644 --- a/services/galley/Makefile +++ b/services/galley/Makefile @@ -12,6 +12,8 @@ DEB_IT := $(NAME)-integration_$(VERSION)+$(BUILD)_amd64.deb DEB_SCHEMA := $(NAME)-schema_$(VERSION)+$(BUILD)_amd64.deb DEB_JOURNALER := $(NAME)-journaler_$(VERSION)+$(BUILD)_amd64.deb EXECUTABLES := $(NAME) $(NAME)-integration $(NAME)-schema $(NAME)-journaler +DOCKER_USER ?= quay.io/wire +DOCKER_TAG ?= local guard-%: @ if [ "${${*}}" = "" ]; then \ @@ -120,11 +122,12 @@ db-migrate: fast .PHONY: docker docker: $(foreach executable,$(EXECUTABLES),\ - docker build -t $(executable) \ - -f ../../build/alpine/Dockerfile \ - --build-arg service=$(NAME) \ + docker build -t $(DOCKER_USER)/$(executable):$(DOCKER_TAG) \ + -f ../../build/alpine/Dockerfile.executable \ --build-arg executable=$(executable) \ - ../.. \ + ../.. 
&& \ + docker tag $(DOCKER_USER)/$(executable):$(DOCKER_TAG) $(DOCKER_USER)/$(executable):latest && \ + if test -n "$$DOCKER_PUSH"; then docker login -u $(DOCKER_USERNAME) -p $(DOCKER_PASSWORD); docker push $(DOCKER_USER)/$(executable):$(DOCKER_TAG); docker push $(DOCKER_USER)/$(executable):latest; fi \ ;) .PHONY: time diff --git a/services/galley/galley.cabal b/services/galley/galley.cabal index 6d00e6df3e6..aa222768fb4 100644 --- a/services/galley/galley.cabal +++ b/services/galley/galley.cabal @@ -88,7 +88,6 @@ library , http-types >= 0.8 , imports , lens >= 4.4 - , lifted-async >= 0.2 , lifted-base >= 0.2 , metrics-wai >= 0.4 , monad-control >= 1.0 @@ -168,6 +167,7 @@ executable galley-schema V27 V28 V29 + V30 build-depends: base @@ -225,7 +225,6 @@ executable galley-integration , imports , lens , lens-aeson - , lifted-async , mtl , network , optparse-applicative @@ -274,7 +273,6 @@ executable galley-journaler , cassandra-util , lens , text - , lifted-async , mtl , galley-types , proto-lens diff --git a/services/galley/schema/src/Main.hs b/services/galley/schema/src/Main.hs index 51210f55d58..cfcd431784b 100644 --- a/services/galley/schema/src/Main.hs +++ b/services/galley/schema/src/Main.hs @@ -18,6 +18,7 @@ import qualified V26 import qualified V27 import qualified V28 import qualified V29 +import qualified V30 main :: IO () main = do @@ -34,6 +35,7 @@ main = do , V27.migration , V28.migration , V29.migration + , V30.migration -- When adding migrations here, don't forget to update -- 'schemaVersion' in Galley.Data ] diff --git a/services/galley/schema/src/V30.hs b/services/galley/schema/src/V30.hs new file mode 100644 index 00000000000..52dc9a6b0af --- /dev/null +++ b/services/galley/schema/src/V30.hs @@ -0,0 +1,13 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE QuasiQuotes #-} + +module V30 (migration) where + +import Imports +import Cassandra.Schema +import Text.RawString.QQ + +migration :: Migration +migration = Migration 30 "Add invitation metadata to team_member" $ do + schema' [r| ALTER TABLE team_member ADD invited_by uuid; |] + schema' [r| ALTER TABLE team_member ADD invited_at timestamp; |] diff --git a/services/galley/src/Galley/API/Create.hs b/services/galley/src/Galley/API/Create.hs index 8feb3b50c0f..06615097597 100644 --- a/services/galley/src/Galley/API/Create.hs +++ b/services/galley/src/Galley/API/Create.hs @@ -76,23 +76,29 @@ createRegularGroupConv zusr zcon (NewConvUnmanaged body) = do createTeamGroupConv :: UserId -> ConnId -> ConvTeamInfo -> NewConv -> Galley Response createTeamGroupConv zusr zcon tinfo body = do name <- rangeCheckedMaybe (newConvName body) - mems <- Data.teamMembers (cnvTeamId tinfo) - ensureAccessRole (accessRole body) (newConvUsers body) (Just mems) - void $ permissionCheck zusr CreateConversation mems - uids <- + teamMems <- Data.teamMembers (cnvTeamId tinfo) + ensureAccessRole (accessRole body) (newConvUsers body) (Just teamMems) + void $ permissionCheck zusr CreateConversation teamMems + otherConvMems <- if cnvManaged tinfo then do - let uu = filter (/= zusr) $ map (view userId) mems - checkedConvSize uu + let otherConvMems = filter (/= zusr) $ map (view userId) teamMems + checkedConvSize otherConvMems else do - void $ permissionCheck zusr AddConversationMember mems - uu <- checkedConvSize (newConvUsers body) - ensureConnected zusr (notTeamMember (fromConvSize uu) mems) - pure uu - conv <- Data.createConversation zusr name (access body) (accessRole body) uids (newConvTeam body) (newConvMessageTimer body) (newConvReceiptMode body) + 
otherConvMems <- checkedConvSize (newConvUsers body) + -- In teams we don't have 1:1 conversations, only regular + -- conversations. We want users without the 'AddRemoveConvMember' + -- permission to still be able to create regular conversations, + -- therefore we check for 'AddRemoveConvMember' only if there are + -- going to be more than two users in the conversation. + when (length (fromConvSize otherConvMems) > 1) $ do + void $ permissionCheck zusr AddRemoveConvMember teamMems + ensureConnected zusr (notTeamMember (fromConvSize otherConvMems) teamMems) + pure otherConvMems + conv <- Data.createConversation zusr name (access body) (accessRole body) otherConvMems (newConvTeam body) (newConvMessageTimer body) (newConvReceiptMode body) now <- liftIO getCurrentTime let d = Teams.EdConvCreate (Data.convId conv) let e = newEvent Teams.ConvCreate (cnvTeamId tinfo) now & eventData .~ Just d - let notInConv = Set.fromList (map (view userId) mems) \\ Set.fromList (zusr : fromConvSize uids) + let notInConv = Set.fromList (map (view userId) teamMems) \\ Set.fromList (zusr : fromConvSize otherConvMems) for_ (newPush zusr (TeamEvent e) (map userRecipient (Set.toList notInConv))) push1 notifyCreatedConversation (Just now) zusr (Just zcon) conv conversationResponse status201 zusr conv diff --git a/services/galley/src/Galley/API/Teams.hs b/services/galley/src/Galley/API/Teams.hs index 012609a80fb..82dab64eda2 100644 --- a/services/galley/src/Galley/API/Teams.hs +++ b/services/galley/src/Galley/API/Teams.hs @@ -98,7 +98,7 @@ lookupTeam zusr tid = do createNonBindingTeam :: UserId ::: ConnId ::: Request ::: JSON ::: JSON -> Galley Response createNonBindingTeam (zusr::: zcon ::: req ::: _) = do NonBindingNewTeam body <- fromBody req invalidPayload - let owner = newTeamMember zusr fullPermissions + let owner = newTeamMember zusr fullPermissions Nothing let others = filter ((zusr /=) . view userId) . maybe [] fromRange $ body^.newTeamMembers @@ -111,7 +111,7 @@ createNonBindingTeam (zusr::: zcon ::: req ::: _) = do createBindingTeam :: UserId ::: TeamId ::: Request ::: JSON ::: JSON -> Galley Response createBindingTeam (zusr ::: tid ::: req ::: _) = do BindingNewTeam body <- fromBody req invalidPayload - let owner = newTeamMember zusr fullPermissions + let owner = newTeamMember zusr fullPermissions Nothing team <- Data.createTeam (Just tid) zusr (body^.newTeamName) (body^.newTeamIcon) (body^.newTeamIconKey) Binding finishCreateTeam team owner [] Nothing @@ -209,8 +209,8 @@ getTeamMembers (zusr::: tid ::: _) = do case findTeamMember zusr mems of Nothing -> throwM noTeamMember Just m -> do - let withPerm = m `hasPermission` GetMemberPermissions - pure (json $ teamMemberListJson withPerm (newTeamMemberList mems)) + let withPerms = (m `canSeePermsOf`) + pure (json $ teamMemberListJson withPerms (newTeamMemberList mems)) getTeamMember :: UserId ::: TeamId ::: UserId ::: JSON -> Galley Response getTeamMember (zusr ::: tid ::: uid ::: _) = do @@ -218,19 +218,20 @@ getTeamMember (zusr ::: tid ::: uid ::: _) = do case findTeamMember zusr mems of Nothing -> throwM noTeamMember Just m -> do - let withPerm = m `hasPermission` GetMemberPermissions - let member = findTeamMember uid mems - maybe (throwM teamMemberNotFound) (pure . json . teamMemberJson withPerm) member + let withPerms = (m `canSeePermsOf`) + let member = findTeamMember uid mems + maybe (throwM teamMemberNotFound) + (pure . json . 
teamMemberJson withPerms) member uncheckedGetTeamMember :: TeamId ::: UserId ::: JSON -> Galley Response uncheckedGetTeamMember (tid ::: uid ::: _) = do mem <- Data.teamMember tid uid >>= ifNothing teamMemberNotFound - return . json $ teamMemberJson True mem + return $ json mem uncheckedGetTeamMembers :: TeamId ::: JSON -> Galley Response uncheckedGetTeamMembers (tid ::: _) = do mems <- Data.teamMembers tid - return . json $ teamMemberListJson True (newTeamMemberList mems) + return . json $ newTeamMemberList mems addTeamMember :: UserId ::: ConnId ::: TeamId ::: Request ::: JSON ::: JSON -> Galley Response addTeamMember (zusr ::: zcon ::: tid ::: req ::: _) = do @@ -292,23 +293,17 @@ updateTeamMember (zusr ::: zcon ::: tid ::: req ::: _) = do -- inform members of the team about the change -- some (privileged) users will be informed about which change was applied - let privilege = flip hasPermission GetMemberPermissions - (privileged, unprivileged) = partition privilege updatedMembers + let privileged = filter (`canSeePermsOf` targetMember) updatedMembers mkUpdate = EdMemberUpdate targetId privilegedUpdate = mkUpdate $ Just targetPermissions - unPrivilegedUpdate = mkUpdate Nothing privilegedRecipients = membersToRecipients Nothing privileged - unPrivilegedRecipients = membersToRecipients Nothing unprivileged now <- liftIO getCurrentTime let ePriv = newEvent MemberUpdate tid now & eventData ?~ privilegedUpdate - eUPriv = newEvent MemberUpdate tid now & eventData ?~ unPrivilegedUpdate -- push to all members (user is privileged) let pushPriv = newPush zusr (TeamEvent ePriv) $ privilegedRecipients - pushUnPriv = newPush zusr (TeamEvent eUPriv) $ unPrivilegedRecipients for_ pushPriv $ \p -> push1 $ p & pushConn .~ Just zcon - for_ pushUnPriv $ \p -> push1 $ p & pushConn .~ Just zcon pure empty deleteTeamMember :: UserId ::: ConnId ::: TeamId ::: UserId ::: Request ::: Maybe JSON ::: JSON -> Galley Response @@ -486,4 +481,4 @@ getBindingTeamId zusr = withBindingTeam zusr $ pure . json getBindingTeamMembers :: UserId -> Galley Response getBindingTeamMembers zusr = withBindingTeam zusr $ \tid -> do members <- Data.teamMembers tid - pure $ json $ teamMemberListJson True (newTeamMemberList members) + pure . 
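-- The `canSeePermsOf` checks in this hunk replace the blanket `hasPermission
-- GetMemberPermissions` test.  A sketch of the visibility rule this suggests
-- (illustrative; the real definition lives in the teams types, not in this file):
-- a viewer may see a target's permissions if she holds GetMemberPermissions, or if
-- the target is the viewer herself.
import Control.Lens ((^.))
import Galley.Types.Teams (Perm (GetMemberPermissions), TeamMember, hasPermission, userId)

canSeePermsOfSketch :: TeamMember -> TeamMember -> Bool
canSeePermsOfSketch viewer target =
    viewer `hasPermission` GetMemberPermissions
        || viewer ^. userId == target ^. userId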
json $ newTeamMemberList members diff --git a/services/galley/src/Galley/API/Update.hs b/services/galley/src/Galley/API/Update.hs index 32a60384148..6e4cf7d7b2c 100644 --- a/services/galley/src/Galley/API/Update.hs +++ b/services/galley/src/Galley/API/Update.hs @@ -1,11 +1,13 @@ {-# LANGUAGE ConstraintKinds #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE LambdaCase #-} {-# LANGUAGE MultiWayIf #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE TupleSections #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} +{-# LANGUAGE ViewPatterns #-} module Galley.API.Update ( -- * Managing Conversations @@ -66,6 +68,7 @@ import Galley.Types.Bot import Galley.Types.Clients (Clients) import Galley.Types.Teams hiding (EventType (..), EventData (..), Event) import Galley.Validation +import Gundeck.Types.Push.V2 (RecipientClients(..)) import Network.HTTP.Types import Network.Wai import Network.Wai.Predicate hiding (_1, _2, setStatus, failure) @@ -122,7 +125,8 @@ updateConversationAccess (usr ::: zcon ::: cnv ::: req ::: _ ) = do ensureGroupConv conv -- Team conversations incur another round of checks case Data.convTeam conv of - Just tid -> checkTeamConv tid + Just tid -> checkTeamConv tid >> + permissionCheckTeamConv usr cnv ModifyConvMetadata Nothing -> when (targetRole == TeamAccessRole) $ throwM invalidTargetAccess -- When there is no update to be done, we return 204; otherwise we go -- with 'uncheckedUpdateConversationAccess', which will potentially kick @@ -147,7 +151,7 @@ updateConversationAccess (usr ::: zcon ::: cnv ::: req ::: _ ) = do throwM invalidManagedConvOp -- Access mode change might result in members being removed from the -- conversation, so the user must have the necessary permission flag - void $ permissionCheck usr RemoveConversationMember tMembers + void $ permissionCheck usr AddRemoveConvMember tMembers uncheckedUpdateConversationAccess :: ConversationAccessUpdate -> UserId -> ConnId -> Data.Conversation @@ -208,6 +212,7 @@ uncheckedUpdateConversationAccess body usr zcon conv (currentAccess, targetAcces updateConversationReceiptMode :: UserId ::: ConnId ::: ConvId ::: Request ::: JSON ::: JSON -> Galley Response updateConversationReceiptMode (usr ::: zcon ::: cnv ::: req ::: _ ::: _) = do ConversationReceiptModeUpdate target <- fromBody req invalidPayload + permissionCheckTeamConv usr cnv ModifyConvMetadata (bots, users) <- botsAndUsers <$> Data.members cnv current <- Data.lookupReceiptMode cnv if current == Just target @@ -232,6 +237,7 @@ updateConversationMessageTimer (usr ::: zcon ::: cnv ::: req ::: _ ) = do conv <- Data.conversation cnv >>= ifNothing convNotFound ensureGroupConv conv traverse_ ensureTeamMember $ Data.convTeam conv -- only team members can change the timer + permissionCheckTeamConv usr cnv ModifyConvMetadata let currentTimer = Data.convMessageTimer conv if currentTimer == messageTimer then return $ empty & setStatus status204 @@ -360,7 +366,7 @@ addMembers (zusr ::: zcon ::: cid ::: req ::: _) = do teamConvChecks tid newUsers conv = do tms <- Data.teamMembers tid ensureAccessRole (Data.convAccessRole conv) newUsers (Just tms) - void $ permissionCheck zusr AddConversationMember tms + void $ permissionCheck zusr AddRemoveConvMember tms tcv <- Data.teamConversation tid cid when (maybe True (view managedConversation) tcv) $ throwM noAddToManaged @@ -417,7 +423,7 @@ removeMember (zusr ::: zcon ::: cid ::: victim) = do teamConvChecks tid = do unless (zusr == victim) $ - void $ permissionCheck zusr RemoveConversationMember 
=<< Data.teamMembers tid + void $ permissionCheck zusr AddRemoveConvMember =<< Data.teamMembers tid tcv <- Data.teamConversation tid cid when (maybe False (view managedConversation) tcv) $ throwM (invalidOp "Users can not be removed from managed conversations.") @@ -485,7 +491,7 @@ newMessage usr con cnv msg now (m, c, t) ~(toBots, toUsers) = } conv = fromMaybe (selfConv $ memId m) cnv -- use recipient's client's self conversation on broadcast e = Event OtrMessageAdd conv usr now (Just $ EdOtrMessage o) - r = recipient m & recipientClients .~ [c] + r = recipient m & recipientClients .~ (RecipientClientsSome $ singleton c) in case newBotMember m of Just b -> ((b,e):toBots, toUsers) Nothing -> @@ -499,6 +505,7 @@ newMessage usr con cnv msg now (m, c, t) ~(toBots, toUsers) = updateConversation :: UserId ::: ConnId ::: ConvId ::: Request ::: JSON -> Galley Response updateConversation (zusr ::: zcon ::: cnv ::: req ::: _) = do body <- fromBody req invalidPayload + permissionCheckTeamConv zusr cnv ModifyConvMetadata alive <- Data.isConvAlive cnv unless alive $ do Data.deleteConversation cnv @@ -567,7 +574,7 @@ addBot (zusr ::: zcon ::: req ::: _) = do teamConvChecks cid tid = do tms <- Data.teamMembers tid - void $ permissionCheck zusr AddConversationMember tms + void $ permissionCheck zusr AddRemoveConvMember tms tcv <- Data.teamConversation tid cid when (maybe True (view managedConversation) tcv) $ throwM noAddToManaged diff --git a/services/galley/src/Galley/API/Util.hs b/services/galley/src/Galley/API/Util.hs index 9bfc4fe2e5a..b3fcf1eb4b6 100644 --- a/services/galley/src/Galley/API/Util.hs +++ b/services/galley/src/Galley/API/Util.hs @@ -1,7 +1,8 @@ {-# LANGUAGE DataKinds #-} +{-# LANGUAGE LambdaCase #-} {-# LANGUAGE MultiWayIf #-} -{-# LANGUAGE ViewPatterns #-} {-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ViewPatterns #-} module Galley.API.Util where @@ -72,6 +73,9 @@ bindingTeamMembers tid = do Binding -> Data.teamMembers tid NonBinding -> throwM nonBindingTeam +-- | Pick a team member with a given user id from some team members. If the filter comes up empty, +-- throw 'noTeamMember'; if the user is found and does not have the given permission, throw +-- 'operationDenied'. Otherwise, return the found user. permissionCheck :: Foldable m => UserId -> Perm -> m TeamMember -> Galley TeamMember permissionCheck u p t = case find ((u ==) . view userId) t of @@ -81,6 +85,15 @@ permissionCheck u p t = pure m Nothing -> throwM noTeamMember +-- | If the conversation is in a team, throw iff zusr is a team member and does not have named +-- permission. If the conversation is not in a team, do nothing (no error). +permissionCheckTeamConv :: UserId -> ConvId -> Perm -> Galley () +permissionCheckTeamConv zusr cnv perm = Data.conversation cnv >>= \case + Just cnv' -> case Data.convTeam cnv' of + Just tid -> void $ permissionCheck zusr perm =<< Data.teamMembers tid + Nothing -> pure () + Nothing -> throwM convNotFound + -- | Try to accept a 1-1 conversation, promoting connect conversations as appropriate. 
acceptOne2One :: UserId -> Data.Conversation -> Maybe ConnId -> Galley Data.Conversation acceptOne2One usr conv conn = case Data.convType conv of diff --git a/services/galley/src/Galley/Data.hs b/services/galley/src/Galley/Data.hs index 9aa988c84d1..dea741dec63 100644 --- a/services/galley/src/Galley/Data.hs +++ b/services/galley/src/Galley/Data.hs @@ -2,6 +2,7 @@ {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE TupleSections #-} {-# LANGUAGE TypeOperators #-} {-# LANGUAGE ViewPatterns #-} @@ -86,12 +87,14 @@ import Cassandra import Cassandra.Util import Control.Arrow (second) import Control.Lens hiding ((<|)) +import Control.Monad.Catch (MonadThrow) import Data.ByteString.Conversion hiding (parser) import Data.Id -import Data.Range -import Data.List.Split (chunksOf) +import Data.Json.Util (UTCTimeMillis(..)) import Data.List1 (List1, list1, singleton) +import Data.List.Split (chunksOf) import Data.Misc (Milliseconds) +import Data.Range import Data.Time.Clock import Data.UUID.V4 (nextRandom) import Galley.App @@ -125,7 +128,7 @@ import qualified System.Logger.Class as Log newtype ResultSet a = ResultSet { page :: Page a } schemaVersion :: Int32 -schemaVersion = 29 +schemaVersion = 30 -- | Insert a conversation code insertCode :: MonadClient m => Code -> m () @@ -181,13 +184,21 @@ teamConversations :: MonadClient m => TeamId -> m [TeamConversation] teamConversations t = map (uncurry newTeamConversation) <$> retry x1 (query Cql.selectTeamConvs (params Quorum (Identity t))) -teamMembers :: MonadClient m => TeamId -> m [TeamMember] -teamMembers t = map (uncurry newTeamMember) <$> +teamMembers :: forall m. (MonadThrow m, MonadClient m) => TeamId -> m [TeamMember] +teamMembers t = mapM newTeamMember' =<< retry x1 (query Cql.selectTeamMembers (params Quorum (Identity t))) + where + newTeamMember' :: (UserId, Permissions, Maybe UserId, Maybe UTCTimeMillis) -> m TeamMember + newTeamMember' (uid, perms, minvu, minvt) = + newTeamMemberRaw uid perms minvu minvt -teamMember :: MonadClient m => TeamId -> UserId -> m (Maybe TeamMember) -teamMember t u = fmap (newTeamMember u . runIdentity) <$> - retry x1 (query1 Cql.selectTeamMember (params Quorum (t, u))) +teamMember :: forall m. (MonadThrow m, MonadClient m) => TeamId -> UserId -> m (Maybe TeamMember) +teamMember t u = newTeamMember' u =<< retry x1 (query1 Cql.selectTeamMember (params Quorum (t, u))) + where + newTeamMember' :: UserId -> Maybe (Permissions, Maybe UserId, Maybe UTCTimeMillis) -> m (Maybe TeamMember) + newTeamMember' _ Nothing = pure Nothing + newTeamMember' uid (Just (perms, minvu, minvt)) = + Just <$> newTeamMemberRaw uid perms minvu minvt userTeams :: MonadClient m => UserId -> m [TeamId] userTeams u = map runIdentity <$> @@ -244,7 +255,12 @@ addTeamMember t m = retry x5 $ batch $ do setType BatchLogged setConsistency Quorum - addPrepQuery Cql.insertTeamMember (t, m^.userId, m^.permissions) + addPrepQuery Cql.insertTeamMember ( t + , m ^. userId + , m ^. permissions + , m ^? invitation . _Just . _1 + , m ^? invitation . _Just . 
_2 + ) addPrepQuery Cql.insertUserTeam (m^.userId, t) updateTeamMember :: MonadClient m => TeamId -> UserId -> Permissions -> m () diff --git a/services/galley/src/Galley/Data/Instances.hs b/services/galley/src/Galley/Data/Instances.hs index 3618497829d..f90c91a06ba 100644 --- a/services/galley/src/Galley/Data/Instances.hs +++ b/services/galley/src/Galley/Data/Instances.hs @@ -10,15 +10,12 @@ module Galley.Data.Instances where import Imports import Cassandra.CQL -import Control.Lens ((^.)) import Control.Error (note) import Galley.Types import Galley.Types.Bot import Galley.Types.Teams import Galley.Types.Teams.Intra -import qualified Data.Set - deriving instance Cql ServiceToken deriving instance Cql MutedStatus deriving instance Cql ReceiptMode @@ -74,22 +71,6 @@ instance Cql AccessRole where fromCql _ = fail "AccessRole value: int expected" -instance Cql Permissions where - ctype = Tagged $ UdtColumn "permissions" [("self", BigIntColumn), ("copy", BigIntColumn)] - - toCql p = - let f = CqlBigInt . fromIntegral . permsToInt in - CqlUdt [("self", f (p^.self)), ("copy", f (p^.copy))] - - fromCql (CqlUdt p) = do - let f = intToPerms . fromIntegral :: Int64 -> Data.Set.Set Perm - s <- note "missing 'self' permissions" ("self" `lookup` p) >>= fromCql - d <- note "missing 'copy' permissions" ("copy" `lookup` p) >>= fromCql - r <- note "invalid permissions" (newPermissions (f s) (f d)) - pure r - fromCql _ = fail "permissions: udt expected" - - instance Cql ConvTeamInfo where ctype = Tagged $ UdtColumn "teaminfo" [("teamid", UuidColumn), ("managed", BooleanColumn)] diff --git a/services/galley/src/Galley/Data/Queries.hs b/services/galley/src/Galley/Data/Queries.hs index c2fae0764b2..b4ef4cf02b1 100644 --- a/services/galley/src/Galley/Data/Queries.hs +++ b/services/galley/src/Galley/Data/Queries.hs @@ -10,6 +10,7 @@ import Brig.Types.Code import Cassandra as C hiding (Value) import Cassandra.Util import Data.Id +import Data.Json.Util import Data.Misc import Galley.Data.Types import Galley.Types hiding (Conversation) @@ -39,11 +40,11 @@ selectTeamConv = "select managed from team_conv where team = ? and conv = ?" selectTeamConvs :: PrepQuery R (Identity TeamId) (ConvId, Bool) selectTeamConvs = "select conv, managed from team_conv where team = ? order by conv" -selectTeamMember :: PrepQuery R (TeamId, UserId) (Identity Permissions) -selectTeamMember = "select perms from team_member where team = ? and user = ?" +selectTeamMember :: PrepQuery R (TeamId, UserId) (Permissions, Maybe UserId, Maybe UTCTimeMillis) +selectTeamMember = "select perms, invited_by, invited_at from team_member where team = ? and user = ?" -selectTeamMembers :: PrepQuery R (Identity TeamId) (UserId, Permissions) -selectTeamMembers = "select user, perms from team_member where team = ? order by user" +selectTeamMembers :: PrepQuery R (Identity TeamId) (UserId, Permissions, Maybe UserId, Maybe UTCTimeMillis) +selectTeamMembers = "select user, perms, invited_by, invited_at from team_member where team = ? order by user" selectUserTeams :: PrepQuery R (Identity UserId) (Identity TeamId) selectUserTeams = "select team from user_team where user = ? order by team" @@ -66,8 +67,8 @@ insertTeamConv = "insert into team_conv (team, conv, managed) values (?, ?, ?)" deleteTeamConv :: PrepQuery W (TeamId, ConvId) () deleteTeamConv = "delete from team_conv where team = ? and conv = ?" 
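-- The two new team_member columns written and read in this hunk (invited_by, invited_at)
-- are only meaningful as a pair.  A sketch of a plausible pairing rule, assuming the
-- constructor actually used in this patch (`newTeamMemberRaw`) rejects half-filled rows;
-- the function name below is illustrative only.
import Control.Exception (ErrorCall (..))
import Control.Monad.Catch (MonadThrow, throwM)
import Data.Id (UserId)
import Data.Json.Util (UTCTimeMillis)

invitationMeta :: MonadThrow m => Maybe UserId -> Maybe UTCTimeMillis -> m (Maybe (UserId, UTCTimeMillis))
invitationMeta (Just by) (Just at) = pure (Just (by, at))
invitationMeta Nothing   Nothing   = pure Nothing
invitationMeta _ _ =
    throwM (ErrorCall "team_member: invited_by and invited_at must both be present or both absent")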
-insertTeamMember :: PrepQuery W (TeamId, UserId, Permissions) () -insertTeamMember = "insert into team_member (team, user, perms) values (?, ?, ?)" +insertTeamMember :: PrepQuery W (TeamId, UserId, Permissions, Maybe UserId, Maybe UTCTimeMillis) () +insertTeamMember = "insert into team_member (team, user, perms, invited_by, invited_at) values (?, ?, ?, ?, ?)" deleteTeamMember :: PrepQuery W (TeamId, UserId) () deleteTeamMember = "delete from team_member where team = ? and user = ?" diff --git a/services/galley/src/Galley/Intra/Push.hs b/services/galley/src/Galley/Intra/Push.hs index 97df572576e..a2be67a683d 100644 --- a/services/galley/src/Galley/Intra/Push.hs +++ b/services/galley/src/Galley/Intra/Push.hs @@ -41,6 +41,7 @@ import Bilge.Retry import Galley.App import Galley.Options import Galley.Types +import Gundeck.Types.Push.V2 (RecipientClients(..)) import Control.Lens (makeLenses, set, view, (.~), (&), (^.)) import Control.Monad.Catch import Control.Retry @@ -73,16 +74,16 @@ pushEventJson (TeamEvent e) = toJSONObject e data Recipient = Recipient { _recipientUserId :: UserId - , _recipientClients :: [ClientId] + , _recipientClients :: RecipientClients } makeLenses ''Recipient recipient :: Member -> Recipient -recipient m = Recipient (memId m) [] +recipient m = Recipient (memId m) RecipientClientsAll userRecipient :: UserId -> Recipient -userRecipient u = Recipient u [] +userRecipient u = Recipient u RecipientClientsAll data Push = Push { _pushConn :: Maybe ConnId @@ -158,7 +159,7 @@ push ps = do toRecipient p r = Gundeck.recipient (_recipientUserId r) (_pushRoute p) - & Gundeck.recipientClients .~ _recipientClients r + & Gundeck.recipientClients .~ _recipientClients r ----------------------------------------------------------------------------- -- Helpers diff --git a/services/galley/test/integration/API.hs b/services/galley/test/integration/API.hs index 651acfee579..36175fbff07 100644 --- a/services/galley/test/integration/API.hs +++ b/services/galley/test/integration/API.hs @@ -419,12 +419,12 @@ postConvertTeamConv g b c setup = do alice <- randomUser b tid <- createTeamInternal g "foo" alice assertQueue "create team" a tActivate - let p1 = symmPermissions [Teams.AddConversationMember] - bobMem <- flip Teams.newTeamMember p1 <$> randomUser b + let p1 = symmPermissions [Teams.AddRemoveConvMember] + bobMem <- (\u -> Teams.newTeamMember u p1 Nothing) <$> randomUser b addTeamMemberInternal g tid bobMem let bob = bobMem^.Teams.userId assertQueue "team member (bob) join" a $ tUpdate 2 [alice] - daveMem <- flip Teams.newTeamMember p1 <$> randomUser b + daveMem <- (\u -> Teams.newTeamMember u p1 Nothing) <$> randomUser b addTeamMemberInternal g tid daveMem let dave = daveMem^.Teams.userId assertQueue "team member (dave) join" a $ tUpdate 3 [alice] diff --git a/services/galley/test/integration/API/MessageTimer.hs b/services/galley/test/integration/API/MessageTimer.hs index 67ec3c5dc0c..e9111115c1d 100644 --- a/services/galley/test/integration/API/MessageTimer.hs +++ b/services/galley/test/integration/API/MessageTimer.hs @@ -84,7 +84,7 @@ messageTimerChangeGuest g b _ca _ = do -- Create a team and a guest user [owner, member, guest] <- randomUsers b 3 connectUsers b owner (list1 member [guest]) - tid <- createTeam g "team" owner [Teams.newTeamMember member Teams.fullPermissions] + tid <- createTeam g "team" owner [Teams.newTeamMember member Teams.fullPermissions Nothing] -- Create a conversation cid <- createTeamConv g owner tid [member, guest] Nothing Nothing Nothing -- Try to change the 
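-- In the galley push code above, a Recipient no longer carries a plain [ClientId]: the new
-- RecipientClients type distinguishes "all clients of the user" from an explicit, non-empty
-- set.  Sketch of how a fan-out step might resolve it (illustrative; gundeck's own handling
-- appears later in this patch, where an empty stored list is read back as "all clients").
import Data.Foldable (toList)
import Data.Id (ClientId)
import Gundeck.Types.Push.V2 (RecipientClients (..))

resolveClients :: RecipientClients -> [ClientId] -> [ClientId]
resolveClients RecipientClientsAll         allKnownClients = allKnownClients
resolveClients (RecipientClientsSome some) _               = toList some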
timer (as the guest user) and observe failure diff --git a/services/galley/test/integration/API/Teams.hs b/services/galley/test/integration/API/Teams.hs index 86590afaeb2..8df89a99f81 100644 --- a/services/galley/test/integration/API/Teams.hs +++ b/services/galley/test/integration/API/Teams.hs @@ -1,4 +1,5 @@ {-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ViewPatterns #-} module API.Teams (tests) where @@ -50,8 +51,9 @@ tests s = testGroup "Teams API" , test s "create multiple binding teams fail" testCreateMulitpleBindingTeams , test s "create binding team with currency" testCreateBindingTeamWithCurrency , test s "create team with members" testCreateTeamWithMembers - , test s "create 1-1 conversation between binding team members (fail)" testCreateOne2OneFailNonBindingTeamMembers - , test s "create 1-1 conversation between binding team members" testCreateOne2OneWithMembers + , test s "create 1-1 conversation between non-binding team members (fail)" testCreateOne2OneFailNonBindingTeamMembers + , test s "create 1-1 conversation between binding team members" (testCreateOne2OneWithMembers RoleMember) + , test s "create 1-1 conversation between binding team members as partner" (testCreateOne2OneWithMembers RoleExternalPartner) , test s "add new team member" testAddTeamMember , test s "add new team member binding teams" testAddTeamMemberCheckBound , test s "add new team member internal" testAddTeamMemberInternal @@ -59,9 +61,12 @@ tests s = testGroup "Teams API" , test s "remove team member (binding, owner has passwd)" (testRemoveBindingTeamMember True) , test s "remove team member (binding, owner has no passwd)" (testRemoveBindingTeamMember False) , test s "add team conversation" testAddTeamConv + , test s "add team conversation as partner (fail)" testAddTeamConvAsExternalPartner , test s "add managed conversation through public endpoint (fail)" testAddManagedConv , test s "add managed team conversation ignores given users" testAddTeamConvWithUsers , test s "add team member to conversation without connection" testAddTeamMemberToConv + , test s "update conversation as member" (testUpdateTeamConv RoleMember) + , test s "update conversation as partner" (testUpdateTeamConv RoleExternalPartner) , test s "delete non-binding team" testDeleteTeam , test s "delete binding team (owner has passwd)" (testDeleteBindingTeam True) , test s "delete binding team (owner has no passwd)" (testDeleteBindingTeam False) @@ -128,9 +133,9 @@ testCreateTeamWithMembers g b c _ = do owner <- Util.randomUser b user1 <- Util.randomUser b user2 <- Util.randomUser b - let pp = Util.symmPermissions [CreateConversation, AddConversationMember] - let m1 = newTeamMember user1 pp - let m2 = newTeamMember user2 pp + let pp = Util.symmPermissions [CreateConversation, AddRemoveConvMember] + let m1 = newTeamMember' pp user1 + let m2 = newTeamMember' pp user2 Util.connectUsers b owner (list1 user1 [user2]) WS.bracketR3 c owner user1 user2 $ \(wsOwner, wsUser1, wsUser2) -> do tid <- Util.createTeam g "foo" owner [m1, m2] @@ -138,7 +143,7 @@ testCreateTeamWithMembers g b c _ = do mem <- Util.getTeamMembers g owner tid liftIO $ do assertEqual "members" - (Set.fromList [newTeamMember owner fullPermissions, m1, m2]) + (Set.fromList [newTeamMember' fullPermissions owner, m1, m2]) (Set.fromList (mem^.teamMembers)) void $ mapConcurrently (checkCreateEvent team) [wsOwner, wsUser1, wsUser2] where @@ -152,10 +157,10 @@ testCreateTeamWithMembers g b c _ = do testCreateOne2OneFailNonBindingTeamMembers :: Galley -> Brig -> Cannon -> Maybe Aws.Env -> 
Http () testCreateOne2OneFailNonBindingTeamMembers g b _ a = do owner <- Util.randomUser b - let p1 = Util.symmPermissions [CreateConversation, AddConversationMember] - let p2 = Util.symmPermissions [CreateConversation, AddConversationMember, AddTeamMember] - mem1 <- flip newTeamMember p1 <$> Util.randomUser b - mem2 <- flip newTeamMember p2 <$> Util.randomUser b + let p1 = Util.symmPermissions [CreateConversation, AddRemoveConvMember] + let p2 = Util.symmPermissions [CreateConversation, AddRemoveConvMember, AddTeamMember] + mem1 <- newTeamMember' p1 <$> Util.randomUser b + mem2 <- newTeamMember' p2 <$> Util.randomUser b Util.connectUsers b owner (list1 (mem1^.userId) [mem2^.userId]) tid <- Util.createTeam g "foo" owner [mem1, mem2] -- Cannot create a 1-1 conversation, not connected and in the same team but not binding @@ -173,13 +178,15 @@ testCreateOne2OneFailNonBindingTeamMembers g b _ a = do const 403 === statusCode const "non-binding-team-members" === (Error.label . Util.decodeBody' "error label") -testCreateOne2OneWithMembers :: HasCallStack => Galley -> Brig -> Cannon -> Maybe Aws.Env -> Http () -testCreateOne2OneWithMembers g b c a = do +testCreateOne2OneWithMembers + :: HasCallStack + => Role -- ^ Role of the user who creates the conversation + -> Galley -> Brig -> Cannon -> Maybe Aws.Env -> Http () +testCreateOne2OneWithMembers (rolePermissions -> perms) g b c a = do owner <- Util.randomUser b tid <- Util.createTeamInternal g "foo" owner assertQueue "create team" a tActivate - let p1 = Util.symmPermissions [CreateConversation] - mem1 <- flip newTeamMember p1 <$> Util.randomUser b + mem1 <- newTeamMember' perms <$> Util.randomUser b WS.bracketR c (mem1^.userId) $ \wsMem1 -> do Util.addTeamMemberInternal g tid mem1 @@ -197,15 +204,15 @@ testCreateOne2OneWithMembers g b c a = do testAddTeamMember :: Galley -> Brig -> Cannon -> Maybe Aws.Env -> Http () testAddTeamMember g b c _ = do owner <- Util.randomUser b - let p1 = Util.symmPermissions [CreateConversation, AddConversationMember] - let p2 = Util.symmPermissions [CreateConversation, AddConversationMember, AddTeamMember] - mem1 <- flip newTeamMember p1 <$> Util.randomUser b - mem2 <- flip newTeamMember p2 <$> Util.randomUser b + let p1 = Util.symmPermissions [CreateConversation, AddRemoveConvMember] + let p2 = Util.symmPermissions [CreateConversation, AddRemoveConvMember, AddTeamMember] + mem1 <- newTeamMember' p1 <$> Util.randomUser b + mem2 <- newTeamMember' p2 <$> Util.randomUser b Util.connectUsers b owner (list1 (mem1^.userId) [mem2^.userId]) Util.connectUsers b (mem1^.userId) (list1 (mem2^.userId) []) tid <- Util.createTeam g "foo" owner [mem1, mem2] - mem3 <- flip newTeamMember p1 <$> Util.randomUser b + mem3 <- newTeamMember' p1 <$> Util.randomUser b let payload = json (newNewTeamMember mem3) Util.connectUsers b (mem1^.userId) (list1 (mem3^.userId) []) Util.connectUsers b (mem2^.userId) (list1 (mem3^.userId) []) @@ -225,7 +232,7 @@ testAddTeamMemberCheckBound g b _ a = do tidBound <- Util.createTeamInternal g "foo" ownerBound assertQueue "create team" a tActivate - rndMem <- flip newTeamMember (Util.symmPermissions []) <$> Util.randomUser b + rndMem <- newTeamMember' (Util.symmPermissions []) <$> Util.randomUser b -- Cannot add any users to bound teams post (g . paths ["teams", toByteString' tidBound, "members"] . zUser ownerBound . zConn "conn" . json (newNewTeamMember rndMem)) !!! 
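-- The role-parametrised tests in this hunk use a view pattern to bind the role's permission
-- set directly in the argument list; desugared, `f (rolePermissions -> perms) = ...` is just
-- `f role = let perms = rolePermissions role in ...`.  A self-contained sketch of the idiom
-- (function name is illustrative only):
{-# LANGUAGE ViewPatterns #-}
import Control.Lens ((^.))
import Galley.Types.Teams (Role, rolePermissions, self)
import qualified Data.Set as Set

describeRole :: Role -> String
describeRole (rolePermissions -> perms) =
    "number of self permissions: " <> show (Set.size (perms ^. self))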
const 403 === statusCode @@ -233,7 +240,7 @@ testAddTeamMemberCheckBound g b _ a = do owner <- Util.randomUser b tid <- Util.createTeam g "foo" owner [] -- Cannot add bound users to any teams - let boundMem = newTeamMember ownerBound (Util.symmPermissions []) + let boundMem = newTeamMember' (Util.symmPermissions []) ownerBound post (g . paths ["teams", toByteString' tid, "members"] . zUser owner . zConn "conn" . json (newNewTeamMember boundMem)) !!! const 403 === statusCode @@ -242,7 +249,7 @@ testAddTeamMemberInternal g b c a = do owner <- Util.randomUser b tid <- Util.createTeam g "foo" owner [] let p1 = Util.symmPermissions [GetBilling] -- permissions are irrelevant on internal endpoint - mem1 <- flip newTeamMember p1 <$> Util.randomUser b + mem1 <- newTeamMember' p1 <$> Util.randomUser b WS.bracketRN c [owner, mem1^.userId] $ \[wsOwner, wsMem1] -> do Util.addTeamMemberInternal g tid mem1 @@ -260,10 +267,10 @@ testAddTeamMemberInternal g b c a = do testRemoveTeamMember :: Galley -> Brig -> Cannon -> Maybe Aws.Env -> Http () testRemoveTeamMember g b c _ = do owner <- Util.randomUser b - let p1 = Util.symmPermissions [AddConversationMember] - let p2 = Util.symmPermissions [AddConversationMember, RemoveTeamMember] - mem1 <- flip newTeamMember p1 <$> Util.randomUser b - mem2 <- flip newTeamMember p2 <$> Util.randomUser b + let p1 = Util.symmPermissions [AddRemoveConvMember] + let p2 = Util.symmPermissions [AddRemoveConvMember, RemoveTeamMember] + mem1 <- newTeamMember' p1 <$> Util.randomUser b + mem2 <- newTeamMember' p2 <$> Util.randomUser b mext1 <- Util.randomUser b mext2 <- Util.randomUser b mext3 <- Util.randomUser b @@ -308,8 +315,8 @@ testRemoveBindingTeamMember ownerHasPassword g b c a = do tid <- Util.createTeamInternal g "foo" owner assertQueue "create team" a tActivate mext <- Util.randomUser b - let p1 = Util.symmPermissions [AddConversationMember] - mem1 <- flip newTeamMember p1 <$> Util.randomUser b + let p1 = Util.symmPermissions [AddRemoveConvMember] + mem1 <- newTeamMember' p1 <$> Util.randomUser b Util.addTeamMemberInternal g tid mem1 assertQueue "team member join" a $ tUpdate 2 [owner] Util.connectUsers b owner (singleton mext) @@ -377,9 +384,9 @@ testAddTeamConv g b c _ = do owner <- Util.randomUser b extern <- Util.randomUser b - let p = Util.symmPermissions [CreateConversation, AddConversationMember] - mem1 <- flip newTeamMember p <$> Util.randomUser b - mem2 <- flip newTeamMember p <$> Util.randomUser b + let p = Util.symmPermissions [CreateConversation, AddRemoveConvMember] + mem1 <- newTeamMember' p <$> Util.randomUser b + mem2 <- newTeamMember' p <$> Util.randomUser b Util.connectUsers b owner (list1 (mem1^.userId) [extern, mem2^.userId]) tid <- Util.createTeam g "foo" owner [mem2] @@ -424,6 +431,27 @@ testAddTeamConv g b c _ = do WS.assertNoEvent timeout ws +testAddTeamConvAsExternalPartner :: Galley -> Brig -> Cannon -> Maybe Aws.Env -> Http () +testAddTeamConvAsExternalPartner g b _ _ = do + owner <- Util.randomUser b + memMember1 <- newTeamMember' (rolePermissions RoleMember) <$> Util.randomUser b + memMember2 <- newTeamMember' (rolePermissions RoleMember) <$> Util.randomUser b + memExternalPartner <- newTeamMember' (rolePermissions RoleExternalPartner) <$> Util.randomUser b + Util.connectUsers b owner + (list1 (memMember1^.userId) [memExternalPartner^.userId, memMember2^.userId]) + tid <- Util.createTeamInternal g "foo" owner + forM_ [memMember1, memMember2, memExternalPartner] $ \mem -> + Util.addTeamMemberInternal g tid mem + let acc = Just $ Set.fromList 
[InviteAccess, CodeAccess] + Util.createTeamConvAccessRaw g + (memExternalPartner^.userId) + tid + [memMember1^.userId, memMember2^.userId] + (Just "blaa") acc (Just TeamAccessRole) Nothing + !!! do + const 403 === statusCode + const "operation-denied" === (Error.label . Util.decodeBody' "error label") + testAddManagedConv :: Galley -> Brig -> Cannon -> Maybe Aws.Env -> Http () testAddManagedConv g b _c _ = do owner <- Util.randomUser b @@ -456,10 +484,10 @@ testAddTeamConvWithUsers g b _ _ = do testAddTeamMemberToConv :: Galley -> Brig -> Cannon -> Maybe Aws.Env -> Http () testAddTeamMemberToConv g b _ _ = do owner <- Util.randomUser b - let p = Util.symmPermissions [AddConversationMember] - mem1 <- flip newTeamMember p <$> Util.randomUser b - mem2 <- flip newTeamMember p <$> Util.randomUser b - mem3 <- flip newTeamMember (Util.symmPermissions []) <$> Util.randomUser b + let p = Util.symmPermissions [AddRemoveConvMember] + mem1 <- newTeamMember' p <$> Util.randomUser b + mem2 <- newTeamMember' p <$> Util.randomUser b + mem3 <- newTeamMember' (Util.symmPermissions []) <$> Util.randomUser b Util.connectUsers b owner (list1 (mem1^.userId) [mem2^.userId, mem3^.userId]) tid <- Util.createTeam g "foo" owner [mem1, mem2, mem3] @@ -470,7 +498,7 @@ testAddTeamMemberToConv g b _ _ = do -- Team member 1 (who is *not* a member of the new conversation) -- can add other team members without requiring a user connection -- thanks to both being team members and member 1 having the permission - -- `AddConversationMember`. + -- `AddRemoveConvMember`. Util.assertNotConvMember g (mem1^.userId) cid Util.postMembers g (mem1^.userId) (list1 (mem2^.userId) []) cid !!! const 200 === statusCode Util.assertConvMember g (mem2^.userId) cid @@ -482,11 +510,25 @@ testAddTeamMemberToConv g b _ _ = do const 403 === statusCode const "operation-denied" === (Error.label . Util.decodeBody' "error label") +testUpdateTeamConv + :: Role -- ^ Role of the user who creates the conversation + -> Galley -> Brig -> Cannon -> Maybe Aws.Env -> Http () +testUpdateTeamConv (rolePermissions -> perms) g b _ _ = do + owner <- Util.randomUser b + member <- Util.randomUser b + Util.connectUsers b owner (list1 member []) + tid <- Util.createTeam g "foo" owner [newTeamMember member perms Nothing] + cid <- Util.createTeamConv g owner tid [member] (Just "gossip") Nothing Nothing + resp <- updateTeamConv g member cid (ConversationRename "not gossip") + liftIO $ assertEqual "status" + (if ModifyConvMetadata `elem` (perms ^. 
self) then 200 else 403) + (statusCode resp) + testDeleteTeam :: Galley -> Brig -> Cannon -> Maybe Aws.Env -> Http () testDeleteTeam g b c a = do owner <- Util.randomUser b - let p = Util.symmPermissions [AddConversationMember] - member <- flip newTeamMember p <$> Util.randomUser b + let p = Util.symmPermissions [AddRemoveConvMember] + member <- newTeamMember' p <$> Util.randomUser b extern <- Util.randomUser b Util.connectUsers b owner (list1 (member^.userId) [extern]) @@ -535,12 +577,12 @@ testDeleteBindingTeam ownerHasPassword g b c a = do owner <- Util.randomUser' ownerHasPassword b tid <- Util.createTeamInternal g "foo" owner assertQueue "create team" a tActivate - let p1 = Util.symmPermissions [AddConversationMember] - mem1 <- flip newTeamMember p1 <$> Util.randomUser b - let p2 = Util.symmPermissions [AddConversationMember] - mem2 <- flip newTeamMember p2 <$> Util.randomUser b - let p3 = Util.symmPermissions [AddConversationMember] - mem3 <- flip newTeamMember p3 <$> Util.randomUser b + let p1 = Util.symmPermissions [AddRemoveConvMember] + mem1 <- newTeamMember' p1 <$> Util.randomUser b + let p2 = Util.symmPermissions [AddRemoveConvMember] + mem2 <- newTeamMember' p2 <$> Util.randomUser b + let p3 = Util.symmPermissions [AddRemoveConvMember] + mem3 <- newTeamMember' p3 <$> Util.randomUser b Util.addTeamMemberInternal g tid mem1 assertQueue "team member join 2" a $ tUpdate 2 [owner] Util.addTeamMemberInternal g tid mem2 @@ -603,7 +645,7 @@ testDeleteTeamConv :: Galley -> Brig -> Cannon -> Maybe Aws.Env -> Http () testDeleteTeamConv g b c _ = do owner <- Util.randomUser b let p = Util.symmPermissions [DeleteConversation] - member <- flip newTeamMember p <$> Util.randomUser b + member <- newTeamMember' p <$> Util.randomUser b extern <- Util.randomUser b Util.connectUsers b owner (list1 (member^.userId) [extern]) @@ -666,7 +708,7 @@ testUpdateTeam :: Galley -> Brig -> Cannon -> Maybe Aws.Env -> Http () testUpdateTeam g b c _ = do owner <- Util.randomUser b let p = Util.symmPermissions [DeleteConversation] - member <- flip newTeamMember p <$> Util.randomUser b + member <- newTeamMember' p <$> Util.randomUser b Util.connectUsers b owner (list1 (member^.userId) []) tid <- Util.createTeam g "foo" owner [member] let bad = object ["name" .= T.replicate 100 "too large"] @@ -703,11 +745,11 @@ testUpdateTeamMember :: Galley -> Brig -> Cannon -> Maybe Aws.Env -> Http () testUpdateTeamMember g b c a = do owner <- Util.randomUser b let p = Util.symmPermissions [SetMemberPermissions] - member <- flip newTeamMember p <$> Util.randomUser b + member <- newTeamMember' p <$> Util.randomUser b Util.connectUsers b owner (list1 (member^.userId) []) tid <- Util.createTeam g "foo" owner [member] -- Must have at least 1 member with full permissions - let changeOwner = newNewTeamMember (newTeamMember owner p) + let changeOwner = newNewTeamMember (newTeamMember' p owner) put ( g . paths ["teams", toByteString' tid, "members"] . zUser (member^.userId) @@ -739,8 +781,8 @@ testUpdateTeamMember g b c a = do ) !!! const 200 === statusCode owner' <- Util.getTeamMember g (member^.userId) tid owner liftIO $ assertEqual "permissions" (owner'^.permissions) (changeOwner^.ntmNewTeamMember.permissions) - -- owner no longer has GetPermissions so can't see actual update - checkTeamMemberUpdateEvent tid owner wsOwner Nothing + -- owner no longer has GetPermissions, but she can still see the update because it's about her! 
+ checkTeamMemberUpdateEvent tid owner wsOwner (pure p) checkTeamMemberUpdateEvent tid owner wsMember (pure p) WS.assertNoEvent timeout [wsOwner, wsMember] assertQueueEmpty a @@ -853,7 +895,7 @@ postCryptoBroadcastMessageJson g b c a = do connectUsers b alice (list1 charlie [dan]) tid1 <- createTeamInternal g "foo" alice assertQueue "" a tActivate - addTeamMemberInternal g tid1 $ newTeamMember bob (symmPermissions []) + addTeamMemberInternal g tid1 $ newTeamMember' (symmPermissions []) bob assertQueue "" a $ tUpdate 2 [alice] _ <- createTeamInternal g "foo" charlie assertQueue "" a tActivate @@ -889,7 +931,7 @@ postCryptoBroadcastMessageJson2 g b c a = do connectUsers b alice (list1 charlie []) tid1 <- createTeamInternal g "foo" alice assertQueue "" a tActivate - addTeamMemberInternal g tid1 $ newTeamMember bob (symmPermissions []) + addTeamMemberInternal g tid1 $ newTeamMember' (symmPermissions []) bob assertQueue "" a $ tUpdate 2 [alice] let t = 3 # Second -- WS receive timeout @@ -942,7 +984,7 @@ postCryptoBroadcastMessageProto g b c a = do connectUsers b alice (list1 charlie [dan]) tid1 <- createTeamInternal g "foo" alice assertQueue "" a tActivate - addTeamMemberInternal g tid1 $ newTeamMember bob (symmPermissions []) + addTeamMemberInternal g tid1 $ newTeamMember' (symmPermissions []) bob assertQueue "" a $ tUpdate 2 [alice] _ <- createTeamInternal g "foo" charlie assertQueue "" a tActivate @@ -999,3 +1041,6 @@ postCryptoBroadcastMessage100OrMaxConns g b c a = do (403, 403, _, [] ) -> error "Need to connect with at least 1 user" (403, 403, _, (x:xs)) -> return (x, xs) (xxx, yyy, _, _ ) -> error ("Unexpected while connecting users: " ++ show xxx ++ " and " ++ show yyy) + +newTeamMember' :: Permissions -> UserId -> TeamMember +newTeamMember' perms uid = newTeamMember uid perms Nothing diff --git a/services/galley/test/integration/API/Util.hs b/services/galley/test/integration/API/Util.hs index 8815feb58d7..f8bb8eed626 100644 --- a/services/galley/test/integration/API/Util.hs +++ b/services/galley/test/integration/API/Util.hs @@ -148,6 +148,17 @@ createTeamConvAccessRaw g u tid us name acc role mtimer = do . json conv ) +updateTeamConv :: Galley -> UserId -> ConvId -> ConversationRename -> Http ResponseLBS +updateTeamConv g zusr convid upd = do + put ( g + . paths ["/conversations", toByteString' convid] + . zUser zusr + . zConn "conn" + . zType "access" + . json upd + ) + +-- | See Note [managed conversations] createManagedConv :: HasCallStack => Galley -> UserId -> TeamId -> [UserId] -> Maybe Text -> Maybe (Set Access) -> Maybe Milliseconds -> Http ConvId createManagedConv g u tid us name acc mtimer = do let tinfo = ConvTeamInfo tid True diff --git a/services/gundeck/Makefile b/services/gundeck/Makefile index 615072f42df..b87def0051b 100644 --- a/services/gundeck/Makefile +++ b/services/gundeck/Makefile @@ -14,6 +14,8 @@ DEB_IT := $(NAME)-integration_$(VERSION)+$(BUILD)_amd64.deb DEB_SCHEMA := $(NAME)-schema_$(VERSION)+$(BUILD)_amd64.deb SDIST := dist/$(NAME)-$(VERSION).tar.gz EXECUTABLES := $(NAME) $(NAME)-integration $(NAME)-schema +DOCKER_USER ?= quay.io/wire +DOCKER_TAG ?= local guard-%: @ if [ "${${*}}" = "" ]; then \ @@ -125,11 +127,12 @@ db-migrate: fast .PHONY: docker docker: $(foreach executable,$(EXECUTABLES),\ - docker build -t $(executable) \ - -f ../../build/alpine/Dockerfile \ - --build-arg service=$(NAME) \ + docker build -t $(DOCKER_USER)/$(executable):$(DOCKER_TAG) \ + -f ../../build/alpine/Dockerfile.executable \ --build-arg executable=$(executable) \ - ../.. 
\ + ../.. && \ + docker tag $(DOCKER_USER)/$(executable):$(DOCKER_TAG) $(DOCKER_USER)/$(executable):latest && \ + if test -n "$$DOCKER_PUSH"; then docker login -u $(DOCKER_USERNAME) -p $(DOCKER_PASSWORD); docker push $(DOCKER_USER)/$(executable):$(DOCKER_TAG); docker push $(DOCKER_USER)/$(executable):latest; fi \ ;) .PHONY: time diff --git a/services/gundeck/gundeck.cabal b/services/gundeck/gundeck.cabal index 3454710534e..2414d54f2ac 100644 --- a/services/gundeck/gundeck.cabal +++ b/services/gundeck/gundeck.cabal @@ -22,14 +22,6 @@ library exposed-modules: Gundeck.API - Gundeck.Monad - Gundeck.Options - Gundeck.Push.Native.Serialise - Gundeck.Push.Native.Types - Gundeck.Push.Data - Gundeck.Util.DelayQueue - - other-modules: Gundeck.API.Error Gundeck.Aws Gundeck.Aws.Arn @@ -37,15 +29,21 @@ library Gundeck.Client Gundeck.Env Gundeck.Instances + Gundeck.Monad Gundeck.Notification Gundeck.Notification.Data + Gundeck.Options Gundeck.Presence Gundeck.Presence.Data Gundeck.Push + Gundeck.Push.Data Gundeck.Push.Native + Gundeck.Push.Native.Serialise + Gundeck.Push.Native.Types Gundeck.Push.Websocket Gundeck.React Gundeck.Util + Gundeck.Util.DelayQueue Gundeck.Util.Redis build-depends: @@ -78,7 +76,6 @@ library , http-types >= 0.8 , lens >= 4.4 , lens-aeson >= 1.0 - , lifted-async >= 0.2 , lifted-base >= 0.2 , metrics-core >= 0.2.1 , metrics-wai >= 0.5.7 @@ -170,7 +167,7 @@ executable gundeck-schema test-suite gundeck-tests type: exitcode-stdio-1.0 main-is: Main.hs - ghc-options: -Wall + ghc-options: -Wall -fwarn-tabs -threaded default-language: Haskell2010 default-extensions: NoImplicitPrelude hs-source-dirs: test/unit @@ -178,25 +175,44 @@ test-suite gundeck-tests other-modules: DelayQueue Json + MockGundeck Native + Push build-depends: base , aeson + , aeson-pretty , amazonka , base64-bytestring , bytestring - , HsOpenSSL + , containers + , exceptions + , filepath , gundeck , gundeck-types + , HsOpenSSL + , imports , lens + , MonadRandom + , mtl + , multiset + , network-uri + , QuickCheck + , quickcheck-instances + , random + , scientific + , string-conversions , tasty , tasty-hunit , tasty-quickcheck , text + , tinylog + , transformers , types-common , unordered-containers - , imports + , uuid + , vector benchmark gundeck-bench type: exitcode-stdio-1.0 diff --git a/services/gundeck/src/Gundeck/Aws/Arn.hs b/services/gundeck/src/Gundeck/Aws/Arn.hs index e2a5fd2dbdf..4aff99211b1 100644 --- a/services/gundeck/src/Gundeck/Aws/Arn.hs +++ b/services/gundeck/src/Gundeck/Aws/Arn.hs @@ -46,15 +46,15 @@ import Network.AWS.Data import qualified Data.Text as Text newtype ArnEnv = ArnEnv { arnEnvText :: Text } deriving (Show, ToText, FromJSON) -newtype Account = Account { fromAccount :: Text } deriving (Eq, Show, ToText, FromJSON) -newtype EndpointId = EndpointId Text deriving (Eq, Show, ToText) +newtype Account = Account { fromAccount :: Text } deriving (Eq, Ord, Show, ToText, FromJSON) +newtype EndpointId = EndpointId Text deriving (Eq, Ord, Show, ToText) data SnsArn a = SnsArn { _snsAsText :: !Text , _snsRegion :: !Region , _snsAccount :: !Account , _snsTopic :: !a - } deriving (Eq, Show) + } deriving (Eq, Ord, Show) data AppTopic = AppTopic { _appAsText :: !Text @@ -67,7 +67,7 @@ data EndpointTopic = EndpointTopic , _endpointTransport :: !Transport , _endpointAppName :: !AppName , _endpointId :: !EndpointId - } deriving (Eq, Show) + } deriving (Eq, Ord, Show) type AppArn = SnsArn AppTopic type EndpointArn = SnsArn EndpointTopic diff --git a/services/gundeck/src/Gundeck/Push.hs 
b/services/gundeck/src/Gundeck/Push.hs index 59eaf47d656..117505c31aa 100644 --- a/services/gundeck/src/Gundeck/Push.hs +++ b/services/gundeck/src/Gundeck/Push.hs @@ -13,6 +13,12 @@ module Gundeck.Push , addToken , listTokens , deleteToken + + -- (for testing) + , pushAll, pushAny + , MonadPushAll(..) + , MonadNativeTargets(..) + , MonadPushAny(..) ) where import Imports @@ -69,101 +75,132 @@ push (req ::: _) = do forM_ exs $ Log.err . msg . (val "Push failed: " +++) . show throwM (Error status500 "server-error" "Server Error") --- | Send individual HTTP requests to cannon for every device and notification. This should go away --- in the future, once 'pushAll' has been proven to always do the same thing. -pushAny :: [Push] -> Gundeck (Either (Seq.Seq SomeException) ()) -pushAny ps = collectErrors <$> mapAsync pushAny' ps +-- | Abstract over all effects in 'pushAll' (for unit testing). +class MonadThrow m => MonadPushAll m where + mpaNotificationTTL :: m NotificationTTL + mpaMkNotificationId :: m NotificationId + mpaListAllPresences :: [UserId] -> m [[Presence]] + mpaBulkPush :: [(Notification, [Presence])] -> m [(NotificationId, [Presence])] + mpaStreamAdd :: NotificationId -> List1 NotificationTarget -> List1 Aeson.Object -> NotificationTTL -> m () + mpaPushNative :: Notification -> Push -> [Address "no-keys"] -> m () + mpaForkIO :: m () -> m () + +instance MonadPushAll Gundeck where + mpaNotificationTTL = view (options . optSettings . setNotificationTTL) + mpaMkNotificationId = mkNotificationId + mpaListAllPresences = Presence.listAll + mpaBulkPush = Web.bulkPush + mpaStreamAdd = Stream.add + mpaPushNative = pushNative + mpaForkIO = void . forkIO + +-- | Abstract over all effects in 'nativeTargets' (for unit testing). +class Monad m => MonadNativeTargets m where + mntgtLogErr :: SomeException -> m () + mntgtLookupAddress :: UserId -> m [Address "no-keys"] -- ^ REFACTOR: rename to 'mntgtLookupAddresses'! + mntgtMapAsync :: (a -> m b) -> [a] -> m [Either SomeException b] + +instance MonadNativeTargets Gundeck where + mntgtLogErr e = Log.err (msg (val "Failed to get native push address: " +++ show e)) + mntgtLookupAddress rcp = Data.lookup rcp Data.One + mntgtMapAsync = mapAsync + +-- | Abstract over all effects in 'pushAny' (for unit testing). +class (MonadPushAll m, MonadNativeTargets m) => MonadPushAny m where + mpyPush :: Notification + -> List1 NotificationTarget + -> UserId + -> Maybe ConnId + -> Set ConnId + -> m [Presence] + +instance MonadPushAny Gundeck where + mpyPush = Web.push + +-- | Send individual HTTP requests to cannon for every device and notification. +-- +-- REFACTOR: This should go away in the future, once 'pushAll' has been proven to always do the same +-- thing. also check what types this removal would make unnecessary. +pushAny + :: forall m. (MonadPushAny m) + => [Push] -> m (Either (Seq.Seq SomeException) ()) +pushAny ps = collectErrors <$> mntgtMapAsync pushAny' ps where collectErrors :: [Either SomeException ()] -> Either (Seq.Seq SomeException) () collectErrors = runAllE . foldMap (AllE . fmapL Seq.singleton) -pushAny' :: Push -> Gundeck () +pushAny' + :: forall m. 
(MonadPushAny m) + => Push -> m () pushAny' p = do - i <- mkNotificationId + i <- mpaMkNotificationId let pload = p^.pushPayload let notif = Notification i (p^.pushTransient) pload let rcps = fromRange (p^.pushRecipients) let uniq = uncurry list1 $ head &&& tail $ toList rcps let tgts = mkTarget <$> uniq unless (p^.pushTransient) $ - Stream.add i tgts pload =<< view (options.optSettings.setNotificationTTL) - void . forkIO $ do - prs <- Web.push notif tgts (p^.pushOrigin) (p^.pushOriginConnection) (p^.pushConnections) - pushNative notif p =<< nativeTargets p prs + mpaStreamAdd i tgts pload =<< mpaNotificationTTL + mpaForkIO $ do + prs <- mpyPush notif tgts (p^.pushOrigin) (p^.pushOriginConnection) (p^.pushConnections) + unless (p^.pushTransient) $ + mpaPushNative notif p =<< nativeTargets p prs where mkTarget :: Recipient -> NotificationTarget - mkTarget r = target (r^.recipientId) & targetClients .~ r^.recipientClients - --- | This class abstracts over all effects in 'pushAll' in order to make unit testing possible. --- (Even though we ended up not having any unit tests in the end.) -class (MonadThrow m, MonadReader Env m) => MonadPushAll m where - mpaMkNotificationId :: m NotificationId - mpaListAllPresences :: ([UserId] -> m [[Presence]]) - mpaBulkPush :: ([(Notification, [Presence])] -> m [(NotificationId, [Presence])]) - mpaStreamAdd :: NotificationId -> List1 NotificationTarget -> List1 Aeson.Object -> NotificationTTL -> m () - mpaNativeTargets :: Push -> [Presence] -> m [Address "no-keys"] - mpaPushNative :: Notification -> Push -> [Address "no-keys"] -> m () - mpaForkIO :: m () -> m () - -instance MonadPushAll Gundeck where - mpaMkNotificationId = mkNotificationId - mpaListAllPresences = Presence.listAll - mpaBulkPush = Web.bulkPush - mpaStreamAdd = Stream.add - mpaNativeTargets = nativeTargets - mpaPushNative = pushNative - mpaForkIO = void . forkIO + mkTarget r = + target (r^.recipientId) + & targetClients .~ case r^.recipientClients of + RecipientClientsAll -> [] + RecipientClientsSome cs -> toList cs -- | Construct and send a single bulk push request to the client. Write the 'Notification's from -- the request to C*. Trigger native pushes for all delivery failures notifications. -pushAll :: MonadPushAll m => [Push] -> m () +pushAll :: (MonadPushAll m, MonadNativeTargets m) => [Push] -> m () pushAll pushes = do targets :: [(Push, (Notification, List1 (Recipient, [Presence])))] <- zip pushes <$> (mkNotificationAndTargets `mapM` pushes) -- persist push request - let cassandraTargets :: [(Push, (Notification, List1 NotificationTarget))] - cassandraTargets = (_2 . _2 %~ (mkNotificationTarget . fst <$>)) <$> targets + cassandraTargets = (_2 . _2 %~ (mkTarget . fst <$>)) <$> targets where - mkNotificationTarget :: Recipient -> NotificationTarget - mkNotificationTarget r = target (r ^. recipientId) - & targetClients .~ r ^. recipientClients + mkTarget :: Recipient -> NotificationTarget + mkTarget r = + target (r^.recipientId) + & targetClients .~ case r^.recipientClients of + RecipientClientsAll -> [] + -- clients are stored in cassandra as a list with a notification. empty list + -- is interpreted as "all clients" by 'Gundeck.Notification.Data.toNotif'. + RecipientClientsSome cs -> toList cs forM_ cassandraTargets $ \(psh, (notif, notifTrgt)) -> unless (psh ^. pushTransient) $ mpaStreamAdd (ntfId notif) notifTrgt (psh ^. pushPayload) - =<< view (options . optSettings . 
setNotificationTTL) + =<< mpaNotificationTTL mpaForkIO $ do -- websockets - - let notifIdMap = Map.fromList $ (\(psh, (notif, _)) -> (ntfId notif, (notif, psh))) <$> targets - resp <- mapM (compilePushResp notifIdMap) =<< mpaBulkPush (compilePushReq <$> targets) + resp <- compilePushResps targets <$> mpaBulkPush (compilePushReq <$> targets) -- native push - - forM_ resp $ \((notif, psh), alreadySent) -> do - natives <- mpaNativeTargets psh alreadySent - mpaPushNative notif psh natives + forM_ resp $ \((notif, psh), alreadySent) -> unless (psh ^. pushTransient) $ + mpaPushNative notif psh =<< nativeTargets psh alreadySent +-- REFACTOR: @[Presence]@ here should be @newtype WebSockedDelivered = WebSockedDelivered [Presence]@ compilePushReq :: (Push, (Notification, List1 (Recipient, [Presence]))) -> (Notification, [Presence]) compilePushReq (psh, notifsAndTargets) = notifsAndTargets & _2 %~ (mconcat . fmap compileTargets . toList) where compileTargets :: (Recipient, [Presence]) -> [Presence] - compileTargets (rcp, pre) = fmap snd - . filter (uncurry (shouldActuallyPush psh)) - $ (rcp,) <$> pre - -compilePushResp :: MonadThrow m - => Map.Map NotificationId (Notification, Push) - -> (NotificationId, [Presence]) - -> m ((Notification, Push), [Presence]) -compilePushResp notifIdMap (notifId, prcs) = (, prcs) <$> lkup - where - lkup = maybe (throwM internalError) pure $ Map.lookup notifId notifIdMap - internalError = ErrorCall "bulkpush: dangling notificationId in response!" + compileTargets (rcp, pre) = filter (shouldActuallyPush psh rcp) pre + +compilePushResps + :: [(Push, (Notification, any))] + -> [(NotificationId, [Presence])] + -> [((Notification, Push), [Presence])] +compilePushResps notifIdMap (Map.fromList -> deliveries) = + notifIdMap <&> + (\(psh, (notif, _)) -> ((notif, psh), fromMaybe [] (Map.lookup (ntfId notif) deliveries))) -- | Look up 'Push' recipients in Redis, construct a notifcation, and return all the data needed for @@ -209,30 +246,30 @@ shouldActuallyPush psh rcp pres = not isOrigin && okByPushWhitelist && okByRecip okByRecipientWhitelist :: Bool okByRecipientWhitelist = case (rcp ^. recipientClients, clientId pres) of - (cs@(_:_), Just c) -> c `elem` cs - _ -> True + (RecipientClientsSome cs, Just c) -> c `elem` cs + _ -> True --- | Failures to pushy natively can be ignored. Logging already happens in +-- | Failures to push natively can be ignored. Logging already happens in -- 'Gundeck.Push.Native.push1', and we cannot recover from any of the error cases. pushNative :: Notification -> Push -> [Address "no-keys"] -> Gundeck () pushNative _ _ [] = return () -pushNative (ntfTransient -> True) _ _ = Log.warn $ msg (val "Transient notification failed") pushNative notif p rcps = do let prio = p^.pushNativePriority void $ Native.push (Native.Notice (ntfId notif) prio Nothing) rcps -nativeTargets :: Push -> [Presence] -> Gundeck [Address "no-keys"] +nativeTargets :: forall m. MonadNativeTargets m => Push -> [Presence] -> m [Address "no-keys"] nativeTargets p pres = let rcps' = filter routeNative (toList (fromRange (p^.pushRecipients))) - in mapAsync addresses rcps' >>= fmap concat . mapM check + in mntgtMapAsync addresses rcps' >>= fmap concat . mapM check where -- Interested in native pushes? 
routeNative u = u^.recipientRoute /= RouteDirect && (u^.recipientId /= p^.pushOrigin || p^.pushNativeIncludeOrigin) + addresses :: Recipient -> m [Address "no-keys"] addresses u = do - addrs <- Data.lookup (u^.recipientId) Data.One + addrs <- mntgtLookupAddress (u^.recipientId) return $ preference . filter (eligible u) $ addrs @@ -242,6 +279,8 @@ nativeTargets p pres = | a^.addrUser == p^.pushOrigin && Just (a^.addrConn) == p^.pushOriginConnection = False -- Is the specific client an intended recipient? | not (eligibleClient a (u^.recipientClients)) = False + -- Is the client not whitelisted? + | not (whitelistedOrNoWhitelist a) = False -- Include client if not found in presences. | otherwise = isNothing (List.find (isOnline a) pres) @@ -250,8 +289,11 @@ nativeTargets p pres = equalClient a x = Just (a^.addrClient) == Presence.clientId x - eligibleClient _ [] = True - eligibleClient a cs = (a^.addrClient) `elem` cs + eligibleClient _ RecipientClientsAll = True + eligibleClient a (RecipientClientsSome cs) = (a^.addrClient) `elem` cs + + whitelistedOrNoWhitelist a = null (p^.pushConnections) + || a^.addrConn `elem` p^.pushConnections -- Apply transport preference in case of alternative transports for the -- same client (currently only APNS vs APNS VoIP). If no explicit @@ -275,8 +317,8 @@ nativeTargets p pres = LowPriority -> ApsStdPreference HighPriority -> ApsVoIPPreference - check (Left e) = Log.err (msg (val "Failed to get native push address: " +++ show e)) - >> return [] + check :: Either SomeException [a] -> m [a] + check (Left e) = mntgtLogErr e >> return [] check (Right r) = return r addToken :: UserId ::: ConnId ::: Request ::: JSON ::: JSON -> Gundeck Response diff --git a/services/gundeck/src/Gundeck/Push/Native/Types.hs b/services/gundeck/src/Gundeck/Push/Native/Types.hs index 8c5e489abbe..0ff656f7a0c 100644 --- a/services/gundeck/src/Gundeck/Push/Native/Types.hs +++ b/services/gundeck/src/Gundeck/Push/Native/Types.hs @@ -7,7 +7,7 @@ module Gundeck.Push.Native.Types ( Result (..) , Failure (..) , Message (..) - , Address (Address) + , Address (..) , addrUser , addrTransport , addrApp @@ -34,6 +34,9 @@ import Gundeck.Aws.Arn import Gundeck.Types -- | Native push address information of a device. +-- +-- REFACTOR: the @s@ phantom type can probably go away, too! +-- REFACTOR: PushToken is embedded in this type, that should probably become a tree? especially since EnpointArn is also nested. 
data Address (s :: Symbol) = Address { _addrUser :: !UserId , _addrTransport :: !Transport @@ -43,6 +46,7 @@ data Address (s :: Symbol) = Address , _addrConn :: !ConnId , _addrClient :: !ClientId } + deriving (Eq, Ord) makeLenses ''Address diff --git a/services/gundeck/src/Gundeck/Push/Websocket.hs b/services/gundeck/src/Gundeck/Push/Websocket.hs index c10b3d7d2ca..c48b5db1ad2 100644 --- a/services/gundeck/src/Gundeck/Push/Websocket.hs +++ b/services/gundeck/src/Gundeck/Push/Websocket.hs @@ -6,15 +6,15 @@ {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE ViewPatterns #-} -module Gundeck.Push.Websocket (push, bulkPush) where +module Gundeck.Push.Websocket (push, bulkPush, MonadBulkPush(..)) where import Imports -import Bilge +import Bilge hiding (trace) import Bilge.Retry (rpcHandlers) import Bilge.RPC import Control.Arrow ((&&&)) import Control.Exception (ErrorCall(ErrorCall)) -import Control.Monad.Catch (MonadThrow, throwM, catch, try) +import Control.Monad.Catch (MonadThrow, MonadCatch, MonadMask, throwM, catch, try) import Control.Lens ((^.), (%~), _2, view) import Control.Retry import Data.Aeson (encode, eitherDecode) @@ -42,12 +42,29 @@ import qualified Network.HTTP.Client.Internal as Http import qualified Network.URI as URI import qualified System.Logger.Class as Log +class (Monad m, MonadThrow m, Log.MonadLogger m) => MonadBulkPush m where + mbpBulkSend :: URI -> BulkPushRequest -> m (URI, Either SomeException BulkPushResponse) + mbpDeleteAllPresences :: [Presence] -> m () + mbpPosixTime :: m Milliseconds + mbpMapConcurrently :: Traversable t => (a -> m b) -> t a -> m (t b) + mbpMonitorBadCannons :: (URI, (SomeException, [Presence])) -> m () + +instance MonadBulkPush Gundeck where + mbpBulkSend = bulkSend + mbpDeleteAllPresences = Presence.deleteAll + mbpPosixTime = posixTime + mbpMapConcurrently = mapConcurrently + mbpMonitorBadCannons = monitorBadCannons + -- | Send a 'Notification's to associated 'Presence's. Send at most one request to each Cannon. -- Return the lists of 'Presence's successfully reached for each resp. 'Notification'. -bulkPush :: [(Notification, [Presence])] -> Gundeck [(NotificationId, [Presence])] +bulkPush :: forall m. MonadBulkPush m => [(Notification, [Presence])] -> m [(NotificationId, [Presence])] + -- REFACTOR: make presences lists (and notification list) non-empty where applicable? are there + -- better types to express more of our semantics / invariants? (what about duplicates in presence + -- lists?) bulkPush notifs = do let reqs = fanOut notifs - flbck <- flowBack <$> (uncurry bulkSend `mapConcurrently` reqs) + flbck <- flowBack <$> (uncurry mbpBulkSend `mbpMapConcurrently` reqs) let -- lookup by 'URI' can fail iff we screwed up URI handling in this module. presencesByCannon = mkPresencesByCannon . mconcat $ snd <$> notifs @@ -64,12 +81,12 @@ bulkPush notifs = do successes :: [(NotificationId, Presence)] <- (\(nid, trgt) -> (nid,) <$> presenceByPushTarget trgt) `mapM` flowBackDelivered flbck - logBadCannons `mapM_` badCannons - logPrcsGone `mapM_` prcsGone - logSuccesses `mapM_` successes + (\info -> mbpMonitorBadCannons info >> logBadCannons info) `mapM_` badCannons + logPrcsGone `mapM_` prcsGone + logSuccesses `mapM_` successes - Presence.deleteAll =<< do - now <- posixTime + mbpDeleteAllPresences =<< do + now <- mbpPosixTime let deletions = prcsGone <> (filter dead . mconcat $ snd . 
snd <$> badCannons) dead prc = now - createdAt prc > 10 * posixDay posixDay = Ms (round (1000 * posixDayLength)) @@ -78,11 +95,14 @@ bulkPush notifs = do pure (groupAssoc successes) -- | log all cannons with response status @/= 200@. -logBadCannons :: (MonadIO m, MonadReader Env m, Log.MonadLogger m) - => (URI, (SomeException, [Presence])) -> m () -logBadCannons (uri, (err, prcs)) = do +monitorBadCannons :: (MonadIO m, MonadReader Env m) + => (uri, (error, [Presence])) -> m () +monitorBadCannons (_uri, (_err, prcs)) = do view monitor >>= Metrics.counterAdd (fromIntegral $ length prcs) (Metrics.path "push.ws.unreachable") + +logBadCannons :: Log.MonadLogger m => (URI, (SomeException, [Presence])) -> m () +logBadCannons (uri, (err, prcs)) = do forM_ prcs $ \prc -> Log.warn $ logPresence prc ~~ Log.field "created_at" (ms $ createdAt prc) @@ -114,10 +134,14 @@ fanOut pullUri :: (notif, [Presence]) -> [(notif, (URI, Presence))] pullUri (notif, prcs) = (notif,) . (bulkresource &&& id) <$> prcs -bulkSend :: URI -> BulkPushRequest -> Gundeck (URI, Either SomeException BulkPushResponse) +bulkSend + :: forall m. (MonadIO m, MonadThrow m, MonadCatch m, MonadMask m, HasRequestId m, MonadHttp m) + => URI -> BulkPushRequest -> m (URI, Either SomeException BulkPushResponse) bulkSend uri req = (uri,) <$> ((Right <$> bulkSend' uri req) `catch` (pure . Left)) -bulkSend' :: URI -> BulkPushRequest -> Gundeck BulkPushResponse +bulkSend' + :: forall m. (MonadIO m, MonadThrow m, MonadCatch m, MonadMask m, HasRequestId m, MonadHttp m) + => URI -> BulkPushRequest -> m BulkPushResponse bulkSend' uri (encode -> jsbody) = do req <- ( check . method POST diff --git a/services/gundeck/test/integration/API.hs b/services/gundeck/test/integration/API.hs index 0211353d7dd..a1f726d8010 100644 --- a/services/gundeck/test/integration/API.hs +++ b/services/gundeck/test/integration/API.hs @@ -12,8 +12,8 @@ module API (TestSetup(..), tests) where import Bilge import Bilge.Assert import Control.Arrow ((&&&)) -import Control.Concurrent.Async (Async, async, wait, forConcurrently_) -import Control.Lens ((.~), (^.), (^?), view, (<&>)) +import Control.Concurrent.Async (Async, async, wait, concurrently_, forConcurrently_) +import Control.Lens ((.~), (^.), (^?), view, (<&>), _2, (%~)) import Control.Retry (retrying, constantDelay, limitRetries) import Data.Aeson hiding (json) import Data.Aeson.Lens @@ -112,6 +112,10 @@ tests s = testGroup "Gundeck integration tests" [ [ test s "register a push token" $ testRegisterPushToken , test s "unregister a push token" $ testUnregisterPushToken ], + testGroup "Websocket pingpong" + [ test s "pings produce pongs" $ testPingPong + , test s "non-pings are ignored" $ testNoPingNoPong + ], -- TODO: The following tests require (at the moment), the usage real AWS -- services so they are kept in a separate group to simplify testing testGroup "RealAWS" @@ -135,7 +139,7 @@ removeUser g c _ s = do clt <- randomClientId tok <- randomToken clt gcmToken _ <- registerPushToken user tok g - _ <- sendPush g (buildPush user [(user, [])] (textPayload "data")) + _ <- sendPush g (buildPush user [(user, RecipientClientsAll)] (textPayload "data")) deleteUser g user ntfs <- listNotifications user Nothing g liftIO $ do @@ -389,7 +393,8 @@ targetClientPush gu ca _ _ = do where pevent c = HashMap.fromList [ "foo" .= client c ] pload c = List1.singleton (pevent c) - rcpt u c = recipient u RouteAny & recipientClients .~ [c] + rcpt u c = recipient u RouteAny + & recipientClients .~ RecipientClientsSome (List1.singleton c) push 
u c = newPush u (unsafeRange (Set.singleton (rcpt u c))) (pload c) ----------------------------------------------------------------------------- @@ -405,7 +410,7 @@ testFetchAllNotifs :: TestSignature () testFetchAllNotifs gu _ _ _ = do ally <- randomId let pload = textPayload "hello" - replicateM_ 10 (sendPush gu (buildPush ally [(ally, [])] pload)) + replicateM_ 10 (sendPush gu (buildPush ally [(ally, RecipientClientsAll)] pload)) ns <- listNotifications ally Nothing gu liftIO $ assertEqual "Unexpected notification count" 10 (length ns) liftIO $ assertEqual "Unexpected notification payloads" @@ -416,7 +421,7 @@ testFetchNewNotifs :: TestSignature () testFetchNewNotifs gu _ _ _ = do ally <- randomId let pload = textPayload "hello" - replicateM_ 4 (sendPush gu (buildPush ally [(ally, [])] pload)) + replicateM_ 4 (sendPush gu (buildPush ally [(ally, RecipientClientsAll)] pload)) ns <- map (view queuedNotificationId) <$> listNotifications ally Nothing gu get ( runGundeck gu . zUser ally @@ -429,7 +434,7 @@ testFetchNewNotifs gu _ _ _ = do testNoNewNotifs :: TestSignature () testNoNewNotifs gu _ _ _ = do ally <- randomId - sendPush gu (buildPush ally [(ally, [])] (textPayload "hello")) + sendPush gu (buildPush ally [(ally, RecipientClientsAll)] (textPayload "hello")) (n:_) <- map (view queuedNotificationId) <$> listNotifications ally Nothing gu get ( runGundeck gu . zUser ally @@ -442,10 +447,10 @@ testNoNewNotifs gu _ _ _ = do testMissingNotifs :: TestSignature () testMissingNotifs gu _ _ _ = do other <- randomId - sendPush gu (buildPush other [(other, [])] (textPayload "hello")) + sendPush gu (buildPush other [(other, RecipientClientsAll)] (textPayload "hello")) (old:_) <- map (view queuedNotificationId) <$> listNotifications other Nothing gu ally <- randomId - sendPush gu (buildPush ally [(ally, [])] (textPayload "hello")) + sendPush gu (buildPush ally [(ally, RecipientClientsAll)] (textPayload "hello")) ns <- listNotifications ally Nothing gu get ( runGundeck gu . zUser ally @@ -457,8 +462,8 @@ testMissingNotifs gu _ _ _ = do testFetchLastNotif :: TestSignature () testFetchLastNotif gu _ _ _ = do ally <- randomId - sendPush gu (buildPush ally [(ally, [])] (textPayload "first")) - sendPush gu (buildPush ally [(ally, [])] (textPayload "last")) + sendPush gu (buildPush ally [(ally, RecipientClientsAll)] (textPayload "first")) + sendPush gu (buildPush ally [(ally, RecipientClientsAll)] (textPayload "last")) [_, n] <- listNotifications ally Nothing gu get (runGundeck gu . zUser ally . paths ["notifications", "last"]) !!! do const 200 === statusCode @@ -474,7 +479,7 @@ testNoLastNotif gu _ _ _ = do testFetchNotifBadSince :: TestSignature () testFetchNotifBadSince gu _ _ _ = do ally <- randomId - sendPush gu (buildPush ally [(ally, [])] (textPayload "first")) + sendPush gu (buildPush ally [(ally, RecipientClientsAll)] (textPayload "first")) ns <- listNotifications ally Nothing gu get ( runGundeck gu . 
zUser ally @@ -488,8 +493,10 @@ testFetchNotifById gu _ _ _ = do ally <- randomId c1 <- randomClientId c2 <- randomClientId - sendPush gu (buildPush ally [(ally, [c1])] (textPayload "first")) - sendPush gu (buildPush ally [(ally, [c2])] (textPayload "second")) + sendPush gu (buildPush ally [(ally, RecipientClientsSome (List1.singleton c1))] + (textPayload "first")) + sendPush gu (buildPush ally [(ally, RecipientClientsSome (List1.singleton c2))] + (textPayload "second")) [n1, n2] <- listNotifications ally Nothing gu forM_ [(n1, c1), (n2, c2)] $ \(n, c) -> let nid = toByteString' (view queuedNotificationId n) @@ -510,7 +517,8 @@ testFilterNotifByClient gu _ _ _ = do clt3 <- randomClientId -- Add a notification for client 1 - sendPush gu (buildPush alice [(alice, [clt1])] (textPayload "first")) + sendPush gu (buildPush alice [(alice, RecipientClientsSome (List1.singleton clt1))] + (textPayload "first")) [n] <- listNotifications alice (Just clt1) gu -- get all for the first client @@ -527,7 +535,8 @@ testFilterNotifByClient gu _ _ _ = do const (Just [n]) === parseNotifications -- Add another notification for client 3 - sendPush gu (buildPush alice [(alice, [clt3])] (textPayload "last")) + sendPush gu (buildPush alice [(alice, RecipientClientsSome (List1.singleton clt3))] + (textPayload "last")) [n'] <- listNotifications alice (Just clt3) gu -- get last for the first client @@ -548,8 +557,9 @@ testFilterNotifByClient gu _ _ _ = do const (Just n') === parseNotification -- Add a lot of notifications for client 3 - replicateM_ 101 $ sendPush gu (buildPush alice [(alice, [clt3])] - (textPayload "final")) + replicateM_ 101 $ sendPush gu + (buildPush alice [(alice, RecipientClientsSome (List1.singleton clt3))] + (textPayload "final")) ns <- listNotifications alice (Just clt3) gu liftIO $ assertBool "notification count" (length ns == 102) @@ -572,7 +582,7 @@ testNotificationPaging :: TestSignature () testNotificationPaging gu _ _ _ = do -- Without client ID u1 <- randomId - replicateM_ 399 (insert u1 Nothing) + replicateM_ 399 (insert u1 RecipientClientsAll) paging u1 Nothing 399 399 [399, 0] paging u1 Nothing 399 100 [100, 100, 100, 99, 0] paging u1 Nothing 399 101 [101, 101, 101, 96, 0] @@ -581,7 +591,9 @@ testNotificationPaging gu _ _ _ = do u2 <- randomId clients@[c1, c2, c3] <- replicateM 3 randomClientId let numClients = length clients - forM_ [0..999] (insert u2 . Just . (clients !!) . (`mod` numClients)) + forM_ [0..999] $ \i -> do + let c = clients !! 
(i `mod` numClients) + insert u2 (RecipientClientsSome (List1.singleton c)) -- View of client 1 paging u2 (Just c1) 334 100 [100, 100, 100, 34, 0] paging u2 (Just c1) 334 334 [334, 0] @@ -594,14 +606,14 @@ testNotificationPaging gu _ _ _ = do -- With overlapped pages and excess elements on the last page u3 <- randomId - replicateM_ 90 (insert u3 (Just c1)) - replicateM_ 20 (insert u3 (Just c2)) - replicateM_ 20 (insert u3 (Just c1)) + replicateM_ 90 $ insert u3 (RecipientClientsSome (List1.singleton c1)) + replicateM_ 20 $ insert u3 (RecipientClientsSome (List1.singleton c2)) + replicateM_ 20 $ insert u3 (RecipientClientsSome (List1.singleton c1)) paging u3 (Just c1) 110 100 [100, 10, 0] paging u3 (Just c1) 110 110 [110, 0] paging u3 (Just c2) 20 100 [20, 0] where - insert u c = sendPush gu (buildPush u [(u, maybeToList c)] (textPayload "data")) + insert u c = sendPush gu (buildPush u [(u, c)] (textPayload "data")) paging u c total step = foldM_ (next u c (total, step)) (0, Nothing) @@ -711,6 +723,28 @@ testUnregisterPushToken g _ b _ = do void $ retryWhileN 12 (not . null) (listPushTokens uid g) unregisterPushToken uid (tkn^.token) g !!! const 404 === statusCode +testPingPong :: TestSignature () +testPingPong gu ca _ _ = do + uid :: UserId <- randomId + connid :: ConnId <- randomConnId + [(_, [(chread, chwrite)] :: [(TChan ByteString, TChan ByteString)])] + <- connectUsersAndDevicesWithSendingClients gu ca [(uid, [connid])] + liftIO $ do + atomically $ writeTChan chwrite "ping" + msg <- waitForMessage chread + assertBool "no pong" $ msg == Just "pong" + +testNoPingNoPong :: TestSignature () +testNoPingNoPong gu ca _ _ = do + uid :: UserId <- randomId + connid :: ConnId <- randomConnId + [(_, [(chread, chwrite)] :: [(TChan ByteString, TChan ByteString)])] + <- connectUsersAndDevicesWithSendingClients gu ca [(uid, [connid])] + liftIO $ do + atomically $ writeTChan chwrite "Wire is so much nicer with internet!" + msg <- waitForMessage chread + assertBool "unexpected response on non-ping" $ isNothing msg + testSharePushToken :: TestSignature () testSharePushToken g _ b _ = do gcmTok <- Token . T.decodeUtf8 . 
toByteString' <$> randomId @@ -808,13 +842,25 @@ connectUser gu ca uid con = do [(_, [ch])] <- connectUsersAndDevices gu ca [(uid, [con])] return ch -connectUsersAndDevices :: HasCallStack => Gundeck -> Cannon -> [(UserId, [ConnId])] -> Http [(UserId, [TChan ByteString])] -connectUsersAndDevices gu ca uidsAndConnIds = do +connectUsersAndDevices + :: HasCallStack + => Gundeck -> Cannon -> [(UserId, [ConnId])] + -> Http [(UserId, [TChan ByteString])] +connectUsersAndDevices gu ca uidsAndConnIds = + strip <$> connectUsersAndDevicesWithSendingClients gu ca uidsAndConnIds + where strip = fmap (_2 %~ fmap fst) + +connectUsersAndDevicesWithSendingClients + :: HasCallStack + => Gundeck -> Cannon -> [(UserId, [ConnId])] + -> Http [(UserId, [(TChan ByteString, TChan ByteString)])] +connectUsersAndDevicesWithSendingClients gu ca uidsAndConnIds = do chs <- forM uidsAndConnIds $ \(uid, conns) -> (uid,) <$> do forM conns $ \conn -> do - ch <- liftIO $ atomically newTChan - _ <- wsRun ca uid conn (wsReader ch) - pure ch + chread <- liftIO $ atomically newTChan + chwrite <- liftIO $ atomically newTChan + _ <- wsRun ca uid conn (wsReaderWriter chread chwrite) + pure (chread, chwrite) (\(uid, conns) -> wsAssertPresences gu uid (length conns)) `mapM_` uidsAndConnIds pure chs @@ -841,8 +887,10 @@ wsAssertPresences gu uid numPres = do wsCloser :: MVar () -> WS.ClientApp () wsCloser m conn = takeMVar m >> WS.sendClose conn C.empty >> putMVar m () -wsReader :: TChan ByteString -> WS.ClientApp () -wsReader ch conn = forever $ WS.receiveData conn >>= atomically . writeTChan ch +wsReaderWriter :: TChan ByteString -> TChan ByteString -> WS.ClientApp () +wsReaderWriter chread chwrite conn = concurrently_ + (forever $ WS.receiveData conn >>= atomically . writeTChan chread) + (forever $ WS.sendTextData conn =<< atomically (readTChan chwrite)) retryWhile :: (MonadIO m) => (a -> Bool) -> m a -> m a retryWhile = retryWhileN 10 @@ -929,7 +977,9 @@ sendPushes :: HasCallStack => Gundeck -> [Push] -> Http () sendPushes gu push = post ( runGundeck gu . path "i/push/v2" . json push ) !!! const 200 === statusCode -buildPush :: HasCallStack => UserId -> [(UserId, [ClientId])] -> List1 Object -> Push +buildPush + :: HasCallStack + => UserId -> [(UserId, RecipientClients)] -> List1 Object -> Push buildPush sdr rcps pload = let rcps' = Set.fromList (map (uncurry rcpt) rcps) in newPush sdr (unsafeRange rcps') pload diff --git a/services/gundeck/test/unit/Json.hs b/services/gundeck/test/unit/Json.hs index 42481a0dd9e..8f3a93d1b03 100644 --- a/services/gundeck/test/unit/Json.hs +++ b/services/gundeck/test/unit/Json.hs @@ -61,9 +61,15 @@ serialiseOkProp r = property $ genRecipient :: Gen Recipient genRecipient = do r <- recipient <$> arbitrary <*> elements [ RouteAny, RouteDirect, RouteNative ] - c <- arbitrary + c <- genRecipientClients return $ r & set recipientClients c +genRecipientClients :: Gen RecipientClients +genRecipientClients = + oneof [ pure RecipientClientsAll + , RecipientClientsSome . 
List1 <$> arbitrary + ] + genBulkPushRequest :: Gen BulkPushRequest genBulkPushRequest = BulkPushRequest <$> shortListOf ((,) <$> genNotification <*> scale (`div` 3) (listOf genPushTarget)) diff --git a/services/gundeck/test/unit/Main.hs b/services/gundeck/test/unit/Main.hs index 417901cee34..bafdeb5c085 100644 --- a/services/gundeck/test/unit/Main.hs +++ b/services/gundeck/test/unit/Main.hs @@ -7,11 +7,13 @@ import Test.Tasty import qualified DelayQueue import qualified Json import qualified Native +import qualified Push main :: IO () main = withOpenSSL . defaultMain $ testGroup "Main" - [ Native.tests - , DelayQueue.tests + [ DelayQueue.tests , Json.tests + , Native.tests + , Push.tests ] diff --git a/services/gundeck/test/unit/MockGundeck.hs b/services/gundeck/test/unit/MockGundeck.hs new file mode 100644 index 00000000000..8f60de60cac --- /dev/null +++ b/services/gundeck/test/unit/MockGundeck.hs @@ -0,0 +1,747 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE TemplateHaskell #-} +{-# LANGUAGE TupleSections #-} +{-# LANGUAGE TypeApplications #-} +{-# LANGUAGE TypeSynonymInstances #-} +{-# LANGUAGE ViewPatterns #-} + +{-# OPTIONS_GHC -Wno-orphans #-} + +-- | This is 'MockGundeck', a re-implementation of the 'Gundeck' monad with mock effects. This has +-- been developed to test 'Gundeck.Push.pushAll' and 'Gundeck.Push.Websocket.bulkPush', but can be +-- extended. +-- +-- Besides 'MockGundeck' and its instances, there are two important types: 'MockEnv' and +-- 'MockState'. 'MockEnv' contains the system input to the test run; 'MockState' is manipulated by +-- the mock implementations and can be examined to make sure all the expected effects have been +-- caused. +-- +-- This module is structured as follows: first, 'MockEnv' and 'MockState' are defined with their +-- 'Arbitrary' generators. Then, 'MockGundeck' and its instances are defined; then the actual mock +-- effects are implemented. +-- +-- There is a cascade of mockings: 'pushAll' can be run on the same input as 'mockPushAll', and +-- outputs and states can be tested to be equal. 'pushAll' calls 'bulkPush', but in the @instance +-- MonadPushAll MockGundeck@, you can either call the real thing or 'mockBulkPush'. 
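A minimal sketch of that comparison, assuming only this module plus Test.QuickCheck (the property name is illustrative; the actual properties added in test/unit/Push.hs below also size the generators and additionally compare against the legacy 'pushAny'):

    import Gundeck.Push (pushAll)
    import MockGundeck
    import Test.QuickCheck

    -- Run the real 'pushAll' and the model 'mockPushAll' on the same generated
    -- environment and pushes; the resulting 'MockState's must coincide.  The
    -- 'Pretty' wrapper only changes how counterexamples are printed.
    prop_pushAllAgreesWithModel :: Property
    prop_pushAllAgreesWithModel =
      forAllShrink (Pretty <$> genMockEnv) (shrinkPretty shrinkMockEnv) $ \(Pretty env) ->
        forAllShrink (Pretty <$> genPushes env) (shrinkPretty shrinkPushes) $ \(Pretty pushes) ->
          snd (runMockGundeck env (pushAll pushes))
            === snd (runMockGundeck env (mockPushAll pushes))

This only works because 'pushAll' is written against the 'MonadPushAll' and 'MonadNativeTargets' classes rather than the concrete 'Gundeck' monad, so the same code runs unchanged in 'MockGundeck'.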
+module MockGundeck where + +import Imports +import Control.Lens +import Control.Monad.Catch +import Control.Monad.Except +import Control.Monad.Random +import Control.Monad.State +import Data.Aeson +import Data.Id +import Data.IntMultiSet (IntMultiSet) +import Data.List1 +import Data.Misc ((<$$>), Milliseconds(Ms)) +import Data.Range +import Data.String.Conversions +import Gundeck.Aws.Arn as Aws +import Gundeck.Options +import Gundeck.Push +import Gundeck.Push.Native as Native +import Gundeck.Push.Websocket as Web +import Gundeck.Types hiding (recipient) +import Gundeck.Types.BulkPush +import System.Logger.Class as Log hiding (trace) +import Test.QuickCheck as QC +import Test.QuickCheck.Instances () + +import qualified Data.Aeson as Aeson +import qualified Data.Aeson.Encode.Pretty as Aeson +import qualified Data.HashMap.Lazy as HashMap +import qualified Data.IntMultiSet as MSet +import qualified Data.List.NonEmpty as NE +import qualified Data.Map as Map +import qualified Data.Set as Set +import qualified Data.Scientific as Scientific +import qualified Network.URI as URI + + +---------------------------------------------------------------------- +-- env + +-- | We really don't care about the actual payloads anywhere in these tests, just that the right +-- ones arrive over the right connections. So 'genPayload' is not very exhaustive, but only +-- generates small objects with one field containing a numeric value. It would be nice to represent +-- this in the 'Payload' type, but the 'List1 Aeson.Object' structure is used in the production +-- code, so in the end it is more awkward than nice. +type Payload = List1 Aeson.Object + +data ClientInfo = ClientInfo + { _ciNativeAddress :: Maybe (Address "no-keys", Bool{- reachable -}) + , _ciWSReachable :: Bool + } + deriving (Eq, Show) + +newtype MockEnv = MockEnv + { _meClientInfos :: Map UserId (Map ClientId ClientInfo) + } + deriving (Eq, Show) + +data MockState = MockState + { -- | A record of notifications that have been pushed via websockets. + _msWSQueue :: NotifQueue + -- | A record of notifications that have been pushed via native push. + , _msNativeQueue :: NotifQueue + -- | Non-transient notifications that are stored in the database first thing before + -- delivery (so clients can always come back and pick them up later until they expire). + , _msCassQueue :: NotifQueue + } + deriving (Eq) + +-- | For each client we store the set of notifications they are scheduled to receive. Notification +-- 'Payload's are converted into 'Int's for simplicity and to enable less verbose test errors (see +-- 'payloadToInt'). +type NotifQueue = Map (UserId, ClientId) IntMultiSet + +makeLenses ''ClientInfo +makeLenses ''MockEnv +makeLenses ''MockState + +instance Show MockState where + show (MockState w n c) = intercalate "\n" + ["", "websocket: " <> show w, "native: " <> show n, "cassandra: " <> show c, ""] + +emptyMockState :: MockState +emptyMockState = MockState mempty mempty mempty + +-- these custom instances make for better error reports if tests fail. +instance ToJSON MockEnv where + toJSON (MockEnv mp) = Aeson.object + [ "clientInfos" Aeson..= mp ] + +instance ToJSON ClientInfo where + toJSON (ClientInfo native wsreach) = Aeson.object + [ "native" Aeson..= native + , "wsReachable" Aeson..= wsreach + ] + +instance ToJSON (Address s) where + toJSON adr = Aeson.object + [ "user" Aeson..= (adr ^. addrUser) + , "transport" Aeson..= (adr ^. addrTransport) + , "app" Aeson..= (adr ^. addrApp) + , "token" Aeson..= (adr ^. 
addrToken) + , "endpoint" Aeson..= (serializeFakeAddrEndpoint $ adr ^. addrEndpoint) + , "conn" Aeson..= (adr ^. addrConn) + , "client" Aeson..= (adr ^. addrClient) + ] + +serializeFakeAddrEndpoint :: EndpointArn -> (Text, Transport, AppName) +serializeFakeAddrEndpoint ((^. snsTopic) -> eptopic) = + ( case eptopic ^. endpointId of EndpointId txt -> txt + , eptopic ^. endpointTransport + , eptopic ^. endpointAppName + ) + +instance FromJSON MockEnv where + parseJSON = withObject "MockEnv" $ \env -> MockEnv + <$> env Aeson..: "clientInfos" + +instance FromJSON ClientInfo where + parseJSON = withObject "ClientInfo" $ \cinfo -> ClientInfo + <$> (cinfo Aeson..: "native") + <*> (cinfo Aeson..: "wsReachable") + +instance FromJSON (Address s) where + parseJSON = withObject "Address" $ \adr -> Address + <$> (adr Aeson..: "user") + <*> (adr Aeson..: "transport") + <*> (adr Aeson..: "app") + <*> (adr Aeson..: "token") + <*> (mkFakeAddrEndpoint <$> adr Aeson..: "endpoint") + <*> (adr Aeson..: "conn") + <*> (adr Aeson..: "client") + +mkFakeAddrEndpoint :: (Text, Transport, AppName) -> EndpointArn +mkFakeAddrEndpoint (epid, transport, app) = Aws.mkSnsArn Tokyo (Account "acc") eptopic + where eptopic = mkEndpointTopic (ArnEnv "") transport app (EndpointId epid) + + +---------------------------------------------------------------------- +-- env generators + +-- | Generate an environment containing a mix of recipients with different +-- levels of brokenness: +-- +-- 1. web socket delivery will work +-- 2. web socket delivery will NOT work, native push token registered, push will succeed +-- 3. web socket delivery will NOT work, native push token registered, push will fail +-- 4. web socket delivery will NOT work, no native push token registered +genMockEnv :: HasCallStack => Gen MockEnv +genMockEnv = do + -- This function generates a 'ClientInfo' that corresponds to one of the + -- four scenarios above + let genClientInfo :: HasCallStack => UserId -> ClientId -> Gen ClientInfo + genClientInfo uid cid = do + _ciNativeAddress <- QC.oneof + [ pure Nothing + , do + protoaddr <- genProtoAddress uid cid + reachable <- arbitrary + pure $ Just (protoaddr, reachable) + ] + _ciWSReachable <- arbitrary + pure ClientInfo{..} + + -- Generate a list of users + uids :: [UserId] + <- nub <$> listOf1 genId + + -- For every user, generate several clients (preferring less clients) + cidss :: [[ClientId]] + <- let gencids _uid = do + len <- QC.frequency [ (4, QC.choose (1, 3)) + , (1, QC.choose (4, 8)) + ] + vectorOf len genClientId + + nubrec = upper mempty + where + upper _ [] = [] + upper visited (ys:xs) = case lower visited ys of + ([], visited') -> upper visited' xs -- uids, cidss are not aligned! + (ys', visited') -> ys' : upper visited' xs + + lower visited [] = ([], visited) + lower visited (y:ys) = if y `elem` visited + then lower visited ys + else case lower (y `Set.insert` visited) ys of + (ys', visited') -> (y : ys', visited') + + in nubrec <$> forM uids gencids + + -- Build an 'MockEnv' containing a map with all those 'ClientInfo's, and + -- check that it validates + env <- MockEnv . Map.fromList . fmap (_2 %~ Map.fromList) <$> do + forM (zip uids cidss) $ \(uid, cids) -> (uid,) <$> do + forM cids $ \cid -> (cid,) <$> genClientInfo uid cid + + validateMockEnv env & either error (const $ pure env) + +-- Try to shrink a 'MockEnv' by removing some users from '_meClientInfos'. +shrinkMockEnv :: HasCallStack => MockEnv -> [MockEnv] +shrinkMockEnv (MockEnv cis) = + MockEnv . Map.fromList <$> + filter (not . 
null) (shrinkList (const []) (Map.toList cis)) + +validateMockEnv :: forall m. MonadError String m => MockEnv -> m () +validateMockEnv env = do + checkIdsInNativeAddresses + -- (if you want to vaidate anything else, here is the place!) + where + -- UserId and ClientId contained in Address must match the keys under which they are stored. + checkIdsInNativeAddresses :: m () + checkIdsInNativeAddresses = do + forM_ (Map.toList $ env ^. meClientInfos) $ \(uid, cinfos) -> do + forM_ (Map.toList cinfos) $ \(cid, cinfo) -> do + forM_ (cinfo ^. ciNativeAddress) $ \(adr, _) -> do + unless (uid == adr ^. addrUser && cid == adr ^. addrClient) $ do + throwError (show (uid, cid, adr)) + +genRecipients :: HasCallStack => Int -> MockEnv -> Gen [Recipient] +genRecipients numrcp env = do + uids <- take numrcp <$> shuffle (allUsers env) + genRecipient' env `mapM` uids + +genRecipient :: HasCallStack => MockEnv -> Gen Recipient +genRecipient env = do + uid <- QC.elements (allUsers env) + genRecipient' env uid + +genRecipient' :: HasCallStack => MockEnv -> UserId -> Gen Recipient +genRecipient' env uid = do + route <- genRoute + cids <- QC.frequency + [ (1, pure RecipientClientsAll) + , (3, RecipientClientsSome <$> sublist1Of (clientIdsOfUser env uid)) + ] + pure $ Recipient uid route cids + +-- REFACTOR: see 'Route' type about missing 'RouteNative'. +genRoute :: HasCallStack => Gen Route +genRoute = QC.elements [RouteAny, RouteDirect] + +genId :: Gen (Id a) +genId = do + gen <- mkStdGen <$> arbitrary + pure . Id . fst $ random gen + +genClientId :: Gen ClientId +genClientId = newClientId <$> arbitrary + +genProtoAddress :: HasCallStack => UserId -> ClientId -> Gen (Address "no-keys") +genProtoAddress _addrUser _addrClient = do + _addrTransport :: Transport <- QC.elements [minBound..] + arnEpId :: Text <- arbitrary + let _addrApp = "AppName" + _addrToken = Token "tok" + _addrEndpoint = mkFakeAddrEndpoint (arnEpId, _addrTransport, _addrApp) + _addrConn = fakeConnId _addrClient + pure Address {..} + +genPushes :: MockEnv -> Gen [Push] +genPushes = listOf . genPush + +genPush :: HasCallStack => MockEnv -> Gen Push +genPush env = do + let alluids = allUsers env + sender <- QC.elements alluids + rcps :: Range 1 1024 (Set Recipient) <- do + numrcp <- choose (1, min 1024 (length alluids)) + rcps <- genRecipients numrcp env + unsafeRange . Set.fromList <$> dropSomeDevices `mapM` rcps + pload <- genPayload + inclorigin <- arbitrary + transient <- arbitrary + + let connIdsByUser = fmap fakeConnId <$> Map.fromList (allRecipients env) + allConnIds = mconcat $ Map.elems connIdsByUser + onlyPushToConnections <- do + -- from the list of all recipient connections, sometimes add some here. + oneof [ pure mempty + , fmap Set.fromList $ QC.sublistOf allConnIds + ] + originConnection <- do + -- if one of the recipients is the sender, we may 'Just' pick one of the devices of that + -- recipient here, or 'Nothing'. + let genOriginConnId = case mconcat . fmap extract . toList . 
fromRange $ rcps of + [] -> pure Nothing + conns@(_:_) -> Just <$> QC.elements conns + where + extract (Recipient uid _ _) | uid /= sender = [] + extract (Recipient _ _ (RecipientClientsSome cids)) = fakeConnId <$> toList cids + extract (Recipient _ _ RecipientClientsAll) = lookupAll + where lookupAll = fromMaybe [] $ Map.lookup sender connIdsByUser + oneof [ pure Nothing + , genOriginConnId + ] + + pure $ newPush sender rcps pload + & pushConnections .~ onlyPushToConnections + & pushOriginConnection .~ originConnection + & pushTransient .~ transient + & pushNativeIncludeOrigin .~ inclorigin + -- (not covered: pushNativeAps, pushNativePriority) + +-- | Shuffle devices. With probability 0.5, drop at least one device, but not all. If number of +-- devices is @<2@ or if devices are set to 'RecipientClientsAll', the input is returned. +dropSomeDevices :: Recipient -> Gen Recipient +dropSomeDevices = recipientClients %%~ \case + RecipientClientsAll -> pure RecipientClientsAll + RecipientClientsSome cids -> do + numdevs :: Int <- oneof [ pure $ length cids + , choose (1, max 1 (length cids - 1)) + ] + RecipientClientsSome . unsafeList1 . take numdevs <$> + QC.shuffle (toList cids) + +shrinkPushes :: HasCallStack => [Push] -> [[Push]] +shrinkPushes = shrinkList shrinkPush + where + shrinkPush :: HasCallStack => Push -> [Push] + shrinkPush psh = (\rcps -> psh & pushRecipients .~ rcps) <$> shrinkRecipients (psh ^. pushRecipients) + + shrinkRecipients :: HasCallStack => Range 1 1024 (Set Recipient) -> [Range 1 1024 (Set Recipient)] + shrinkRecipients = fmap unsafeRange . map Set.fromList . filter (not . null) . shrinkList shrinkRecipient . Set.toList . fromRange + + shrinkRecipient :: HasCallStack => Recipient -> [Recipient] + shrinkRecipient _ = [] + +-- | See 'Payload'. +genPayload :: Gen Payload +genPayload = do + num :: Int <- arbitrary + pure $ List1 (HashMap.singleton "val" (Aeson.toJSON num) NE.:| []) + +genNotif :: Gen Notification +genNotif = Notification <$> genId <*> arbitrary <*> genPayload + +genNotifs :: MockEnv -> Gen [(Notification, [Presence])] +genNotifs env = fmap uniqNotifs . listOf $ do + notif <- genNotif + prcs <- nub . mconcat <$> listOf (fakePresences' env <$> genRecipient env) + pure (notif, prcs) + where + uniqNotifs = nubBy ((==) `on` (ntfId . fst)) + +shrinkNotifs :: HasCallStack => [(Notification, [Presence])] -> [[(Notification, [Presence])]] +shrinkNotifs = shrinkList (\(notif, prcs) -> (notif,) <$> shrinkList (const []) prcs) + + +---------------------------------------------------------------------- +-- monad type and instances + +newtype MockGundeck a = MockGundeck + { fromMockGundeck :: ReaderT MockEnv (StateT MockState (RandT StdGen Identity)) a } + deriving (Functor, Applicative, Monad, MonadReader MockEnv, MonadState MockState, MonadRandom) + +runMockGundeck :: MockEnv -> MockGundeck a -> (a, MockState) +runMockGundeck env (MockGundeck m) = + runIdentity . (`evalRandT` mkStdGen 0) $ runStateT (runReaderT m env) emptyMockState + +instance MonadThrow MockGundeck where + throwM = error . show -- (we are not expecting any interesting errors in these tests, so we might + -- as well crash badly here, as long as it doesn't go unnoticed...) + +instance MonadPushAll MockGundeck where + mpaNotificationTTL = pure $ NotificationTTL 300 -- (longer than we want any test to take.) 
+ mpaMkNotificationId = mockMkNotificationId + mpaListAllPresences = mockListAllPresences + mpaBulkPush = mockBulkPush + mpaStreamAdd = mockStreamAdd + mpaPushNative = mockPushNative + mpaForkIO = id -- just don't fork. (this *may* cause deadlocks in principle, but as long as it + -- doesn't, this is good enough for testing). + +instance MonadNativeTargets MockGundeck where + mntgtLogErr _ = pure () + mntgtLookupAddress = mockLookupAddress + mntgtMapAsync f xs = Right <$$> mapM f xs -- (no concurrency) + +instance MonadPushAny MockGundeck where + mpyPush = mockOldSimpleWebPush + +instance MonadBulkPush MockGundeck where + mbpBulkSend = mockBulkSend + mbpDeleteAllPresences _ = pure () -- FUTUREWORK: test presence deletion logic + mbpPosixTime = pure $ Ms 1545045904275 -- (time is constant) + mbpMapConcurrently = mapM -- (no concurrency) + mbpMonitorBadCannons _ = pure () -- (no monitoring) + +instance Log.MonadLogger MockGundeck where + log _ _ = pure () -- (no logging) + + +---------------------------------------------------------------------- +-- monad implementation + +-- | For a set of push notifications, compute the expected result of sending all of them. +-- This should match the result of doing 'Gundeck.Push.pushAll'. +-- +-- Every push causes some notifications to be sent via websockets, sent via native transport, +-- and stored in Cassandra. The complete logic of handling a push is correspondingly specified +-- in 'handlePushWS', 'handlePushNative' and 'handlePushCass' respectively. Those parts are all +-- independent of each other. +mockPushAll + :: (HasCallStack, m ~ MockGundeck) + => [Push] -> m () +mockPushAll pushes = do + forM_ pushes $ \psh -> do + handlePushWS psh + handlePushNative psh + handlePushCass psh + +-- | From a single 'Push', deliver only those notifications that real Gundeck would deliver via +-- websockets. +handlePushWS + :: (HasCallStack, m ~ MockGundeck) + => Push -> m () +handlePushWS Push{..} = do + env <- ask + forM_ (fromRange _pushRecipients) $ \(Recipient uid _ cids) -> do + let cids' = case cids of + RecipientClientsAll -> clientIdsOfUser env uid + RecipientClientsSome cc -> toList cc + forM_ cids' $ \cid -> do + -- Condition 1: only devices with a working websocket connection will get the push. + let isReachable = wsReachable env (uid, cid) + -- Condition 2: we never deliver pushes to the originating device. + let isOriginDevice = origin == (uid, Just cid) + -- Condition 3: push to cid iff (a) listed in pushConnections or (b) pushConnections is empty. + let isWhitelisted = null _pushConnections || fakeConnId cid `elem` _pushConnections + when (isReachable && not isOriginDevice && isWhitelisted) $ + msWSQueue %= deliver (uid, cid) _pushPayload + where + origin = (_pushOrigin, clientIdFromConnId <$> _pushOriginConnection) + +-- | From a single 'Push', deliver eligible 'Notification's via native transport. +handlePushNative + :: (HasCallStack, m ~ MockGundeck) + => Push -> m () +handlePushNative Push{..} + -- Condition 1: transient pushes are not sent via native transport. + | _pushTransient = pure () +handlePushNative Push{..} = do + env <- ask + forM_ (fromRange _pushRecipients) $ \(Recipient uid route cids) -> do + let cids' = case cids of + RecipientClientsAll -> clientIdsOfUser env uid + RecipientClientsSome cc -> toList cc + forM_ cids' $ \cid -> do + -- Condition 2: 'RouteDirect' pushes are not eligible for pushing via native transport. 
+ let isNative = route /= RouteDirect + -- Condition 3: to get a native push, the device must be native-reachable but not + -- websocket-reachable, as websockets take priority. + let isReachable = nativeReachable env (uid, cid) && not (wsReachable env (uid, cid)) + -- Condition 4: the originating *user* can receive a native push only if + -- 'pushNativeIncludeOrigin' is true. Even so, the originating *device* should never + -- receive a push. + let isOriginUser = uid == fst origin + isOriginDevice = origin == (uid, Just cid) + isAllowedPerOriginRules = + not isOriginUser || (_pushNativeIncludeOrigin && not isOriginDevice) + -- Condition 5: push to cid iff (a) listed in pushConnections or (b) pushConnections is empty. + let isWhitelisted = null _pushConnections || fakeConnId cid `elem` _pushConnections + when (isNative && isReachable && isAllowedPerOriginRules && isWhitelisted) $ + msNativeQueue %= deliver (uid, cid) _pushPayload + where + origin = (_pushOrigin, clientIdFromConnId <$> _pushOriginConnection) + +-- | From a single 'Push', store only those notifications that real Gundeck would put into +-- Cassandra. +handlePushCass + :: (HasCallStack, m ~ MockGundeck) + => Push -> m () +handlePushCass Push{..} + -- Condition 1: transient pushes are not put into Cassandra. + | _pushTransient = pure () +handlePushCass Push{..} = do + forM_ (fromRange _pushRecipients) $ \(Recipient uid _ cids) -> do + let cids' = case cids of + RecipientClientsAll -> [ClientId mempty] + -- clients are stored in cassandra as a list with a notification. empty list is + -- intepreted as "all clients" by 'Gundeck.Notification.Data.toNotif'. (here, we just + -- store a specific 'ClientId' that signifies "no client".) + RecipientClientsSome cc -> toList cc + forM_ cids' $ \cid -> + msCassQueue %= deliver (uid, cid) _pushPayload + +mockMkNotificationId + :: (HasCallStack, m ~ MockGundeck) + => m NotificationId +mockMkNotificationId = Id <$> getRandom + +mockListAllPresences + :: (HasCallStack, m ~ MockGundeck) + => [UserId] -> m [[Presence]] +mockListAllPresences uids = + asks $ fmap fakePresences . filter ((`elem` uids) . fst) . allRecipients + +-- | Fake implementation of 'Web.bulkPush'. +mockBulkPush + :: (HasCallStack, m ~ MockGundeck) + => [(Notification, [Presence])] -> m [(NotificationId, [Presence])] +mockBulkPush notifs = do + env <- ask + + let delivered :: [(Notification, [Presence])] + delivered = [ (nid, prcs) + | (nid, filter (`elem` deliveredprcs) -> prcs) <- notifs + , not $ null prcs -- (sic!) (this is what gundeck currently does) + ] + + deliveredprcs :: [Presence] + deliveredprcs = filter isreachable . mconcat . fmap fakePresences $ allRecipients env + + isreachable :: Presence -> Bool + isreachable prc = wsReachable env (userId prc, fromJust $ clientId prc) + + forM_ delivered $ \(notif, prcs) -> do + forM_ prcs $ \prc -> msWSQueue %= + deliver (userId prc, clientIdFromConnId $ connId prc) (ntfPayload notif) + + pure $ (_1 %~ ntfId) <$> delivered + +-- | persisting notification is not needed for the tests at the moment, so we do nothing here. +mockStreamAdd + :: (HasCallStack, m ~ MockGundeck) + => NotificationId -> List1 NotificationTarget -> Payload -> NotificationTTL -> m () +mockStreamAdd _ (toList -> targets) pay _ = + forM_ targets $ \tgt -> case (tgt ^. targetClients) of + clients@(_:_) -> forM_ clients $ \cid -> + msCassQueue %= deliver (tgt ^. targetUser, cid) pay + [] -> + msCassQueue %= deliver (tgt ^. 
targetUser, ClientId mempty) pay + +mockPushNative + :: (HasCallStack, m ~ MockGundeck) + => Notification -> Push -> [Address "no-keys"] -> m () +mockPushNative _nid ((^. pushPayload) -> payload) addrs = do + env <- ask + forM_ addrs $ \addr -> do + when (nativeReachableAddr env addr) $ msNativeQueue %= + deliver (addr ^. addrUser, addr ^. addrClient) payload + +mockLookupAddress + :: (HasCallStack, m ~ MockGundeck) + => UserId -> m [Address "no-keys"] +mockLookupAddress uid = do + cinfos :: [ClientInfo] + <- Map.elems . + fromMaybe (error $ "mockLookupAddress: unknown UserId: " <> show uid) . + Map.lookup uid <$> + asks (^. meClientInfos) + pure . catMaybes $ (^? ciNativeAddress . _Just . _1) <$> cinfos + +mockBulkSend + :: (HasCallStack, m ~ MockGundeck) + => URI -> BulkPushRequest + -> m (URI, Either SomeException BulkPushResponse) +mockBulkSend uri notifs = do + getstatus <- mkWSStatus + let flat :: [(Notification, PushTarget)] + flat = case notifs of + (BulkPushRequest ntifs) -> + mconcat $ (\(ntif, trgts) -> (ntif,) <$> trgts) <$> ntifs + + forM_ flat $ \(ntif, ptgt) -> do + when (getstatus ptgt == PushStatusOk) $ msWSQueue %= + deliver (ptUserId ptgt, clientIdFromConnId $ ptConnId ptgt) (ntfPayload ntif) + + pure . (uri,) . Right $ BulkPushResponse + [ (ntfId ntif, trgt, getstatus trgt) | (ntif, trgt) <- flat ] + +mockOldSimpleWebPush + :: (HasCallStack, m ~ MockGundeck) + => Notification + -> List1 NotificationTarget + -> UserId + -> Maybe ConnId + -> Set ConnId + -> m [Presence] +mockOldSimpleWebPush notif tgts _senderid mconnid connWhitelist = do + env <- ask + getstatus <- mkWSStatus + + let clients :: [(UserId, ClientId)] + clients + = -- reformat + fmap (\(PushTarget uid connid) -> (uid, clientIdFromConnId connid)) + -- drop all broken web sockets + . filter ((== PushStatusOk) . getstatus) + -- do not push to sending device + . filter ((/= mconnid) . Just . ptConnId) + -- reformat + . mconcat . fmap (\tgt -> + PushTarget (tgt ^. targetUser) . fakeConnId + <$> (tgt ^. targetClients)) + -- apply filters + . fmap (connWhitelistSieve . emptyMeansFullHack) + $ toList tgts + + connWhitelistSieve :: NotificationTarget -> NotificationTarget + connWhitelistSieve = if null connWhitelist + then id + else targetClients %~ filter ((`elem` connWhitelist) . fakeConnId) + + emptyMeansFullHack :: NotificationTarget -> NotificationTarget + emptyMeansFullHack tgt = tgt & targetClients %~ \case + [] -> clientIdsOfUser env (tgt ^. targetUser) + same@(_:_) -> same + + forM_ clients $ \(userid, clientid) -> do + msWSQueue %= deliver (userid, clientid) (ntfPayload notif) + + pure $ uncurry fakePresence <$> clients + + +---------------------------------------------------------------------- +-- helpers + +-- | (it may be possible to drop this type in favor of more sophisticated use of quickcheck's +-- counterexample.) 
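Among the helpers in this final section, 'deliver' and 'payloadToInt' implement the bookkeeping shared by all the mock handlers above: 'genPayload' only ever produces a one-field object carrying a number, 'payloadToInt' extracts that number, and 'deliver' files it under the receiving (user, client) pair. A minimal sketch of that contract, assuming only this module plus QuickCheck (the property name is illustrative, not part of the patch):

    import MockGundeck
    import Test.QuickCheck
    import qualified Data.IntMultiSet as MSet
    import qualified Data.Map as Map

    -- Delivering a freshly generated payload to a (user, client) pair that has
    -- received nothing yet leaves exactly one queue entry: the payload's number.
    prop_deliverRecordsPayload :: Property
    prop_deliverRecordsPayload =
      forAll genId $ \uid -> forAll genClientId $ \cid -> forAll genPayload $ \pay ->
        Map.lookup (uid, cid) (deliver (uid, cid) pay mempty)
          === Just (MSet.singleton (payloadToInt pay))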
+newtype Pretty a = Pretty a + deriving (Eq, Ord) + +instance Aeson.ToJSON a => Show (Pretty a) where + show (Pretty a) = cs $ Aeson.encodePretty a + +shrinkPretty :: HasCallStack => (a -> [a]) -> Pretty a -> [Pretty a] +shrinkPretty shrnk (Pretty xs) = Pretty <$> shrnk xs + +sublist1Of :: HasCallStack => [a] -> Gen (List1 a) +sublist1Of [] = error "sublist1Of: empty list" +sublist1Of xs = sublistOf xs >>= \case + [] -> sublist1Of xs + c:cc -> pure (list1 c cc) + +unsafeList1 :: HasCallStack => [a] -> List1 a +unsafeList1 [] = error "unsafeList1: empty list" +unsafeList1 (x:xs) = list1 x xs + +deliver :: (UserId, ClientId) -> Payload -> NotifQueue -> NotifQueue +deliver qkey qval queue = Map.alter (Just . tweak) qkey queue + where + tweak Nothing = MSet.singleton (payloadToInt qval) + tweak (Just qvals) = MSet.insert (payloadToInt qval) qvals + +-- | Get the number contained in the payload. +payloadToInt :: Payload -> Int +payloadToInt (List1 (toList -> [toList -> [Number x]])) + | Just n <- Scientific.toBoundedInteger x = n +payloadToInt bad = error $ "unexpected Payload: " <> show bad + +mkWSStatus :: MockGundeck (PushTarget -> PushStatus) +mkWSStatus = do + env <- ask + pure $ \trgt -> if wsReachable env (ptUserId trgt, clientIdFromConnId $ ptConnId trgt) + then PushStatusOk + else PushStatusGone + + +wsReachable :: MockEnv -> (UserId, ClientId) -> Bool +wsReachable (MockEnv mp) (uid, cid) = maybe False (^. ciWSReachable) $ + (Map.lookup uid >=> Map.lookup cid) mp + +nativeReachable :: MockEnv -> (UserId, ClientId) -> Bool +nativeReachable (MockEnv mp) (uid, cid) = maybe False (^. _2) $ + (Map.lookup uid >=> Map.lookup cid >=> (^. ciNativeAddress)) mp + +nativeReachableAddr :: MockEnv -> Address s -> Bool +nativeReachableAddr env addr = nativeReachable env (addr ^. addrUser, addr ^. addrClient) + +allUsers :: MockEnv -> [UserId] +allUsers = fmap fst . allRecipients + +allRecipients :: MockEnv -> [(UserId, [ClientId])] +allRecipients (MockEnv mp) = (_2 %~ Map.keys) <$> Map.toList mp + +clientIdsOfUser :: HasCallStack => MockEnv -> UserId -> [ClientId] +clientIdsOfUser (MockEnv mp) uid = + maybe (error "unknown UserId") Map.keys $ Map.lookup uid mp + + +-- | See also: 'fakePresence'. +fakePresences :: (UserId, [ClientId]) -> [Presence] +fakePresences (uid, cids) = fakePresence uid <$> cids + +-- | See also: 'fakePresence'. +fakePresences' :: MockEnv -> Recipient -> [Presence] +fakePresences' env (Recipient uid _ cids) = + fakePresence uid <$> case cids of + RecipientClientsAll -> clientIdsOfUser env uid + RecipientClientsSome cc -> toList cc + +-- | Currently, we only create 'Presence's from 'Push' requests, which contains 'ClientId's, but no +-- 'ConnId's. So in contrast to the production code where the two are generated independently, we +-- maintain identity (except for the type) between 'ClientId' and 'ConnId'. (This makes switching +-- back between the two trivial without having to maintain a stateful mapping.) Furthermore, we do +-- not cover the @isNothing (clientId prc)@ case. +fakePresence :: UserId -> ClientId -> Presence +fakePresence userId clientId_ = Presence {..} + where + clientId = Just clientId_ + connId = fakeConnId clientId_ + resource = URI . fromJust $ URI.parseURI "http://127.0.0.1:8080" + createdAt = 0 + __field = mempty + +-- | See also: 'fakePresence'. +fakeConnId :: ClientId -> ConnId +fakeConnId = ConnId . cs . client + +clientIdFromConnId :: ConnId -> ClientId +clientIdFromConnId = ClientId . cs . 
fromConnId diff --git a/services/gundeck/test/unit/Push.hs b/services/gundeck/test/unit/Push.hs new file mode 100644 index 00000000000..435bb401b43 --- /dev/null +++ b/services/gundeck/test/unit/Push.hs @@ -0,0 +1,85 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE TemplateHaskell #-} +{-# LANGUAGE TupleSections #-} +{-# LANGUAGE ViewPatterns #-} + +{-# OPTIONS_GHC -Wno-incomplete-patterns #-} + +module Push where + +import Imports +import Gundeck.Push (pushAll, pushAny) +import Gundeck.Push.Websocket as Web (bulkPush) +import Gundeck.Types +import MockGundeck +import Test.QuickCheck +import Test.QuickCheck.Instances () +import Test.Tasty +import Test.Tasty.QuickCheck + +import qualified Data.Aeson as Aeson + + +tests :: TestTree +tests = testGroup "bulkpush" $ + [ testProperty "web sockets" webBulkPushProps + , testProperty "native pushes" pushAllProps + ] + + +mkEnv :: (Pretty MockEnv -> Property) -> Positive Int -> Property +mkEnv prop (Positive len) = forAllShrink (Pretty <$> resize len genMockEnv) (shrinkPretty shrinkMockEnv) prop + + +webBulkPushProps :: Positive Int -> Property +webBulkPushProps plen@(Positive len) = mkEnv mkNotifs plen + where + mkNotifs :: Pretty MockEnv -> Property + mkNotifs (Pretty env) = forAllShrink + (Pretty <$> resize len (genNotifs env)) + (shrinkPretty shrinkNotifs) + (webBulkPushProp env) + +webBulkPushProp :: MockEnv -> Pretty [(Notification, [Presence])] -> Property +webBulkPushProp env (Pretty notifs) = + counterexample "^ environment, notifications\n" $ + conjoin props + where + (realout, realst) = runMockGundeck env $ Web.bulkPush notifs + (mockout, mockst) = runMockGundeck env $ mockBulkPush notifs + props = [ realst === mockst + , sort realout === sort mockout + ] + + +pushAllProps :: Positive Int -> Property +pushAllProps plen@(Positive len) = mkEnv mkPushes plen + where + mkPushes :: Pretty MockEnv -> Property + mkPushes (Pretty env) = forAllShrink + (Pretty <$> resize len (genPushes env)) + (shrinkPretty shrinkPushes) + (pushAllProp env) + +pushAllProp :: MockEnv -> Pretty [Push] -> Property +pushAllProp env (Pretty pushes) = + counterexample "^ environment, pushes\n" $ + conjoin props + where + ((), realst) = runMockGundeck env (pushAll pushes) + ((), mockst) = runMockGundeck env (mockPushAll pushes) + (errs, oldst) = runMockGundeck env (pushAny pushes) + props = [ (Aeson.eitherDecode . Aeson.encode) pushes === Right pushes + , (Aeson.eitherDecode . Aeson.encode) env === Right env + , counterexample "real vs. mock:" $ realst === mockst + , counterexample "real vs. 
old:" $ realst === oldst + , counterexample "old errors:" $ isRight errs === True + ] diff --git a/services/nginz/Dockerfile b/services/nginz/Dockerfile index 69af16f65f5..b54cb847eef 100644 --- a/services/nginz/Dockerfile +++ b/services/nginz/Dockerfile @@ -130,7 +130,8 @@ RUN apk add --no-cache inotify-tools dumb-init bash curl && \ \ # forward request and error logs to docker log collector && ln -sf /dev/stdout /var/log/nginx/access.log \ - && ln -sf /dev/stderr /var/log/nginx/error.log + && ln -sf /dev/stderr /var/log/nginx/error.log \ + && apk add --no-cache libgcc COPY services/nginz/nginz_reload.sh /usr/bin/nginz_reload.sh diff --git a/services/nginz/Makefile b/services/nginz/Makefile index 33efc2df7df..28ca1eab263 100644 --- a/services/nginz/Makefile +++ b/services/nginz/Makefile @@ -11,6 +11,8 @@ DEB := $(NAME)_$(NGINZ_VERSION)_amd64.deb ifeq ($(DEBUG), 1) WITH_DEBUG = --with-debug endif +DOCKER_USER ?= quay.io/wire +DOCKER_TAG ?= local DEST_PATH ?= /opt/nginz # Use a folder that can be written to since errors during startup do not respect @@ -117,7 +119,9 @@ $(NGINX_BUNDLE): .PHONY: docker docker: git submodule update --init - docker build -f Dockerfile -t nginz ../.. + docker build -t $(DOCKER_USER)/nginz:$(DOCKER_TAG) -f Dockerfile ../.. + docker tag $(DOCKER_USER)/nginz:$(DOCKER_TAG) $(DOCKER_USER)/nginz:latest + if test -n "$$DOCKER_PUSH"; then docker login -u $(DOCKER_USERNAME) -p $(DOCKER_PASSWORD); docker push $(DOCKER_USER)/nginz:$(DOCKER_TAG); docker push $(DOCKER_USER)/nginz:latest; fi; .PHONY: libzauth libzauth: diff --git a/services/proxy/Makefile b/services/proxy/Makefile index d4ef2a49bb3..06d4220b38c 100644 --- a/services/proxy/Makefile +++ b/services/proxy/Makefile @@ -7,6 +7,8 @@ BUILD_LABEL ?= local BUILD := $(BUILD_NUMBER)$(shell [ "${BUILD_LABEL}" == "" ] && echo "" || echo ".${BUILD_LABEL}") DEB := $(NAME)_$(VERSION)+$(BUILD)_amd64.deb EXECUTABLES := proxy +DOCKER_USER ?= quay.io/wire +DOCKER_TAG ?= local guard-%: @ if [ "${${*}}" = "" ]; then \ @@ -56,11 +58,12 @@ $(DEB): .PHONY: docker docker: $(foreach executable,$(EXECUTABLES),\ - docker build -t $(executable) \ - -f ../../build/alpine/Dockerfile \ - --build-arg service=$(NAME) \ + docker build -t $(DOCKER_USER)/$(executable):$(DOCKER_TAG) \ + -f ../../build/alpine/Dockerfile.executable \ --build-arg executable=$(executable) \ - ../.. \ + ../.. && \ + docker tag $(DOCKER_USER)/$(executable):$(DOCKER_TAG) $(DOCKER_USER)/$(executable):latest && \ + if test -n "$$DOCKER_PUSH"; then docker login -u $(DOCKER_USERNAME) -p $(DOCKER_PASSWORD); docker push $(DOCKER_USER)/$(executable):$(DOCKER_TAG); docker push $(DOCKER_USER)/$(executable):latest; fi \ ;) .PHONY: time diff --git a/services/spar/Makefile b/services/spar/Makefile index 2d60b47948f..f168b7db3e3 100644 --- a/services/spar/Makefile +++ b/services/spar/Makefile @@ -10,6 +10,8 @@ EXE_SCHEMA := ../../dist/$(NAME)-schema DEB := dist/$(NAME)_$(VERSION)+$(BUILD)_amd64.deb DEB_SCHEMA := dist/$(NAME)-schema_$(VERSION)+$(BUILD)_amd64.deb EXECUTABLES := $(NAME) $(NAME)-integration $(NAME)-schema +DOCKER_USER ?= quay.io/wire +DOCKER_TAG ?= local guard-%: @ if [ "${${*}}" = "" ]; then \ @@ -100,11 +102,12 @@ db-migrate: fast .PHONY: docker docker: $(foreach executable,$(EXECUTABLES),\ - docker build -t $(executable) \ - -f ../../build/alpine/Dockerfile \ - --build-arg service=$(NAME) \ + docker build -t $(DOCKER_USER)/$(executable):$(DOCKER_TAG) \ + -f ../../build/alpine/Dockerfile.executable \ --build-arg executable=$(executable) \ - ../.. \ + ../.. 
&& \ + docker tag $(DOCKER_USER)/$(executable):$(DOCKER_TAG) $(DOCKER_USER)/$(executable):latest && \ + if test -n "$$DOCKER_PUSH"; then docker login -u $(DOCKER_USERNAME) -p $(DOCKER_PASSWORD); docker push $(DOCKER_USER)/$(executable):$(DOCKER_TAG); docker push $(DOCKER_USER)/$(executable):latest; fi \ ;) .PHONY: time diff --git a/services/spar/src/Spar/API.hs b/services/spar/src/Spar/API.hs index 63e2620ac22..138c65861cf 100644 --- a/services/spar/src/Spar/API.hs +++ b/services/spar/src/Spar/API.hs @@ -49,8 +49,8 @@ import Spar.API.Types import Spar.App import Spar.Error import Spar.Types -import Spar.SCIM -import Spar.SCIM.Swagger () +import Spar.Scim +import Spar.Scim.Swagger () import qualified Data.ByteString as SBS import qualified Data.ByteString.Base64 as ES diff --git a/services/spar/src/Spar/API/Types.hs b/services/spar/src/Spar/API/Types.hs index 3369f3ab1f6..3b6b47240bc 100644 --- a/services/spar/src/Spar/API/Types.hs +++ b/services/spar/src/Spar/API/Types.hs @@ -27,7 +27,7 @@ import Servant import Servant.Multipart import Spar.Types import Spar.API.Util -import Spar.SCIM (APIScim) +import Spar.Scim (APIScim) import qualified SAML2.WebSSO as SAML import qualified URI.ByteString as URI diff --git a/services/spar/src/Spar/App.hs b/services/spar/src/Spar/App.hs index 0333c3534a1..50bc51e0283 100644 --- a/services/spar/src/Spar/App.hs +++ b/services/spar/src/Spar/App.hs @@ -269,9 +269,12 @@ verdictHandlerResult bindCky = catchVerdictErrors . \case | uid == uid' -> pure uid -- redundant binding (no change to brig or spar) | otherwise -> throwSpar SparBindUserRefTaken -- attempt to use ssoid for a second wire user - cky :: SetCookie <- Intra.ssoLogin uid + SAML.logger SAML.Debug ("granting sso login for " <> show uid) + mcky :: Maybe SetCookie <- Intra.ssoLogin uid -- (creating users is synchronous and does a quorum vote, so there is no race condition here.) - pure $ VerifyHandlerGranted cky uid + case mcky of + Just cky -> pure $ VerifyHandlerGranted cky uid + Nothing -> throwSpar $ SparBrigError "sso-login failed (race condition?)" -- | If the client is web, it will be served with an HTML page that it can process to decide whether -- to log the user in or show an error. diff --git a/services/spar/src/Spar/Error.hs b/services/spar/src/Spar/Error.hs index fbd447aad3d..301c3359cbb 100644 --- a/services/spar/src/Spar/Error.hs +++ b/services/spar/src/Spar/Error.hs @@ -49,6 +49,7 @@ data SparCustomError | SparNoBodyInBrigResponse | SparCouldNotParseBrigResponse LT | SparBrigError LT + | SparBrigErrorWith Status LT | SparNoBodyInGalleyResponse | SparCouldNotParseGalleyResponse LT | SparGalleyError LT @@ -92,6 +93,7 @@ sparToWaiError (SAML.CustomError (SparBadUserName msg)) = Righ sparToWaiError (SAML.CustomError SparNoBodyInBrigResponse) = Right $ Wai.Error status502 "bad-upstream" "Failed to get a response from an upstream server." sparToWaiError (SAML.CustomError (SparCouldNotParseBrigResponse msg)) = Right $ Wai.Error status502 "bad-upstream" ("Could not parse response body: " <> msg) sparToWaiError (SAML.CustomError (SparBrigError msg)) = Right $ Wai.Error status500 "bad-upstream" msg +sparToWaiError (SAML.CustomError (SparBrigErrorWith status msg)) = Right $ Wai.Error status "bad-upstream" msg -- Galley-specific errors sparToWaiError (SAML.CustomError SparNoBodyInGalleyResponse) = Right $ Wai.Error status502 "bad-upstream" "Failed to get a response from an upstream server." 
sparToWaiError (SAML.CustomError (SparCouldNotParseGalleyResponse msg)) = Right $ Wai.Error status502 "bad-upstream" ("Could not parse response body: " <> msg) diff --git a/services/spar/src/Spar/Intra/Brig.hs b/services/spar/src/Spar/Intra/Brig.hs index 753e18e6948..c284577218b 100644 --- a/services/spar/src/Spar/Intra/Brig.hs +++ b/services/spar/src/Spar/Intra/Brig.hs @@ -4,6 +4,7 @@ {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE MultiWayIf #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE QuasiQuotes #-} {-# LANGUAGE ScopedTypeVariables #-} @@ -27,6 +28,7 @@ import Control.Monad.Except import Data.Aeson (FromJSON, eitherDecode') import Data.ByteString.Conversion import Data.Id (Id(Id), UserId, TeamId) +import Data.Ix import Data.Range import Data.String.Conversions import Network.HTTP.Types.Method @@ -116,11 +118,15 @@ createUser suid (Id buid) teamid mbName = do $ method POST . path "/i/users" . json newUser - . expect2xx - userId . selfUser <$> parseResponse @SelfProfile resp + if | statusCode resp < 300 + -> userId . selfUser <$> parseResponse @SelfProfile resp + | inRange (400, 499) (statusCode resp) + -> throwSpar . SparBrigErrorWith (responseStatus resp) $ "create user failed" + | otherwise + -> throwSpar . SparBrigError . cs $ "create user failed with status " <> show (statusCode resp) --- | Get a user; returns 'Nothing' if the user was not found. +-- | Get a user; returns 'Nothing' if the user was not found or has been deleted. getUser :: (HasCallStack, MonadError SparError m, MonadSparToBrig m) => UserId -> m (Maybe User) getUser buid = do resp :: Response (Maybe LBS) <- call @@ -128,7 +134,11 @@ getUser buid = do . path "/self" . header "Z-User" (toByteString' buid) case statusCode resp of - 200 -> Just . selfUser <$> parseResponse @SelfProfile resp + 200 -> do + user <- selfUser <$> parseResponse @SelfProfile resp + pure $ if (userDeleted user) + then Nothing + else Just user 404 -> pure Nothing _ -> throwSpar (SparBrigError "Could not retrieve user") @@ -152,15 +162,22 @@ getUserByHandle handle = do parse (x:[]) = Just $ accountUser x parse _ = Nothing -- TODO: What if more accounts get returned? --- | Set user's handle. +-- | Set user's handle. Fails with status <500 if brig fails with <500, and with 500 if brig fails +-- with >= 500. setHandle :: (HasCallStack, MonadError SparError m, MonadSparToBrig m) => UserId -> Handle -> m () -setHandle buid (Handle handle) = void $ call +setHandle buid (Handle handle) = do + resp <- call $ method PUT . path "/self/handle" . header "Z-User" (toByteString' buid) . header "Z-Connection" "" - . expect2xx . json (HandleUpdate handle) + if | statusCode resp < 300 + -> pure () + | inRange (400, 499) (statusCode resp) + -> throwSpar . SparBrigErrorWith (responseStatus resp) $ "set handle failed" + | otherwise + -> throwSpar . SparBrigError . cs $ "set handle failed with status " <> show (statusCode resp) -- | This works under the assumption that the user must exist on brig. If it does not, brig -- responds with 404 and this function returns 'False'. @@ -208,13 +225,19 @@ getZUsrOwnedTeam (Just uid) = do -- | Get persistent cookie from brig and redirect user past login process. +-- +-- If brig responds with status >=400;<500, return Nothing. Otherwise, crash (500). 
ssoLogin :: (HasCallStack, MonadError SparError m, SAML.HasConfig m, MonadSparToBrig m) - => UserId -> m SetCookie + => UserId -> m (Maybe SetCookie) ssoLogin buid = do resp :: Response (Maybe LBS) <- call $ method POST . path "/i/sso-login" . json (SsoLogin buid Nothing) . queryItem "persist" "true" - . expect2xx - respToCookie resp + if | statusCode resp < 300 + -> Just <$> respToCookie resp + | inRange (400, 499) (statusCode resp) + -> pure Nothing + | otherwise + -> throwSpar . SparBrigError . cs $ "sso-login failed with status " <> show (statusCode resp) diff --git a/services/spar/src/Spar/SCIM.hs b/services/spar/src/Spar/Scim.hs similarity index 69% rename from services/spar/src/Spar/SCIM.hs rename to services/spar/src/Spar/Scim.hs index 962e634bfd8..29575c51e39 100644 --- a/services/spar/src/Spar/SCIM.hs +++ b/services/spar/src/Spar/Scim.hs @@ -24,7 +24,7 @@ -- | An implementation of the SCIM API for doing bulk operations with users. -- -- See -module Spar.SCIM +module Spar.Scim ( -- * The API APIScim @@ -71,45 +71,45 @@ import qualified Spar.Data as Data import qualified Data.ByteString.Base64 as ES import qualified Spar.Intra.Brig as Intra.Brig -import qualified Web.SCIM.Class.User as SCIM -import qualified Web.SCIM.Class.Group as SCIM -import qualified Web.SCIM.Class.Auth as SCIM -import qualified Web.SCIM.Server as SCIM -import qualified Web.SCIM.Handler as SCIM -import qualified Web.SCIM.Filter as SCIM -import qualified Web.SCIM.Schema.Common as SCIM -import qualified Web.SCIM.Schema.Meta as SCIM -import qualified Web.SCIM.Schema.ResourceType as SCIM -import qualified Web.SCIM.Schema.ListResponse as SCIM -import qualified Web.SCIM.Schema.Error as SCIM +import qualified Web.Scim.Class.User as Scim +import qualified Web.Scim.Class.Group as Scim +import qualified Web.Scim.Class.Auth as Scim +import qualified Web.Scim.Server as Scim +import qualified Web.Scim.Handler as Scim +import qualified Web.Scim.Filter as Scim +import qualified Web.Scim.Schema.Common as Scim +import qualified Web.Scim.Schema.Meta as Scim +import qualified Web.Scim.Schema.ResourceType as Scim +import qualified Web.Scim.Schema.ListResponse as Scim +import qualified Web.Scim.Schema.Error as Scim -import qualified Web.SCIM.Schema.User as SCIM.User -import qualified Web.SCIM.Schema.User.Email as SCIM.User -import qualified Web.SCIM.Schema.User.Phone as SCIM.User -import qualified Web.SCIM.Schema.User.Name as SCIM.User +import qualified Web.Scim.Schema.User as Scim.User +import qualified Web.Scim.Schema.User.Email as Scim.User +import qualified Web.Scim.Schema.User.Phone as Scim.User +import qualified Web.Scim.Schema.User.Name as Scim.User -import qualified Web.SCIM.Capabilities.MetaSchema as SCIM.Meta +import qualified Web.Scim.Capabilities.MetaSchema as Scim.Meta -import qualified Web.SCIM.Schema.Common as SCIM.Common +import qualified Web.Scim.Schema.Common as Scim.Common -- | SCIM config for our server. 
-- --- TODO: the 'SCIM.Meta.empty' configuration claims that we don't support +-- TODO: the 'Scim.Meta.empty' configuration claims that we don't support -- filters, but we actually do; it's a bug in hscim -configuration :: SCIM.Meta.Configuration -configuration = SCIM.Meta.empty +configuration :: Scim.Meta.Configuration +configuration = Scim.Meta.empty type APIScim - = OmitDocs :> "v2" :> SCIM.SiteAPI ScimToken + = OmitDocs :> "v2" :> Scim.SiteAPI ScimToken :<|> "auth-tokens" :> APIScimToken apiScim :: ServerT APIScim Spar -apiScim = hoistSCIM (toServant (SCIM.siteServer configuration)) +apiScim = hoistScim (toServant (Scim.siteServer configuration)) :<|> apiScimToken where - hoistSCIM = hoistServer (Proxy @(SCIM.SiteAPI ScimToken)) - (SCIM.fromSCIMHandler fromError) - fromError = throwError . SAML.CustomServant . SCIM.scimToServantErr + hoistScim = hoistServer (Proxy @(Scim.SiteAPI ScimToken)) + (Scim.fromScimHandler fromError) + fromError = throwError . SAML.CustomServant . Scim.scimToServantErr ---------------------------------------------------------------------------- -- UserDB @@ -137,28 +137,28 @@ apiScim = hoistSCIM (toServant (SCIM.siteServer configuration)) -- value and the @displayName@ won't be affected by that change. -- | Expose a Wire user as an SCIM user. -toSCIMUser :: User -> SCIM.StoredUser -toSCIMUser user = SCIM.WithMeta meta thing +toScimUser :: User -> Scim.StoredUser +toScimUser user = Scim.WithMeta meta thing where -- User ID in text format idText = idToText (Brig.userId user) -- The representation of the user, without the meta information - thing = SCIM.WithId idText $ SCIM.User.empty - { SCIM.User.userName = maybe idText fromHandle (userHandle user) - , SCIM.User.name = Just emptySCIMName - , SCIM.User.displayName = Just (fromName (userName user)) + thing = Scim.WithId idText $ Scim.User.empty + { Scim.User.userName = maybe idText fromHandle (userHandle user) + , Scim.User.name = Just emptyScimName + , Scim.User.displayName = Just (fromName (userName user)) } -- The hash of the user representation (used as a version, i.e. ETag) thingHash = hashlazy (Aeson.encode thing) :: Digest SHA256 -- Meta-info about the user - meta = SCIM.Meta - { SCIM.resourceType = SCIM.UserResource - , SCIM.created = testDate - , SCIM.lastModified = testDate - , SCIM.version = SCIM.Strong (Text.pack (show thingHash)) + meta = Scim.Meta + { Scim.resourceType = Scim.UserResource + , Scim.created = testDate + , Scim.lastModified = testDate + , Scim.version = Scim.Strong (Text.pack (show thingHash)) -- TODO: The location should be /Users/. It might also have to -- include the baseurl of our server -- this has to be checked. - , SCIM.location = SCIM.URI $ URI "https://TODO" Nothing "" "" "" + , Scim.location = Scim.URI $ URI "https://TODO" Nothing "" "" "" } -- 2018-01-01 00:00 @@ -170,15 +170,15 @@ testDate = UTCTime , utctDayTime = 0 } -emptySCIMName :: SCIM.User.Name -emptySCIMName = - SCIM.User.Name - { SCIM.User.formatted = Nothing - , SCIM.User.givenName = Just "" - , SCIM.User.familyName = Just "" - , SCIM.User.middleName = Nothing - , SCIM.User.honorificPrefix = Nothing - , SCIM.User.honorificSuffix = Nothing +emptyScimName :: Scim.User.Name +emptyScimName = + Scim.User.Name + { Scim.User.formatted = Nothing + , Scim.User.givenName = Just "" + , Scim.User.familyName = Just "" + , Scim.User.middleName = Nothing + , Scim.User.honorificPrefix = Nothing + , Scim.User.honorificSuffix = Nothing } {- TODO: might be useful later. 
@@ -188,35 +188,35 @@ emptySCIMName = -- name and last name, so we break our names up to satisfy Okta). -- -- TODO: use the same algorithm as Wire clients use. -toSCIMName :: Name -> SCIM.User.Name -toSCIMName (Name name) = - SCIM.User.Name - { SCIM.User.formatted = Just name - , SCIM.User.givenName = Just first - , SCIM.User.familyName = if Text.null rest then Nothing else Just rest - , SCIM.User.middleName = Nothing - , SCIM.User.honorificPrefix = Nothing - , SCIM.User.honorificSuffix = Nothing +toScimName :: Name -> Scim.User.Name +toScimName (Name name) = + Scim.User.Name + { Scim.User.formatted = Just name + , Scim.User.givenName = Just first + , Scim.User.familyName = if Text.null rest then Nothing else Just rest + , Scim.User.middleName = Nothing + , Scim.User.honorificPrefix = Nothing + , Scim.User.honorificSuffix = Nothing } where (first, Text.drop 1 -> rest) = Text.breakOn " " name -- | Convert from the Wire phone type to the SCIM phone type. -toSCIMPhone :: Phone -> SCIM.User.Phone -toSCIMPhone (Phone phone) = - SCIM.User.Phone - { SCIM.User.typ = Nothing - , SCIM.User.value = Just phone +toScimPhone :: Phone -> Scim.User.Phone +toScimPhone (Phone phone) = + Scim.User.Phone + { Scim.User.typ = Nothing + , Scim.User.value = Just phone } -- | Convert from the Wire email type to the SCIM email type. -toSCIMEmail :: Email -> SCIM.User.Email -toSCIMEmail (Email eLocal eDomain) = - SCIM.User.Email - { SCIM.User.typ = Nothing - , SCIM.User.value = SCIM.User.EmailAddress2 +toScimEmail :: Email -> Scim.User.Email +toScimEmail (Email eLocal eDomain) = + Scim.User.Email + { Scim.User.typ = Nothing + , Scim.User.value = Scim.User.EmailAddress2 (unsafeEmailAddress (encodeUtf8 eLocal) (encodeUtf8 eDomain)) - , SCIM.User.primary = Just True + , Scim.User.primary = Just True } -} @@ -229,72 +229,72 @@ toSCIMEmail (Email eLocal eDomain) = -- 1. We want all errors originating from SCIM handlers to be thrown as SCIM -- errors, not as Spar errors. Currently errors thrown from things like -- 'getTeamMembers' will look like Spar errors and won't be wrapped into --- the 'SCIMError' type. This might or might not be important, depending +-- the 'ScimError' type. This might or might not be important, depending -- on what is expected by apps that use the SCIM interface. -- -- 2. We want generic error descriptions in response bodies, while still -- logging nice error messages internally. -instance SCIM.UserDB Spar where +instance Scim.UserDB Spar where -- | List all users, possibly filtered by some predicate. list :: ScimTokenInfo - -> Maybe SCIM.Filter - -> SCIM.SCIMHandler Spar (SCIM.ListResponse SCIM.StoredUser) + -> Maybe Scim.Filter + -> Scim.ScimHandler Spar (Scim.ListResponse Scim.StoredUser) list ScimTokenInfo{stiTeam} mbFilter = do members <- lift $ getTeamMembers stiTeam users <- fmap catMaybes $ forM members $ \member -> lift (Intra.Brig.getUser (member ^. 
Galley.userId)) >>= \case Just user | userDeleted user -> pure Nothing - | otherwise -> pure (Just (toSCIMUser user)) - Nothing -> SCIM.throwSCIM $ - SCIM.serverError "SCIM.UserDB.list: couldn't fetch team member" + | otherwise -> pure (Just (toScimUser user)) + Nothing -> Scim.throwScim $ + Scim.serverError "Scim.UserDB.list: couldn't fetch team member" let check user = case mbFilter of Nothing -> pure True Just filter_ -> - let user' = SCIM.Common.value (SCIM.thing user) - in case SCIM.filterUser filter_ user' of + let user' = Scim.Common.value (Scim.thing user) + in case Scim.filterUser filter_ user' of Right res -> pure res - Left err -> SCIM.throwSCIM $ - SCIM.badRequest SCIM.InvalidFilter (Just err) + Left err -> Scim.throwScim $ + Scim.badRequest Scim.InvalidFilter (Just err) -- TODO: once bigger teams arrive, we should have pagination here. - SCIM.fromList <$> filterM check users + Scim.fromList <$> filterM check users -- | Get a single user by its ID. get :: ScimTokenInfo -> Text - -> SCIM.SCIMHandler Spar (Maybe SCIM.StoredUser) + -> Scim.ScimHandler Spar (Maybe Scim.StoredUser) get ScimTokenInfo{stiTeam} uidText = do uid <- case readMaybe (Text.unpack uidText) of Just u -> pure u - Nothing -> SCIM.throwSCIM $ - SCIM.notFound "user" uidText + Nothing -> Scim.throwScim $ + Scim.notFound "user" uidText lift (Intra.Brig.getUser uid) >>= traverse (\user -> do when (userTeam user /= Just stiTeam || userDeleted user) $ - SCIM.throwSCIM $ SCIM.notFound "user" (idToText uid) - pure (toSCIMUser user)) + Scim.throwScim $ Scim.notFound "user" (idToText uid) + pure (toScimUser user)) -- | Create a new user. create :: ScimTokenInfo - -> SCIM.User.User - -> SCIM.SCIMHandler Spar SCIM.StoredUser + -> Scim.User.User + -> Scim.ScimHandler Spar Scim.StoredUser create ScimTokenInfo{stiIdP} user = do - extId <- case SCIM.User.externalId user of + extId <- case Scim.User.externalId user of Just x -> pure x - Nothing -> SCIM.throwSCIM $ - SCIM.badRequest SCIM.InvalidValue (Just "externalId is required") - handl <- case parseHandle (SCIM.User.userName user) of + Nothing -> Scim.throwScim $ + Scim.badRequest Scim.InvalidValue (Just "externalId is required") + handl <- case parseHandle (Scim.User.userName user) of Just x -> pure x - Nothing -> SCIM.throwSCIM $ - SCIM.badRequest SCIM.InvalidValue (Just "userName is not compliant") + Nothing -> Scim.throwScim $ + Scim.badRequest Scim.InvalidValue (Just "userName is not compliant") -- We check the name for validity, but only if it's present - mbName <- forM (SCIM.User.displayName user) $ \n -> + mbName <- forM (Scim.User.displayName user) $ \n -> case checkedEitherMsg @_ @1 @128 "displayName" n of Right x -> pure $ Name (fromRange x) - Left err -> SCIM.throwSCIM $ - SCIM.badRequest - SCIM.InvalidValue + Left err -> Scim.throwScim $ + Scim.badRequest + Scim.InvalidValue (Just ("displayName is not compliant: " <> Text.pack err)) -- NB: We assume that checking that the user does _not_ exist has -- already been done before -- the hscim library check does a 'get' @@ -303,55 +303,55 @@ instance SCIM.UserDB Spar where -- TODO: Assume that externalID is the subjectID, let's figure out how -- to extract that later issuer <- case stiIdP of - Nothing -> SCIM.throwSCIM $ - SCIM.serverError "No IdP configured for the provisioning token" + Nothing -> Scim.throwScim $ + Scim.serverError "No IdP configured for the provisioning token" Just idp -> lift (wrapMonadClient (Data.getIdPConfig idp)) >>= \case - Nothing -> SCIM.throwSCIM $ - SCIM.serverError "The IdP corresponding 
to the provisioning token \ + Nothing -> Scim.throwScim $ + Scim.serverError "The IdP corresponding to the provisioning token \ \was not found" Just idpConfig -> pure (idpConfig ^. SAML.idpMetadata . SAML.edIssuer) let uref = SAML.UserRef issuer (SAML.opaqueNameID extId) -- TODO: Adding a handle should be done _DURING_ the creation buid <- lift $ createUser uref mbName - lift $ Intra.Brig.setHandle buid handl + lift $ Intra.Brig.setHandle buid handl -- TODO: handle errors better here? - maybe (SCIM.throwSCIM (SCIM.serverError "SCIM.UserDB.create: user disappeared")) - (pure . toSCIMUser) =<< + maybe (Scim.throwScim (Scim.serverError "Scim.UserDB.create: user disappeared")) + (pure . toScimUser) =<< lift (Intra.Brig.getUser buid) update :: ScimTokenInfo -> Text - -> SCIM.User.User - -> SCIM.SCIMHandler Spar SCIM.StoredUser + -> Scim.User.User + -> Scim.ScimHandler Spar Scim.StoredUser update _ _ _ = - SCIM.throwSCIM $ SCIM.serverError "User update is not implemented yet" + Scim.throwScim $ Scim.serverError "User update is not implemented yet" - delete :: ScimTokenInfo -> Text -> SCIM.SCIMHandler Spar Bool + delete :: ScimTokenInfo -> Text -> Scim.ScimHandler Spar Bool delete _ _ = - SCIM.throwSCIM $ SCIM.serverError "User delete is not implemented yet" + Scim.throwScim $ Scim.serverError "User delete is not implemented yet" - getMeta :: ScimTokenInfo -> SCIM.SCIMHandler Spar SCIM.Meta + getMeta :: ScimTokenInfo -> Scim.ScimHandler Spar Scim.Meta getMeta _ = - SCIM.throwSCIM $ SCIM.serverError "User getMeta is not implemented yet" + Scim.throwScim $ Scim.serverError "User getMeta is not implemented yet" ---------------------------------------------------------------------------- -- GroupDB -instance SCIM.GroupDB Spar where +instance Scim.GroupDB Spar where -- TODO ---------------------------------------------------------------------------- -- AuthDB -instance SCIM.AuthDB Spar where +instance Scim.AuthDB Spar where type AuthData Spar = ScimToken type AuthInfo Spar = ScimTokenInfo authCheck Nothing = - SCIM.throwSCIM (SCIM.unauthorized "Token not provided") + Scim.throwScim (Scim.unauthorized "Token not provided") authCheck (Just token) = - maybe (SCIM.throwSCIM (SCIM.unauthorized "Invalid token")) pure =<< + maybe (Scim.throwScim (Scim.unauthorized "Invalid token")) pure =<< lift (wrapMonadClient (Data.lookupScimToken token)) -- TODO: don't forget to delete the tokens when the team is deleted diff --git a/services/spar/src/Spar/SCIM/Swagger.hs b/services/spar/src/Spar/Scim/Swagger.hs similarity index 98% rename from services/spar/src/Spar/SCIM/Swagger.hs rename to services/spar/src/Spar/Scim/Swagger.hs index 4979e951d82..ac19b85f25c 100644 --- a/services/spar/src/Spar/SCIM/Swagger.hs +++ b/services/spar/src/Spar/Scim/Swagger.hs @@ -17,7 +17,7 @@ {-# OPTIONS_GHC -Wno-orphans #-} -module Spar.SCIM.Swagger where +module Spar.Scim.Swagger where import Imports import Control.Lens @@ -29,7 +29,7 @@ import "swagger2" Data.Swagger hiding (Header(..)) -- clash other than -XPackageImports. 
import Spar.Orphans () import Spar.Types -import Spar.SCIM +import Spar.Scim import qualified SAML2.WebSSO as SAML diff --git a/services/spar/test-integration/Spec.hs b/services/spar/test-integration/Spec.hs index cb3fe363fc7..629c7d7feab 100644 --- a/services/spar/test-integration/Spec.hs +++ b/services/spar/test-integration/Spec.hs @@ -20,7 +20,7 @@ import qualified Test.Spar.APISpec import qualified Test.Spar.AppSpec import qualified Test.Spar.DataSpec import qualified Test.Spar.Intra.BrigSpec -import qualified Test.Spar.SCIMSpec +import qualified Test.Spar.ScimSpec main :: IO () @@ -43,4 +43,4 @@ mkspec = do describe "Test.Spar.App" Test.Spar.AppSpec.spec describe "Test.Spar.Data" Test.Spar.DataSpec.spec describe "Test.Spar.Intra.Brig" Test.Spar.Intra.BrigSpec.spec - describe "Test.Spar.SCIM" Test.Spar.SCIMSpec.spec + describe "Test.Spar.Scim" Test.Spar.ScimSpec.spec diff --git a/services/spar/test-integration/Test/Spar/APISpec.hs b/services/spar/test-integration/Test/Spar/APISpec.hs index ccaac1d9d8b..0cb2ef912d1 100644 --- a/services/spar/test-integration/Test/Spar/APISpec.hs +++ b/services/spar/test-integration/Test/Spar/APISpec.hs @@ -25,6 +25,7 @@ import Data.UUID as UUID hiding (null, fromByteString) import Data.UUID.V4 as UUID import SAML2.WebSSO as SAML import SAML2.WebSSO.Test.MockResponse +import SAML2.WebSSO.Test.Lenses import Spar.API.Types import Spar.Types import URI.ByteString.QQ (uri) @@ -35,10 +36,10 @@ import qualified Data.ByteString.Builder as LB import qualified Data.ZAuth.Token as ZAuth import qualified Galley.Types.Teams as Galley import qualified Spar.Intra.Brig as Intra -import qualified Util.SCIM as SCIMT +import qualified Util.Scim as ScimT import qualified Web.Cookie as Cky -import qualified Web.SCIM.Class.User as SCIM -import qualified Web.SCIM.Schema.User as SCIM +import qualified Web.Scim.Class.User as Scim +import qualified Web.Scim.Schema.User as Scim spec :: SpecWith TestEnv @@ -49,7 +50,7 @@ spec = do specFinalizeLogin specBindingUsers specCRUDIdentityProvider - specSCIMAndSAML + specScimAndSAML specAux @@ -190,20 +191,69 @@ specFinalizeLogin = do hasPersistentCookieHeader sparresp `shouldBe` Left "no set-cookie header" context "access granted" $ do - it "responds with a very peculiar 'allowed' HTTP response" $ do - (_, _, idp) <- registerTestIdP - (privcreds, authnreq) <- negotiateAuthnRequest idp - spmeta <- getTestSPMetadata - authnresp <- runSimpleSP $ mkAuthnResponse privcreds idp spmeta authnreq True - sparresp <- submitAuthnResponse authnresp - liftIO $ do - statusCode sparresp `shouldBe` 200 - let bdy = maybe "" (cs @LBS @String) (responseBody sparresp) - bdy `shouldContain` "" - bdy `shouldContain` "" - bdy `shouldContain` "wire:sso:success" - bdy `shouldContain` "window.opener.postMessage({type: 'AUTH_SUCCESS'}, receiverOrigin)" - hasPersistentCookieHeader sparresp `shouldBe` Right () + let loginSuccess :: HasCallStack => ResponseLBS -> TestSpar () + loginSuccess sparresp = liftIO $ do + statusCode sparresp `shouldBe` 200 + let bdy = maybe "" (cs @LBS @String) (responseBody sparresp) + bdy `shouldContain` "" + bdy `shouldContain` "" + bdy `shouldContain` "wire:sso:success" + bdy `shouldContain` "window.opener.postMessage({type: 'AUTH_SUCCESS'}, receiverOrigin)" + hasPersistentCookieHeader sparresp `shouldBe` Right () + + context "happy flow" $ do + it "responds with a very peculiar 'allowed' HTTP response" $ do + (_, _, idp) <- registerTestIdP + spmeta <- getTestSPMetadata + (privcreds, authnreq) <- negotiateAuthnRequest idp + authnresp <- 
runSimpleSP $ mkAuthnResponse privcreds idp spmeta authnreq True + loginSuccess =<< submitAuthnResponse authnresp + + context "user is created once, then deleted in team settings, then can login again." $ do + it "responds with 'allowed'" $ do + (ownerid, teamid, idp) <- registerTestIdP + spmeta <- getTestSPMetadata + + -- first login + newUserAuthnResp :: SignedAuthnResponse <- do + (privcreds, authnreq) <- negotiateAuthnRequest idp + authnresp <- runSimpleSP $ mkAuthnResponse privcreds idp spmeta authnreq True + loginSuccess =<< submitAuthnResponse authnresp + pure $ authnresp + + let newUserRef@(UserRef _ subj) = either (error . show) (^. userRefL) $ + parseFromDocument (fromSignedAuthnResponse newUserAuthnResp) + + -- remove user from team settings + do + env <- ask + newUserId <- getUserIdViaRef newUserRef + resp <- call . get $ + ( (env ^. teGalley) + . header "Z-User" (toByteString' ownerid) + . header "Z-Connection" "fake" + . paths ["teams", toByteString' teamid, "members"] + . expect2xx + ) + liftIO . print $ responseBody resp + + void . call . delete $ + ( (env ^. teGalley) + . header "Z-User" (toByteString' ownerid) + . header "Z-Connection" "fake" + . paths ["teams", toByteString' teamid, "members", toByteString' newUserId] + . Bilge.json (Galley.newTeamMemberDeleteData (Just defPassword)) + . expect2xx + ) + liftIO $ threadDelay 100000 -- make sure deletion is done. If we don't want to take + -- the time, we should find another way to robustly + -- confirm that deletion has completed in the background. + + -- second login + do + (privcreds, authnreq) <- negotiateAuthnRequest idp + authnresp <- runSimpleSP $ mkAuthnResponseWithSubj subj privcreds idp spmeta authnreq True + loginSuccess =<< submitAuthnResponse authnresp context "unknown user" $ do it "creates the user" $ do @@ -631,19 +681,19 @@ specCRUDIdentityProvider = do liftIO $ idp `shouldBe` idp' -specSCIMAndSAML :: SpecWith TestEnv -specSCIMAndSAML = do +specScimAndSAML :: SpecWith TestEnv +specScimAndSAML = do it "SCIM and SAML work together and SCIM-created users can login" $ do env <- ask -- create a user via scim - (tok, (_, _, idp)) <- SCIMT.registerIdPAndSCIMToken - usr :: SCIM.User <- SCIMT.randomSCIMUser - scimStoredUser :: SCIM.StoredUser <- SCIMT.createUser tok usr - let userid :: UserId = SCIMT.scimUserId scimStoredUser + (tok, (_, _, idp)) <- ScimT.registerIdPAndScimToken + usr :: Scim.User <- ScimT.randomScimUser + scimStoredUser :: Scim.StoredUser <- ScimT.createUser tok usr + let userid :: UserId = ScimT.scimUserId scimStoredUser userref :: UserRef = UserRef tenant subject tenant :: Issuer = idp ^. idpMetadata . edIssuer - subject :: NameID = opaqueNameID . fromMaybe (error "no external id") . SCIM.externalId $ usr + subject :: NameID = opaqueNameID . fromMaybe (error "no external id") . Scim.externalId $ usr -- UserRef maps onto correct UserId in spar (and back). 
userid' <- getUserIdViaRef' userref diff --git a/services/spar/test-integration/Test/Spar/DataSpec.hs b/services/spar/test-integration/Test/Spar/DataSpec.hs index 530504f531b..fd3440a264f 100644 --- a/services/spar/test-integration/Test/Spar/DataSpec.hs +++ b/services/spar/test-integration/Test/Spar/DataSpec.hs @@ -25,9 +25,9 @@ import Spar.Types import URI.ByteString.QQ (uri) import Util.Core import Util.Types -import Util.SCIM -import Web.SCIM.Schema.Meta as SCIM.Meta -import Web.SCIM.Schema.Common as SCIM.Common +import Util.Scim +import Web.Scim.Schema.Meta as Scim.Meta +import Web.Scim.Schema.Common as Scim.Common spec :: SpecWith TestEnv @@ -233,13 +233,13 @@ testSPStoreID store unstore isalive = do testDeleteTeam :: SpecWith TestEnv testDeleteTeam = it "cleans up all the right tables after deletion" $ do -- Create a team with two users and a SCIM token - (tok, (_uid, tid, idp)) <- registerIdPAndSCIMToken - user1 <- randomSCIMUser - user2 <- randomSCIMUser + (tok, (_uid, tid, idp)) <- registerIdPAndScimToken + user1 <- randomScimUser + user2 <- randomScimUser storedUser1 <- createUser tok user1 storedUser2 <- createUser tok user2 -- Resolve the users' SSO ids - let getUid = read . unpack . SCIM.Common.id . SCIM.Meta.thing + let getUid = read . unpack . Scim.Common.id . Scim.Meta.thing ssoid1 <- getSsoidViaSelf (getUid storedUser1) ssoid2 <- getSsoidViaSelf (getUid storedUser2) -- Delete the team diff --git a/services/spar/test-integration/Test/Spar/SCIMSpec.hs b/services/spar/test-integration/Test/Spar/ScimSpec.hs similarity index 93% rename from services/spar/test-integration/Test/Spar/SCIMSpec.hs rename to services/spar/test-integration/Test/Spar/ScimSpec.hs index 8a92543d76b..2d66b9c86bd 100644 --- a/services/spar/test-integration/Test/Spar/SCIMSpec.hs +++ b/services/spar/test-integration/Test/Spar/ScimSpec.hs @@ -9,14 +9,14 @@ {-# LANGUAGE ViewPatterns #-} {-# LANGUAGE LambdaCase #-} -module Test.Spar.SCIMSpec where +module Test.Spar.ScimSpec where import Imports import Bilge import Bilge.Assert import Control.Lens import Data.ByteString.Conversion -import Spar.SCIM (CreateScimToken(..), CreateScimTokenResponse(..), ScimTokenList(..)) +import Spar.Scim (CreateScimToken(..), CreateScimTokenResponse(..), ScimTokenList(..)) import Spar.Types (ScimTokenInfo(..)) import Util @@ -32,8 +32,8 @@ specUsers = describe "operations with users" $ do it "creates a user in an existing team" $ do env <- ask -- Create a user via SCIM - user <- randomSCIMUser - (tok, _) <- registerIdPAndSCIMToken + user <- randomScimUser + (tok, _) <- registerIdPAndScimToken scimStoredUser <- createUser tok user let userid = scimUserId scimStoredUser -- Check that this user is present in Brig and that Brig's view @@ -49,8 +49,8 @@ specUsers = describe "operations with users" $ do describe "GET /Users" $ do it "lists all users in a team" $ do -- Create a user via SCIM - user <- randomSCIMUser - (tok, (owner, _, _)) <- registerIdPAndSCIMToken + user <- randomScimUser + (tok, (owner, _, _)) <- registerIdPAndScimToken storedUser <- createUser tok user -- Get all users via SCIM users <- listUsers tok Nothing @@ -66,8 +66,8 @@ specUsers = describe "operations with users" $ do it "doesn't list deleted users" $ do env <- ask -- Create a user via SCIM - user <- randomSCIMUser - (tok, _) <- registerIdPAndSCIMToken + user <- randomScimUser + (tok, _) <- registerIdPAndScimToken storedUser <- createUser tok user let userid = scimUserId storedUser -- Delete the user (TODO: do it via SCIM) @@ -80,8 +80,8 @@ specUsers = describe 
"operations with users" $ do describe "GET /Users/:id" $ do it "finds a SCIM-provisioned user" $ do -- Create a user via SCIM - user <- randomSCIMUser - (tok, _) <- registerIdPAndSCIMToken + user <- randomScimUser + (tok, _) <- registerIdPAndScimToken storedUser <- createUser tok user -- Check that the SCIM-provisioned user can be fetched storedUser' <- getUser tok (scimUserId storedUser) @@ -108,8 +108,8 @@ specUsers = describe "operations with users" $ do it "doesn't find a deleted user" $ do env <- ask -- Create a user via SCIM - user <- randomSCIMUser - (tok, _) <- registerIdPAndSCIMToken + user <- randomScimUser + (tok, _) <- registerIdPAndScimToken storedUser <- createUser tok user let userid = scimUserId storedUser -- Delete the user (TODO: do it via SCIM) diff --git a/services/spar/test-integration/Util.hs b/services/spar/test-integration/Util.hs index 4c36b7411d5..95d092782ae 100644 --- a/services/spar/test-integration/Util.hs +++ b/services/spar/test-integration/Util.hs @@ -1,5 +1,5 @@ module Util (module U) where import Util.Core as U -import Util.SCIM as U +import Util.Scim as U import Util.Types as U diff --git a/services/spar/test-integration/Util/Core.hs b/services/spar/test-integration/Util/Core.hs index 7b27ce137c2..a171fd839bf 100644 --- a/services/spar/test-integration/Util/Core.hs +++ b/services/spar/test-integration/Util/Core.hs @@ -36,6 +36,7 @@ module Util.Core , decodeBody , decodeBody' -- * Other + , defPassword , createUserWithTeam , createTeamMember , deleteUser @@ -247,7 +248,7 @@ createTeamMember brigreq galleyreq teamid perms = do <- postUser name False (Just ssoid) (Just teamid) brigreq TestSpar (ScimToken, (UserId, TeamId, IdP)) -registerIdPAndSCIMToken = do +registerIdPAndScimToken :: HasCallStack => TestSpar (ScimToken, (UserId, TeamId, IdP)) +registerIdPAndScimToken = do team@(_owner, teamid, idp) <- registerTestIdP - (, team) <$> registerSCIMToken teamid (Just (idp ^. idpId)) + (, team) <$> registerScimToken teamid (Just (idp ^. idpId)) -- | Create a fresh SCIM token and register it for the team. -registerSCIMToken :: HasCallStack => TeamId -> Maybe IdPId -> TestSpar ScimToken -registerSCIMToken teamid midpid = do +registerScimToken :: HasCallStack => TeamId -> Maybe IdPId -> TestSpar ScimToken +registerScimToken teamid midpid = do env <- ask tok <- ScimToken <$> do code <- liftIO UUID.nextRandom @@ -69,13 +69,13 @@ registerSCIMToken teamid midpid = do pure tok -- | Generate a SCIM user with a random name and handle. 
-randomSCIMUser :: TestSpar SCIM.User.User -randomSCIMUser = do +randomScimUser :: TestSpar Scim.User.User +randomScimUser = do suffix <- pack <$> replicateM 5 (liftIO (randomRIO ('0', '9'))) - pure $ SCIM.User.empty - { SCIM.User.userName = "scimuser_" <> suffix - , SCIM.User.displayName = Just ("Scim User #" <> suffix) - , SCIM.User.externalId = Just ("scimuser_extid_" <> suffix) + pure $ Scim.User.empty + { Scim.User.userName = "scimuser_" <> suffix + , Scim.User.displayName = Just ("Scim User #" <> suffix) + , Scim.User.externalId = Just ("scimuser_extid_" <> suffix) } ---------------------------------------------------------------------------- @@ -85,8 +85,8 @@ randomSCIMUser = do createUser :: HasCallStack => ScimToken - -> SCIM.User.User - -> TestSpar SCIM.StoredUser + -> Scim.User.User + -> TestSpar Scim.StoredUser createUser tok user = do env <- ask r <- createUser_ @@ -100,8 +100,8 @@ createUser tok user = do listUsers :: HasCallStack => ScimToken - -> Maybe SCIM.Filter - -> TestSpar [SCIM.StoredUser] + -> Maybe Scim.Filter + -> TestSpar [Scim.StoredUser] listUsers tok mbFilter = do env <- ask r <- listUsers_ @@ -110,17 +110,17 @@ listUsers tok mbFilter = do (env ^. teSpar) ScimToken -> UserId - -> TestSpar SCIM.StoredUser + -> TestSpar Scim.StoredUser getUser tok userid = do env <- ask r <- getUser_ @@ -178,7 +178,7 @@ listTokens zusr = do -- | Create a user. createUser_ :: Maybe ScimToken -- ^ Authentication - -> SCIM.User.User -- ^ User data + -> Scim.User.User -- ^ User data -> SparReq -- ^ Spar endpoint -> TestSpar ResponseLBS createUser_ auth user spar_ = do @@ -196,14 +196,14 @@ createUser_ auth user spar_ = do -- | List all users. listUsers_ :: Maybe ScimToken -- ^ Authentication - -> Maybe SCIM.Filter -- ^ Predicate to filter the results + -> Maybe Scim.Filter -- ^ Predicate to filter the results -> SparReq -- ^ Spar endpoint -> TestSpar ResponseLBS listUsers_ auth mbFilter spar_ = do call . get $ ( spar_ . paths ["scim", "v2", "Users"] - . queryItem' "filter" (toByteString' . SCIM.renderFilter <$> mbFilter) + . queryItem' "filter" (toByteString' . Scim.renderFilter <$> mbFilter) . scimAuth auth . acceptScim ) @@ -281,21 +281,21 @@ acceptScim :: Request -> Request acceptScim = accept "application/scim+json" -- | Get ID of a user returned from SCIM. -scimUserId :: SCIM.StoredUser -> UserId +scimUserId :: Scim.StoredUser -> UserId scimUserId storedUser = either err id (readEither id_) where - id_ = unpack (SCIM.id (SCIM.thing storedUser)) + id_ = unpack (Scim.id (Scim.thing storedUser)) err e = error $ "scimUserId: couldn't parse ID " ++ id_ ++ ": " ++ e -- | Check that some properties match between an SCIM user and a Brig user. 
userShouldMatch :: (HasCallStack, MonadIO m) - => SCIM.StoredUser -> User -> m () + => Scim.StoredUser -> User -> m () userShouldMatch scimStoredUser brigUser = liftIO $ do - let scimUser = SCIM.value (SCIM.thing scimStoredUser) + let scimUser = Scim.value (Scim.thing scimStoredUser) scimUserId scimStoredUser `shouldBe` userId brigUser - Just (Handle (SCIM.User.userName scimUser)) `shouldBe` + Just (Handle (Scim.User.userName scimUser)) `shouldBe` userHandle brigUser - fmap Name (SCIM.User.displayName scimUser) `shouldBe` + fmap Name (Scim.User.displayName scimUser) `shouldBe` Just (userName brigUser) diff --git a/services/spar/test/Arbitrary.hs b/services/spar/test/Arbitrary.hs index ab92253daf3..fe6f7f10397 100644 --- a/services/spar/test/Arbitrary.hs +++ b/services/spar/test/Arbitrary.hs @@ -19,7 +19,7 @@ import Data.Id () import SAML2.WebSSO.Test.Arbitrary () import Servant.API.ContentTypes import Spar.Types -import Spar.SCIM +import Spar.Scim import Test.QuickCheck diff --git a/stack.yaml b/stack.yaml index 02074899d75..2c4facf1e62 100644 --- a/stack.yaml +++ b/stack.yaml @@ -73,13 +73,6 @@ packages: commit: 7e996a93fec5901767f845a50316b3c18e51a61d extra-dep: true -# The version on Haskage (0.9.0.2) deadlocks so here we're using master. -# See https://github.com/jwiegley/async-pool/issues/2 -- location: - git: https://github.com/jwiegley/async-pool - commit: 5678c13c4ec7d96a55622f7171f8e6a6bb3a3947 - extra-dep: true - # Our fork gives us access to some guts that the upstream 'http-client' # doesn't expose; see https://github.com/wireapp/wire-server/pull/373#issuecomment-400251467 # @@ -119,7 +112,7 @@ packages: # services/spar: - location: git: https://github.com/wireapp/saml2-web-sso - commit: d69ef1338c7b13516eafd2f6f53cfe12c3aec36c # master (Dec 4, 2018) + commit: 3e04ed8e605733cedfaa68d82808b450b2d4508f # master (Jan 23, 2019) extra-dep: true - location: git: https://github.com/wireapp/hsaml2 @@ -132,12 +125,11 @@ packages: extra-dep: true - location: git: https://github.com/wireapp/hscim - commit: 9abf972d67dfec0cc1aad8ad0b61f600fe4f5476 # master (Nov 20, 2018) + commit: 5306564bffeeb4ad7216481e6ba132dffb214213 # master (Jan 22, 2019) extra-dep: true extra-deps: - async-2.2.1 -- lifted-async-0.10.0.3 - hinotify-0.4 - fsnotify-0.3.0.1 - base-prelude-1.3 @@ -147,6 +139,7 @@ extra-deps: - data-timeout-0.3 - geoip2-0.3.1.0 - mime-0.4.0.2 +- multiset-0.3.4.1 - text-icu-translit-0.1.0.7 - wai-middleware-gunzip-0.0.2 - invertible-hxt-0.1 # for hsaml2 / spar @@ -155,6 +148,7 @@ extra-deps: # for hscim - network-uri-static-0.1.1.0 - list-t-1.0.1 # 1.0.0.1 doesn't build +- unliftio-0.2.10 # for pooled concurrency utils in UnliftIO.Async # the following are just not on Stackage (and most of these were present in # LTS-11 but got evicted in LTS-12) @@ -175,6 +169,9 @@ flags: protobuf: True arbitrary: True + galley-types: + cql: True + extra-package-dbs: [] allow-newer: False diff --git a/tools/api-simulations/api-simulations.cabal b/tools/api-simulations/api-simulations.cabal index 5d8b97b8249..2fcfc7955cb 100644 --- a/tools/api-simulations/api-simulations.cabal +++ b/tools/api-simulations/api-simulations.cabal @@ -69,7 +69,6 @@ executable api-smoketest , http-client-tls >= 0.2 , imports , lens >= 4.1 - , lifted-async >= 0.2 , mime >= 0.4 , optparse-applicative >= 0.11 , retry >= 0.7 @@ -110,7 +109,6 @@ executable api-loadtest , http-client >= 0.4 , http-client-tls >= 0.2 , imports - , lifted-async >= 0.2 , metrics-core >= 0.1 , mime >= 0.4 , monad-control >= 0.3 @@ -122,4 +120,5 @@ 
executable api-loadtest , transformers >= 0.3 , tinylog >= 0.14 , types-common >= 0.11 + , unliftio >= 0.2.10 , uuid >= 1.3 diff --git a/tools/api-simulations/loadtest/src/Network/Wire/Simulations/LoadTest.hs b/tools/api-simulations/loadtest/src/Network/Wire/Simulations/LoadTest.hs index e8cb4f20d88..39b017eb919 100644 --- a/tools/api-simulations/loadtest/src/Network/Wire/Simulations/LoadTest.hs +++ b/tools/api-simulations/loadtest/src/Network/Wire/Simulations/LoadTest.hs @@ -5,7 +5,7 @@ module Network.Wire.Simulations.LoadTest where import Imports hiding (log) -import UnliftIO.Async.Extended as Async +import UnliftIO.Async import Data.Id (ConvId) import Network.Wire.Bot import Network.Wire.Bot.Assert @@ -57,12 +57,12 @@ runLoadTest s = replicateM (conversationsTotal s) mkConv runConv :: LoadTestSettings -> ([BotNet Bot], [BotNet Bot]) -> BotNet () runConv s g = do - active <- Async.sequencePooled (parallelRequests s) (fst g) - passive <- Async.sequencePooled (parallelRequests s) (snd g) + active <- pooledMapConcurrentlyN (parallelRequests s) id (fst g) + passive <- pooledMapConcurrentlyN (parallelRequests s) id (snd g) let bots = active ++ passive -- Clear existing clients ---------- log Info $ msg $ val "Clearing existing clients" - void $ forPooled (parallelRequests s) bots $ \b -> + pooledForConcurrentlyN_ (parallelRequests s) bots $ \b -> resetBotClients b -- Create conv --------------------- log Info $ msg $ val "Creating conversation" @@ -71,7 +71,7 @@ runConv s g = do -- Prepare ------------------------- log Info $ msg $ val "Preparing" let botsMarked = map (True,) active ++ map (False,) passive - states <- forPooled (parallelRequests s) botsMarked $ \(isActive, b) -> do + states <- pooledForConcurrentlyN (parallelRequests s) botsMarked $ \(isActive, b) -> do nmsg <- if isActive then between (messagesMin s) (messagesMax s) else pure 0 nast <- if isActive then between (assetsMin s) (assetsMax s) else pure 0 nClients <- between (clientsMin s) (clientsMax s) @@ -86,7 +86,7 @@ runConv s g = do return $! 
BotState mainClient otherClients conv bots nmsg nast -- Run ----------------------------- log Info $ msg $ val "Running" - void $ forPooled (parallelRequests s) (zip bots states) $ \(b, st) -> + pooledForConcurrentlyN_ (parallelRequests s) (zip bots states) $ \(b, st) -> runBotSession b $ do log Info $ msg $ val "Initializing sessions" let allClients = botClient st : botOtherClients st @@ -101,7 +101,7 @@ runConv s g = do runBot s st `Ex.onException` removeClients (b, st) -- Drain --------------------------- log Info $ msg $ val "Draining" - void $ forPooled (parallelRequests s) (zip bots states) $ \(b, st) -> do + pooledForConcurrentlyN_ (parallelRequests s) (zip bots states) $ \(b, st) -> do removeClients (b, st) drainBot b diff --git a/tools/db/auto-whitelist/auto-whitelist.cabal b/tools/db/auto-whitelist/auto-whitelist.cabal index 1621ee70220..93ab05cdc20 100644 --- a/tools/db/auto-whitelist/auto-whitelist.cabal +++ b/tools/db/auto-whitelist/auto-whitelist.cabal @@ -28,13 +28,13 @@ executable auto-whitelist , galley-types , imports , lens - , lifted-async , mtl , optparse-applicative , text , time , tinylog , types-common + , unliftio , uuid other-modules: diff --git a/tools/db/auto-whitelist/src/Work.hs b/tools/db/auto-whitelist/src/Work.hs index 09067246e44..8bc762c3db7 100644 --- a/tools/db/auto-whitelist/src/Work.hs +++ b/tools/db/auto-whitelist/src/Work.hs @@ -13,7 +13,7 @@ import Brig.Types hiding (Client) import Cassandra import Data.Id import System.Logger (Logger) -import UnliftIO.Async.Extended (mapMPooled) +import UnliftIO.Async (pooledMapConcurrentlyN_) import Data.List.Extra (nubOrd) import qualified System.Logger as Log @@ -24,7 +24,7 @@ runCommand :: Logger -> ClientState -> IO () runCommand l brig = runClient brig $ do services <- getServices existing <- filterM doesServiceExist (nubOrd services) - void $ mapMPooled 20 (whitelistService l) existing + pooledMapConcurrentlyN_ 20 (whitelistService l) existing ---------------------------------------------------------------------------- -- Queries diff --git a/tools/db/service-backfill/service-backfill.cabal b/tools/db/service-backfill/service-backfill.cabal index 250897684c2..361fc005487 100644 --- a/tools/db/service-backfill/service-backfill.cabal +++ b/tools/db/service-backfill/service-backfill.cabal @@ -28,13 +28,13 @@ executable service-backfill , galley-types , imports , lens - , lifted-async , mtl , optparse-applicative , text , time , tinylog , types-common + , unliftio , uuid other-modules: diff --git a/tools/db/service-backfill/src/Work.hs b/tools/db/service-backfill/src/Work.hs index 43d9e8392b7..efe082e6e12 100644 --- a/tools/db/service-backfill/src/Work.hs +++ b/tools/db/service-backfill/src/Work.hs @@ -13,7 +13,7 @@ import Brig.Types hiding (Client) import Cassandra hiding (pageSize) import Data.Id import System.Logger (Logger) -import UnliftIO.Async.Extended (mapMPooled) +import UnliftIO.Async (pooledMapConcurrentlyN) import Data.Conduit import Data.Conduit.Internal (zipSources) import qualified Data.Conduit.List as C @@ -29,7 +29,7 @@ runCommand l brig galley = (transPipe (runClient galley) getUsers) .| C.mapM (\(i, p) -> Log.info l (Log.field "convs" (show (i * pageSize))) >> pure p) - .| C.mapM (runClient galley . mapMPooled 10 resolveBot) + .| C.mapM (runClient galley . pooledMapConcurrentlyN 10 resolveBot) .| C.concat .| C.catMaybes .| C.chunksOf 50 .| C.mapM_ (runClient brig . 
writeBots) diff --git a/tools/makedeb/Makefile b/tools/makedeb/Makefile index 7c61daaa2cc..228305e6653 100644 --- a/tools/makedeb/Makefile +++ b/tools/makedeb/Makefile @@ -2,10 +2,11 @@ LANG := en_US.UTF-8 SHELL := /usr/bin/env bash NAME := makedeb VERSION ?= +ARCH := $(shell if [ -f "`which dpkg-architecture`" ]; then dpkg-architecture -qDEB_HOST_ARCH; else [ -f "`which dpkg`" ] && dpkg --print-architecture; fi ) BUILD_NUMBER ?= 0 BUILD_LABEL ?= local BUILD := $(BUILD_NUMBER)$(shell [ "${BUILD_LABEL}" == "" ] && echo "" || echo ".${BUILD_LABEL}") -DEB := $(NAME)_$(VERSION)+$(BUILD)_amd64.deb +DEB := $(NAME)_$(VERSION)+$(BUILD)_$(ARCH).deb guard-%: @ if [ "${${*}}" = "" ]; then \ @@ -31,19 +32,19 @@ clean: install: init stack install . --pedantic --test --bench --no-run-benchmarks --local-bin-path=dist -.PHONY: +.PHONY: compile compile: stack build . --pedantic --test --bench --no-run-benchmarks --no-copy-bins .PHONY: dist dist: guard-VERSION install $(DEB) .metadata -$(DEB): +$(DEB): guard-ARCH guard-VERSION $(eval $@_DIR := $(shell mktemp -d -t makedeb.XXXXXXXXXX)) cp -R -L deb $($@_DIR) sed -i "s/<>/$(VERSION)/g" $($@_DIR)/deb/DEBIAN/control sed -i "s/<>/$(BUILD)/g" $($@_DIR)/deb/DEBIAN/control - sed -i "s/<>/amd64/g" $($@_DIR)/deb/DEBIAN/control + sed -i "s/<>/$(ARCH)/g" $($@_DIR)/deb/DEBIAN/control dpkg-deb -b $($@_DIR)/deb ../../dist/$(DEB) rm -rf $($@_DIR)