diff --git a/.docker/api/Dockerfile b/.docker/api/Dockerfile index e3d1b7df..5369afdf 100644 --- a/.docker/api/Dockerfile +++ b/.docker/api/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.7.3 +FROM python:3.6 RUN apt-get -y update && apt-get -y upgrade && apt-get install -y ffmpeg diff --git a/.docker/scheduler/Dockerfile b/.docker/scheduler/Dockerfile index 3647a89a..b5e84ad6 100644 --- a/.docker/scheduler/Dockerfile +++ b/.docker/scheduler/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.7.3 +FROM python:3.6 RUN apt-get -y update && apt-get -y upgrade && apt-get install -y ffmpeg && apt-get install -y supervisor @@ -21,4 +21,4 @@ RUN pip install -r requirements.txt VOLUME ["/opt/okuna-api"] -CMD ["/usr/bin/supervisord", "-c", "/etc/supervisor/conf.d/supervisord.conf"] \ No newline at end of file +CMD ["/usr/bin/supervisord", "-c", "/etc/supervisor/conf.d/supervisord.conf"] diff --git a/.docker/scheduler/supervisord.conf b/.docker/scheduler/supervisord.conf index a7057d12..a3011aec 100644 --- a/.docker/scheduler/supervisord.conf +++ b/.docker/scheduler/supervisord.conf @@ -49,4 +49,18 @@ startretries = 3 ; max # of serial start failures (default 3) exitcodes = 0, 2 ; 'expected' exit codes for process (default 0, 2) killasgroup = true ; SIGKILL the UNIX process group (def false) stopasgroup = true +stopsignal = QUIT + +[program:rqscheduleractivityscore] +command = python manage.py rqscheduler --queue=process-activity-score +loglevel = info ; (log level;default info; others: debug, warn, trace) +numprocs = 1 +directory = /opt/okuna-api +autostart = true +autorestart = unexpected +startsecs = 1 ; number of secs prog must stay running (def. 1) +startretries = 3 ; max # of serial start failures (default 3) +exitcodes = 0, 2 ; 'expected' exit codes for process (default 0, 2) +killasgroup = true ; SIGKILL the UNIX process group (def false) +stopasgroup = true stopsignal = QUIT \ No newline at end of file diff --git a/.docker/worker/Dockerfile b/.docker/worker/Dockerfile index 86a5e0a2..2e6e50f5 100644 --- a/.docker/worker/Dockerfile +++ b/.docker/worker/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.7.3 +FROM python:3.6 RUN apt-get -y update && apt-get -y upgrade && apt-get install -y ffmpeg && apt-get install -y supervisor @@ -21,4 +21,4 @@ RUN pip install -r /requirements.txt VOLUME ["/opt/okuna-api"] -CMD ["/usr/bin/supervisord", "-c", "/etc/supervisor/conf.d/supervisord.conf"] \ No newline at end of file +CMD ["/usr/bin/supervisord", "-c", "/etc/supervisor/conf.d/supervisord.conf"] diff --git a/.docker/worker/supervisord.conf b/.docker/worker/supervisord.conf index 6b5be544..46b26fd4 100644 --- a/.docker/worker/supervisord.conf +++ b/.docker/worker/supervisord.conf @@ -49,4 +49,18 @@ startretries = 3 ; max # of serial start failures (default 3) exitcodes = 0, 2 ; 'expected' exit codes for process (default 0, 2) killasgroup = true ; SIGKILL the UNIX process group (def false) stopasgroup = true +stopsignal = QUIT + +[program:rqworkeractivityscore] +command = python manage.py rqworker process-activity-score +loglevel = info ; (log level;default info; others: debug, warn, trace) +numprocs = 1 +directory = /opt/okuna-api +autostart = true +autorestart = unexpected +startsecs = 1 ; number of secs prog must stay running (def. 
1) +startretries = 3 ; max # of serial start failures (default 3) +exitcodes = 0, 2 ; 'expected' exit codes for process (default 0, 2) +killasgroup = true ; SIGKILL the UNIX process group (def false) +stopasgroup = true stopsignal = QUIT \ No newline at end of file diff --git a/.ebextensions/supervisord.config b/.ebextensions/supervisord.config index f961b95c..427d4d2f 100644 --- a/.ebextensions/supervisord.config +++ b/.ebextensions/supervisord.config @@ -8,7 +8,7 @@ files: file=/opt/python/run/supervisor.sock ; (the path to the socket file) ;chmod=0700 ; socket file mode (default 0700) ;chown=nobody:nogroup ; socket file uid:gid owner - + [supervisord] logfile=/opt/python/log/supervisord.log ; (main log file;default $CWD/supervisord.log) logfile_maxbytes=10MB ; (max main logfile bytes b4 rotation;default 50MB) @@ -19,13 +19,13 @@ files: minprocs=200 ; (min. avail process descriptors;default 200) directory=/opt/python/current/app ; (default is not to cd during start) ;nocleanup=true ; (don not clean up tempfiles at start;default false) - + [rpcinterface:supervisor] supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface - + [supervisorctl] serverurl=unix:///opt/python/run/supervisor.sock - + [program:httpd] command=/opt/python/bin/httpdlaunch stdout_logfile=/opt/python/log/django_stdout.log @@ -39,8 +39,8 @@ files: exitcodes=0,2 ; 'expected' exit codes for process (default 0,2) killasgroup=false ; SIGKILL the UNIX process group (def false) redirect_stderr=false - - + + [program:rqworkerdefault] command=/bin/bash -c 'source /opt/python/current/env && source /opt/python/run/venv/bin/activate && python manage.py rqworker default' stdout_logfile=/opt/python/log/rqworkerdefault_stdout.log @@ -61,7 +61,7 @@ files: stopasgroup=true stopsignal=QUIT redirect_stderr=false - + [program:rqworkerhigh] command=/bin/bash -c 'source /opt/python/current/env && source /opt/python/run/venv/bin/activate && python manage.py rqworker high' stdout_logfile=/opt/python/log/rqworkerhigh_stdout.log @@ -81,7 +81,7 @@ files: stopasgroup=true stopsignal=QUIT redirect_stderr=false - + [program:rqworkerlow] command=/bin/bash -c 'source /opt/python/current/env && source /opt/python/run/venv/bin/activate && python manage.py rqworker low' stdout_logfile=/opt/python/log/rqworkerlow_stdout.log @@ -101,3 +101,23 @@ files: stopasgroup=true stopsignal=QUIT redirect_stderr=false + + [program:rqworkeractivityscore] + command=/bin/bash -c 'source /opt/python/current/env && source /opt/python/run/venv/bin/activate && python manage.py rqworker process-activity-score' + stdout_logfile=/opt/python/log/rqworkeractivityscore_stdout.log + stderr_logfile=/opt/python/log/rqworkeractivityscore_stderr.log + pidfile=/opt/python/run/rqworker_activityscore + loglevel=info ; (log level;default info; others: debug,warn,trace) + user=nobody + group=nobody + numprocs=1 + directory=/opt/python/current/app + autostart=true + autorestart=unexpected + startsecs=1 ; number of secs prog must stay running (def. 1) + startretries=3 ; max # of serial start failures (default 3) + exitcodes=0,2 ; 'expected' exit codes for process (default 0,2) + killasgroup=true ; SIGKILL the UNIX process group (def false) + stopasgroup=true + stopsignal=QUIT + redirect_stderr=false diff --git a/CHANGELOG.md b/CHANGELOG.md index bcd2da1d..6ef91d42 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,10 +4,17 @@ The change log for the API server for Okuna. 
## Table of contents +- [Release 0.0.66](#release-0.0.66) - [Release 0.0.63](#release-0.0.63) - [Release 0.0.59](#release-0.0.59) +## Release 0.0.66 + +- Introduce an activity score on posts and communities, and add background jobs to maintain it +- Refactor trending posts and trending communities to rank by activity score + + ## Release 0.0.63 - Improve performance for linked users API diff --git a/Pipfile b/Pipfile index 27ea21c6..c0589632 100644 --- a/Pipfile +++ b/Pipfile @@ -55,6 +55,7 @@ shutilwhich = "*" halo = "*" watchdog = "*" spectra = "*" +colorlog = "*" [pipenv] allow_prereleases = true diff --git a/Pipfile.lock b/Pipfile.lock index edd06d9b..17a8b8aa 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256:5211b8516fdafee639e2bd23fd239354e8920714bab370d726c8c58792ccedb7" + "sha256:28fdd02c898eb2875dac892a5d7500b02d9856a85f09461de9e6830fcbfdf001" }, "pipfile-spec": 6, "requires": {}, @@ -16,10 +16,10 @@ "default": { "appdirs": { "hashes": [ - "sha256:9e5896d1372858f8dd3344faf4e5014d21849c756c8d5701f78f8a103b372d92", - "sha256:d8b24664561d0d34ddfaec54636d502d7cea6e29c3eaf68f3df6180863e2166e" + "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41", + "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128" ], - "version": "==1.4.3" + "version": "==1.4.4" }, "bandit": { "hashes": [ @@ -31,27 +31,27 @@ }, "beautifulsoup4": { "hashes": [ - "sha256:594ca51a10d2b3443cbac41214e12dbb2a1cd57e1a7344659849e2e20ba6a8d8", - "sha256:a4bbe77fd30670455c5296242967a123ec28c37e9702a8a81bd2f20a4baf0368", - "sha256:d4e96ac9b0c3a6d3f0caae2e4124e6055c5dcafde8e2f831ff194c104f0775a0" + "sha256:73cc4d115b96f79c7d77c1c7f7a0a8d4c57860d1041df407dd1aae7f07a77fd7", + "sha256:a6237df3c32ccfaee4fd201c8f5f9d9df619b93121d01353a64a73ce8c6ef9a8", + "sha256:e718f2342e2e099b640a34ab782407b7b676f47ee272d6739e60b8ea23829f2c" ], "index": "pypi", - "version": "==4.9.0" + "version": "==4.9.1" }, "boto3": { "hashes": [ - "sha256:60d9833685713815fb3fd72a513813a2810d75179c35d781388825a09cccf6cb", - "sha256:85155ebc55a4437ce48e45e900c10794f8372bba74e2c4c3c738b3b56d08139a" + "sha256:703157e8f16c57133fde0082a2d8b99ca6d36120ca4479df1464df80dd148a87", + "sha256:74d78ca0fd706f447a5f787d88214b298b213b1eddf2e8197051a0844df45146" ], "index": "pypi", - "version": "==1.13.5" + "version": "==1.13.14" }, "botocore": { "hashes": [ - "sha256:75fb94cb4dac9fd4967a536a212fd0cc1def9ef8f41d97fc52e1f14b4c465647", - "sha256:eaaffe84db50281f589c8eee343064294d82eee9966b172778f5c795ffe43149" + "sha256:a8e4cb8ed5a7e59fce935c9a550ccf616e9d5a053d02c374832610c2e377ca92", + "sha256:c8a5647069f978ae664987ebdeffaef0eb2910e88a52fcc8d52c9eb014fed8cc" ], - "version": "==1.16.5" + "version": "==1.16.14" }, "certifi": { "hashes": [ @@ -81,6 +81,14 @@ ], "version": "==0.4.3" }, + "colorlog": { + "hashes": [ + "sha256:30aaef5ab2a1873dec5da38fd6ba568fa761c9fa10b40241027fa3edea47f3d2", + "sha256:732c191ebbe9a353ec160d043d02c64ddef9028de8caae4cfa8bd49b6afed53e" + ], + "index": "pypi", + "version": "==4.1.0" + }, "colormath": { "hashes": [ "sha256:3d4605af344527da0e4f9f504fad7ddbebda35322c566a6c72e28edb1ff31217" ], "version": "==3.0.0" }, @@ -169,11 +177,11 @@ }, "django-cacheops": { "hashes": [ - "sha256:56ea95ad4eb1b61b6c014b7b23f3203ef8816611c9e8a097a7e99628810e9f16", - "sha256:a94418c8ba816c7c65d8cc2dd265313e042f4339c3b62a3682ebec89d75ff222" + "sha256:0f3e28c16d172b513980f604e026e356db559a4e511d2a12d06f3d34d5c9ca58", + "sha256:f7f657fd8c5a79a2739e781d25f06492725aa1fce0032c25fa7b06e057d8be49" ], "index": "pypi", - "version": "==4.2" + 
"version": "==5.0" }, "django-cursor-pagination": { "hashes": [ @@ -227,11 +235,11 @@ }, "django-ordered-model": { "hashes": [ - "sha256:0931f498008f91a00a32c4e0ae08a662ef608a1092bf6e6ec9af9b1a83f08acf", - "sha256:abf0d963f7e607a994baf6bc300e50af647b3d243c3e592c6cc8f8b924b6d427" + "sha256:29af6624cf3505daaf0df00e2df1d0726dd777b95e08f304d5ad0264092aa934", + "sha256:d867166ed4dd12501139e119cbbc5b4d19798a3e72740aef0af4879ba97102cf" ], "index": "pypi", - "version": "==3.3.0" + "version": "==3.4.1" }, "django-positions": { "hashes": [ @@ -265,11 +273,11 @@ }, "django-rq": { "hashes": [ - "sha256:060bece17912c4188fc3166e79c8244dc0608fbacc68e1069a47ceb91759ffcf", - "sha256:70e4e4a6566c0c1449a2f08ad74865a2045d74659da63aeea66681015ae1bbdb" + "sha256:ee9aefba814ae00b6d6a566ed0cd2a0a090e0e42d90bb9c4f1c70d3cacb47a52", + "sha256:f0bd7ac3b8b4b4abb161646e80a8751c81664beaa810e92fdbcf9fb3e7d72727" ], "index": "pypi", - "version": "==2.3.1" + "version": "==2.3.2" }, "django-rq-scheduler": { "hashes": [ @@ -320,10 +328,10 @@ }, "ffmpy": { "hashes": [ - "sha256:65abdddfa2561bb86b6c9ecfced53c7a15ea5080db4ddad08da7de5a348929f1" + "sha256:c52a86f530f1caefddc15b50193f86d1bfe009855893b06e683d8a9445af99fb" ], "index": "pypi", - "version": "==0.2.2" + "version": "==0.2.3" }, "funcy": { "hashes": [ @@ -362,10 +370,10 @@ }, "jmespath": { "hashes": [ - "sha256:695cb76fa78a10663425d5b73ddc5714eb711157e52704d69be03b1a02ba4fec", - "sha256:cca55c8d153173e21baa59983015ad0daf603f9cb799904ff057bfb8ff8dc2d9" + "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9", + "sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f" ], - "version": "==0.9.5" + "version": "==0.10.0" }, "langdetect": { "hashes": [ @@ -425,29 +433,34 @@ }, "numpy": { "hashes": [ - "sha256:00d7b54c025601e28f468953d065b9b121ddca7fff30bed7be082d3656dd798d", - "sha256:02ec9582808c4e48be4e93cd629c855e644882faf704bc2bd6bbf58c08a2a897", - "sha256:0e6f72f7bb08f2f350ed4408bb7acdc0daba637e73bce9f5ea2b207039f3af88", - "sha256:1be2e96314a66f5f1ce7764274327fd4fb9da58584eaff00b5a5221edefee7d6", - "sha256:2466fbcf23711ebc5daa61d28ced319a6159b260a18839993d871096d66b93f7", - "sha256:2b573fcf6f9863ce746e4ad00ac18a948978bb3781cffa4305134d31801f3e26", - "sha256:3f0dae97e1126f529ebb66f3c63514a0f72a177b90d56e4bce8a0b5def34627a", - "sha256:50fb72bcbc2cf11e066579cb53c4ca8ac0227abb512b6cbc1faa02d1595a2a5d", - "sha256:57aea170fb23b1fd54fa537359d90d383d9bf5937ee54ae8045a723caa5e0961", - "sha256:709c2999b6bd36cdaf85cf888d8512da7433529f14a3689d6e37ab5242e7add5", - "sha256:7d59f21e43bbfd9a10953a7e26b35b6849d888fc5a331fa84a2d9c37bd9fe2a2", - "sha256:904b513ab8fbcbdb062bed1ce2f794ab20208a1b01ce9bd90776c6c7e7257032", - "sha256:96dd36f5cdde152fd6977d1bbc0f0561bccffecfde63cd397c8e6033eb66baba", - "sha256:9933b81fecbe935e6a7dc89cbd2b99fea1bf362f2790daf9422a7bb1dc3c3085", - "sha256:bbcc85aaf4cd84ba057decaead058f43191cc0e30d6bc5d44fe336dc3d3f4509", - "sha256:dccd380d8e025c867ddcb2f84b439722cf1f23f3a319381eac45fd077dee7170", - "sha256:e22cd0f72fc931d6abc69dc7764484ee20c6a60b0d0fee9ce0426029b1c1bdae", - "sha256:ed722aefb0ebffd10b32e67f48e8ac4c5c4cf5d3a785024fdf0e9eb17529cd9d", - "sha256:efb7ac5572c9a57159cf92c508aad9f856f1cb8e8302d7fdb99061dbe52d712c", - "sha256:efdba339fffb0e80fcc19524e4fdbda2e2b5772ea46720c44eaac28096d60720", - "sha256:f22273dd6a403ed870207b853a856ff6327d5cbce7a835dfa0645b3fc00273ec" - ], - "version": "==1.18.4" + "sha256:0028da01578ddb0d7372ccd168d7e7e3b04f25881db7f520bff6c50456aa7b02", + 
"sha256:09e0e60d6ed6417516a08f9767665ae459507dd1df63942e0c0bb69d93f05c0e", + "sha256:0bffe7f20aa96e3b16a99c5a38a6e3ebeeff9203c8000723f040c72746808c5b", + "sha256:1041dd124664263f1b9cde98028dd2d0f164a94b13a06183f27a7b7dd14767ad", + "sha256:164d8d2a0de07c3aba089e7db0873930ac05252d985c8825f247bd79ddf3bd9d", + "sha256:1ae657a2390cbc1553df60cb2a5f69742761d0ad5957b0113c9c00bb06276a78", + "sha256:1ae709f648755ce757ef896fb110c52cbc76bc787a1243ad9b1262be3cc01e64", + "sha256:1d84d42be12fc7d3e9afc2e381136e6a4a0aa509183166b99079fd87afb8a6a6", + "sha256:361c84cdf8e10a27d1ce7bb0404284eed2f704fb10ebbdb714fe5a51ef4f2765", + "sha256:59b4ace51c26d6f6698ebaee442a37d2f34415ad2d9c683e18bb462f50768697", + "sha256:5c1db3b05428c6c8397c2457063b16a03688f1d0531dac96afa46a0362a5f237", + "sha256:705551bb2fb68a3ee1c5868a24d9e57670324a2c25530e3846b58f111ca3bada", + "sha256:72a8744aa28d2f85629810aa13fe45b13992ca9566eade5fecb0e916d7df6c80", + "sha256:82a905f8d920aa1dc2d642a1e76ed54f2baa3eb23e2216bc6cd41ae2b274dded", + "sha256:876a0d72f16e60c34678ff52535d0ccdfb5718ed0ebac4ed50187bd6e06c1bac", + "sha256:8ac99d78e3ebc41b0dccf024a8dd36057abfa4dfcf3875259abf09da28e89fd2", + "sha256:8c4be83b9f253701ff865b6a9de26bbb67a3104486123347a3629101d3268a43", + "sha256:96578b9000e8ca35b83e96237d617345c4ac7bf8816cb950ddf76235b3b7306c", + "sha256:c39e84169f93899a15dbb7cbd3e68bd6bb31f56800658d966f89a2186eb4f929", + "sha256:c58eedde4999735da1d95a4af266a43ba1c32fbc2021941bb5149ad58da1312d", + "sha256:c995c832ddf4ce88b6383ce8c9160e86d614141412c0c874b6df87f680783528", + "sha256:d5833cb9cce627e960c87b75eb1878498cdf430155062f9423cee5617032284f", + "sha256:de874f2537e4e604c1db5905c4728b6b715c66a85bc71b5bc1b236973dc7610a", + "sha256:f45938abfa864e342f6719f05150f6458e018e22793a6fdf60e0ea4d4d15f53c", + "sha256:f6fe5dd6526fa6c0083fb5218a903dc9d9ea02df66996cd3be8c44c3b97894d5", + "sha256:fbd9dbb96fa22ee2f2cfad5311563a9df4528d3ac70f7635a9da0c7424ba4459" + ], + "version": "==1.19.0rc1" }, "onesignal-sdk": { "hashes": [ @@ -459,10 +472,10 @@ }, "packaging": { "hashes": [ - "sha256:3c292b474fda1671ec57d46d739d072bfd495a4f51ad01a055121d81e952b7a3", - "sha256:82f77b9bee21c1bafbf35a84905d604d5d1223801d639cf3ed140bd651c08752" + "sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8", + "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181" ], - "version": "==20.3" + "version": "==20.4" }, "pathtools": { "hashes": [ @@ -582,11 +595,11 @@ }, "redis": { "hashes": [ - "sha256:174101a3ce04560d716616290bb40e0a2af45d5844c8bd474c23fc5c52e7a46a", - "sha256:7378105cd8ea20c4edc49f028581e830c01ad5f00be851def0f4bc616a83cd89" + "sha256:2ef11f489003f151777c064c5dbc6653dfb9f3eade159bcadc524619fddc2242", + "sha256:6d65e84bc58091140081ee9d9c187aab0480097750fac44239307a3bdf0b1251" ], "index": "pypi", - "version": "==3.5.0" + "version": "==3.5.2" }, "requests": { "hashes": [ @@ -608,10 +621,10 @@ }, "rq": { "hashes": [ - "sha256:49c9149fa9301f98d918f3042f36bed4252d37193d222a1ce8b0e25886442377", - "sha256:c3e65a8ba5e59287308f23679f7fe729b9380531e4f6cdabb2dee99b82834811" + "sha256:318017229145a40d34c20bc21ecdcd8c217a326eb08c9bee249e8c9e09846e36", + "sha256:e6d82156eb43a8fffe451099a716114f5f163ec7603b6e8c8dab16a1b31f4547" ], - "version": "==1.3.0" + "version": "==1.4.1" }, "rq-scheduler": { "hashes": [ @@ -652,10 +665,10 @@ }, "six": { "hashes": [ - "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a", - "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c" + 
"sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", + "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" ], - "version": "==1.14.0" + "version": "==1.15.0" }, "smmap": { "hashes": [ @@ -666,10 +679,10 @@ }, "soupsieve": { "hashes": [ - "sha256:e914534802d7ffd233242b785229d5ba0766a7f487385e3f714446a07bf540ae", - "sha256:fcd71e08c0aee99aca1b73f45478549ee7e7fc006d51b37bec9e9def7dc22b69" + "sha256:1634eea42ab371d3d346309b93df7870a88610f0725d47528be902a0d95ecc55", + "sha256:a59dc181727e95d25f781f0eb4fd1825ff45590ec8ff49eadfd7f1a537cc0232" ], - "version": "==2.0" + "version": "==2.0.1" }, "spectra": { "hashes": [ @@ -722,10 +735,10 @@ }, "toml": { "hashes": [ - "sha256:229f81c57791a41d65e399fc06bf0848bab550a9dfd5ed66df18ce5f05e73d5c", - "sha256:235682dd292d5899d361a811df37e04a8828a5b1da3115886b73cf81ebc9100e" + "sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f", + "sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88" ], - "version": "==0.10.0" + "version": "==0.10.1" }, "uritools": { "hashes": [ diff --git a/openbook/settings.py b/openbook/settings.py index ea1626d7..c2db8308 100644 --- a/openbook/settings.py +++ b/openbook/settings.py @@ -19,6 +19,7 @@ from dotenv import load_dotenv, find_dotenv from sentry_sdk.integrations.django import DjangoIntegration from django_replicated.settings import * +from decimal import Decimal, getcontext # Logging config from sentry_sdk.integrations.rq import RqIntegration @@ -177,6 +178,8 @@ REDIS_RQ_DEFAULT_JOBS_CACHE_LOCATION = '%(redis_location)s/%(db)d' % {'redis_location': REDIS_LOCATION, 'db': 1} REDIS_RQ_HIGH_JOBS_CACHE_LOCATION = '%(redis_location)s/%(db)d' % {'redis_location': REDIS_LOCATION, 'db': 2} REDIS_RQ_LOW_JOBS_CACHE_LOCATION = '%(redis_location)s/%(db)d' % {'redis_location': REDIS_LOCATION, 'db': 3} +REDIS_ACTIVITY_SCORES_JOBS_CACHE_LOCATION = '%(redis_location)s/%(db)d' % { + 'redis_location': REDIS_LOCATION, 'db': 4} CACHES = { 'default': { @@ -211,6 +214,14 @@ }, "KEY_PREFIX": "ob-api-rq-low-job-" }, + 'activity-score-jobs': { + "BACKEND": "django_redis.cache.RedisCache", + "LOCATION": REDIS_ACTIVITY_SCORES_JOBS_CACHE_LOCATION, + "OPTIONS": { + "CLIENT_CLASS": "django_redis.client.DefaultClient" + }, + "KEY_PREFIX": "ob-api-actvty-score-job-" + }, } CACHEOPS_REDIS_DB = int(os.environ.get('CACHEOPS_REDIS_DB', '1')) @@ -236,6 +247,9 @@ 'low': { 'USE_REDIS_CACHE': 'rq-low-jobs', }, + 'process-activity-score': { + 'USE_REDIS_CACHE': 'activity-score-jobs', + }, } if IS_BUILD: @@ -527,7 +541,26 @@ MIN_UNIQUE_TOP_POST_REACTIONS_COUNT = int(os.environ.get('MIN_UNIQUE_TOP_POST_REACTIONS_COUNT', '5')) MIN_UNIQUE_TOP_POST_COMMENTS_COUNT = int(os.environ.get('MIN_UNIQUE_TOP_POST_COMMENTS_COUNT', '5')) -MIN_UNIQUE_TRENDING_POST_REACTIONS_COUNT = int(os.environ.get('MIN_UNIQUE_TRENDING_POST_REACTIONS_COUNT', '5')) + +# for activity score, set decimal precision to 10 +getcontext().prec = 10 + +MIN_ACTIVITY_SCORE_FOR_POST_TRENDING = Decimal((os.environ.get('MIN_ACTIVITY_SCORE_FOR_POST_TRENDING', 0.002))) +MIN_ACTIVITY_SCORE_FOR_COMMUNITY_TRENDING = Decimal((os.environ.get('MIN_ACTIVITY_SCORE_FOR_COMMUNITY_TRENDING', 0.002))) +ACTIVITY_ATOMIC_WEIGHT = Decimal((os.environ.get('ACTIVITY_ATOMIC_WEIGHT', 0.001))) + +ACTIVITY_UNIQUE_REACTION_MULTIPLIER = int(os.environ.get('ACTIVITY_UNIQUE_REACTION_MULTIPLIER', 1)) +ACTIVITY_UNIQUE_COMMENT_MULTIPLIER = int(os.environ.get('ACTIVITY_UNIQUE_COMMENT_MULTIPLIER', 1)) +ACTIVITY_COUNT_COMMENTS_MULTIPLIER = 
int(os.environ.get('ACTIVITY_COUNT_COMMENTS_MULTIPLIER', 1)) +ACTIVITY_UNIQUE_POST_MULTIPLIER = int(os.environ.get('ACTIVITY_UNIQUE_POST_MULTIPLIER', 1)) +ACTIVITY_COUNT_POSTS_MULTIPLIER = int(os.environ.get('ACTIVITY_COUNT_POSTS_MULTIPLIER', 1)) + +ACTIVITY_UNIQUE_REACTION_WEIGHT = Decimal((ACTIVITY_ATOMIC_WEIGHT * ACTIVITY_UNIQUE_REACTION_MULTIPLIER)) +ACTIVITY_UNIQUE_COMMENT_WEIGHT = Decimal((ACTIVITY_ATOMIC_WEIGHT * ACTIVITY_UNIQUE_COMMENT_MULTIPLIER)) +ACTIVITY_COUNT_COMMENTS_WEIGHT = Decimal((ACTIVITY_ATOMIC_WEIGHT * ACTIVITY_COUNT_COMMENTS_MULTIPLIER)) +ACTIVITY_UNIQUE_POST_WEIGHT = Decimal((ACTIVITY_ATOMIC_WEIGHT * ACTIVITY_UNIQUE_POST_MULTIPLIER)) +ACTIVITY_COUNT_POSTS_WEIGHT = Decimal((ACTIVITY_ATOMIC_WEIGHT * ACTIVITY_COUNT_POSTS_MULTIPLIER)) +ACTIVITY_SCORE_EXPIRY_IN_HOURS = int(os.environ.get('ACTIVITY_SCORE_EXPIRY_IN_HOURS', 12)) # Email Config @@ -550,7 +583,7 @@ OS_TRANSLATION_STRATEGY_NAME = 'testing' MIN_UNIQUE_TOP_POST_REACTIONS_COUNT = 1 MIN_UNIQUE_TOP_POST_COMMENTS_COUNT = 1 - MIN_UNIQUE_TRENDING_POST_REACTIONS_COUNT = 1 + MIN_ACTIVITY_SCORE_FOR_POST_TRENDING = 0.001 if IS_PRODUCTION: AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID') diff --git a/openbook/urls.py b/openbook/urls.py index 59cf73eb..b93e8e8f 100644 --- a/openbook/urls.py +++ b/openbook/urls.py @@ -83,7 +83,7 @@ from openbook_posts.views.post_media.views import PostMedia from openbook_posts.views.post_reaction.views import PostReactionItem from openbook_posts.views.post_reactions.views import PostReactions, PostReactionsEmojiCount, PostReactionEmojiGroups -from openbook_posts.views.posts.views import Posts, TrendingPosts, TopPosts, TrendingPostsNew, \ +from openbook_posts.views.posts.views import Posts, TrendingPosts, TopPosts, TrendingPostsLegacy, \ ProfilePostsExcludedCommunities, SearchProfilePostsExcludedCommunities, TopPostsExcludedCommunities, \ SearchTopPostsExcludedCommunities, ProfilePostsExcludedCommunity, TopPostsExcludedCommunity from openbook_importer.views import ImportItem @@ -233,7 +233,7 @@ path('/', include(post_patterns)), path('', Posts.as_view(), name='posts'), path('trending/', TrendingPosts.as_view(), name='trending-posts'), - path('trending/new/', TrendingPostsNew.as_view(), name='trending-posts-new'), + path('trending/new/', TrendingPostsLegacy.as_view(), name='trending-posts-new'), path('emojis/groups/', PostReactionEmojiGroups.as_view(), name='posts-emoji-groups'), path('profile/', include(posts_profile_patterns)), path('top/', include(posts_top_patterns)), @@ -445,7 +445,6 @@ urlpatterns = [ path('api/', include(api_patterns)), url('admin/', admin.site.urls), - path('django-rq/', include('django_rq.urls')), url('health/', Health.as_view(), name='health'), ] diff --git a/openbook_auth/models.py b/openbook_auth/models.py index 0941cbf6..f7efc209 100644 --- a/openbook_auth/models.py +++ b/openbook_auth/models.py @@ -1991,13 +1991,18 @@ def get_trending_posts(self, max_id=None, min_id=None): Post = get_post_model() return Post.get_trending_posts_for_user_with_id(user_id=self.pk, max_id=max_id, min_id=min_id) - def get_trending_posts_old(self): + def get_trending_posts_legacy(self): Post = get_post_model() - return Post.get_trending_posts_old_for_user_with_id(user_id=self.pk) + return Post.get_trending_posts_for_user_with_id_legacy(user_id=self.pk) def get_trending_communities(self, category_name=None): Community = get_community_model() - return Community.get_trending_communities_for_user_with_id(user_id=self.pk, category_name=category_name) + return 
Community.get_trending_communities_for_user_with_id(user_id=self.pk, + category_name=category_name) + + def get_trending_communities_by_members(self, category_name=None): + Community = get_community_model() + return Community.get_trending_communities_by_members_for_user_with_id(user_id=self.pk, category_name=category_name) def search_communities_with_query(self, query, excluded_from_profile_posts): Community = get_community_model() diff --git a/openbook_auth/tests/views/test_authenticated_user.py b/openbook_auth/tests/views/test_authenticated_user.py index aba7dab8..3f8ff4e5 100644 --- a/openbook_auth/tests/views/test_authenticated_user.py +++ b/openbook_auth/tests/views/test_authenticated_user.py @@ -846,8 +846,6 @@ def test_cannot_set_invalid_language(self): 'language_id': 99999 }, **headers) - print(response) - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) user.refresh_from_db() self.assertTrue(user.language.id, language.id) diff --git a/openbook_common/tests/models.py b/openbook_common/tests/models.py index f9e46bff..ea6809f1 100644 --- a/openbook_common/tests/models.py +++ b/openbook_common/tests/models.py @@ -1,6 +1,6 @@ from unittest.mock import patch -from rest_framework.test import APITestCase +from rest_framework.test import APITestCase, APITransactionTestCase class OpenbookAPITestCase(APITestCase): @@ -10,3 +10,12 @@ def setUp(self): def tearDown(self): self.patcher.stop() + + +class OpenbookAPITransactionTestCase(APITransactionTestCase): + def setUp(self): + self.patcher = patch('openbook_notifications.helpers._send_notification_to_user') + self.mock_foo = self.patcher.start() + + def tearDown(self): + self.patcher.stop() diff --git a/openbook_common/utils/helpers.py b/openbook_common/utils/helpers.py index 19478b46..467a2f42 100644 --- a/openbook_common/utils/helpers.py +++ b/openbook_common/utils/helpers.py @@ -7,6 +7,7 @@ import magic import spectra +from cursor_pagination import CursorPaginator from django.http import QueryDict from imagekit.utils import get_cache from imagekit.models import ProcessedImageField @@ -143,3 +144,26 @@ def write_in_memory_file_to_disk(in_memory_file): tmp_file.seek(0) tmp_file.close() return tmp_file + + +def chunked_queryset_iterator(queryset, size, *, ordering=('id',)): + """ + Split a queryset into chunks. + This can be used instead of `queryset.iterator()`, + so `.prefetch_related()` also works + Note:: + The ordering must uniquely identify the object, + and be in the same order (ASC/DESC). See https://github.com/photocrowd/django-cursor-pagination + """ + pager = CursorPaginator(queryset, ordering) + after = None + while True: + page = pager.page(after=after, first=size) + if page: + yield from page.items + else: + return + if not page.has_next: + break + # take last item, next page starts after this. 
+ after = pager.cursor(instance=page[-1]) diff --git a/openbook_communities/migrations/0034_trendingcommunity.py b/openbook_communities/migrations/0034_trendingcommunity.py new file mode 100644 index 00000000..4a3fe5d0 --- /dev/null +++ b/openbook_communities/migrations/0034_trendingcommunity.py @@ -0,0 +1,22 @@ +# Generated by Django 2.2.5 on 2020-02-03 14:10 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('openbook_communities', '0033_auto_20191209_1337'), + ] + + operations = [ + migrations.CreateModel( + name='TrendingCommunity', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created', models.DateTimeField(db_index=True, editable=False)), + ('community', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='trending_community', to='openbook_communities.Community')), + ], + ), + ] diff --git a/openbook_communities/migrations/0035_community_activity_score.py b/openbook_communities/migrations/0035_community_activity_score.py new file mode 100644 index 00000000..4596d9a2 --- /dev/null +++ b/openbook_communities/migrations/0035_community_activity_score.py @@ -0,0 +1,18 @@ +# Generated by Django 2.2.5 on 2020-02-10 12:50 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('openbook_communities', '0034_trendingcommunity'), + ] + + operations = [ + migrations.AddField( + model_name='community', + name='activity_score', + field=models.FloatField(default=0.0), + ), + ] diff --git a/openbook_communities/migrations/0036_delete_trendingcommunity.py b/openbook_communities/migrations/0036_delete_trendingcommunity.py new file mode 100644 index 00000000..86376e58 --- /dev/null +++ b/openbook_communities/migrations/0036_delete_trendingcommunity.py @@ -0,0 +1,16 @@ +# Generated by Django 2.2.5 on 2020-02-22 11:49 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('openbook_communities', '0035_community_activity_score'), + ] + + operations = [ + migrations.DeleteModel( + name='TrendingCommunity', + ), + ] diff --git a/openbook_communities/migrations/0037_auto_20200222_1344.py b/openbook_communities/migrations/0037_auto_20200222_1344.py new file mode 100644 index 00000000..f56d8566 --- /dev/null +++ b/openbook_communities/migrations/0037_auto_20200222_1344.py @@ -0,0 +1,17 @@ +# Generated by Django 2.2.5 on 2020-02-22 12:44 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('openbook_communities', '0036_delete_trendingcommunity'), + ] + + operations = [ + migrations.AddIndex( + model_name='community', + index=models.Index(fields=['activity_score'], name='openbook_co_activit_07d4ba_idx'), + ), + ] diff --git a/openbook_communities/migrations/0038_auto_20200224_1615.py b/openbook_communities/migrations/0038_auto_20200224_1615.py new file mode 100644 index 00000000..252eeb03 --- /dev/null +++ b/openbook_communities/migrations/0038_auto_20200224_1615.py @@ -0,0 +1,18 @@ +# Generated by Django 2.2.5 on 2020-02-24 15:15 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('openbook_communities', '0037_auto_20200222_1344'), + ] + + operations = [ + migrations.AlterField( + model_name='community', + name='activity_score', + field=models.DecimalField(decimal_places=10, default=0.0, max_digits=10), 
+ ), + ] diff --git a/openbook_communities/models.py b/openbook_communities/models.py index 38cf3e86..54df2b8e 100644 --- a/openbook_communities/models.py +++ b/openbook_communities/models.py @@ -66,9 +66,13 @@ class Community(models.Model): _('is deleted'), default=False, ) + activity_score = models.DecimalField(default=0.0, decimal_places=10, max_digits=10) class Meta: verbose_name_plural = 'communities' + indexes = [ + models.Index(fields=['activity_score']), + ] @classmethod def is_user_with_username_invited_to_community_with_name(cls, username, community_name): @@ -147,7 +151,12 @@ def get_new_user_suggested_communities(cls): def get_trending_communities_for_user_with_id(cls, user_id, category_name=None): trending_communities_query = cls._make_trending_communities_query(category_name=category_name) trending_communities_query.add(~Q(banned_users__id=user_id), Q.AND) - return cls._get_trending_communities_with_query(query=trending_communities_query) + + trending_communities = cls._get_trending_communities_with_query(query=trending_communities_query) + if trending_communities.count() == 0: + return cls.get_trending_communities_by_members_for_user_with_id(user_id, category_name=category_name) + + return trending_communities @classmethod def get_trending_communities(cls, category_name=None): @@ -156,11 +165,37 @@ def get_trending_communities(cls, category_name=None): @classmethod def _get_trending_communities_with_query(cls, query): + return cls.objects.filter(query).order_by('-activity_score') + + @classmethod + def _make_trending_communities_query(cls, category_name=None): + trending_communities_query = Q(type=Community.COMMUNITY_TYPE_PUBLIC, is_deleted=False) + trending_communities_query.add(Q(activity_score__gte=settings.MIN_ACTIVITY_SCORE_FOR_COMMUNITY_TRENDING), Q.AND) + trending_communities_query.add(~Q(moderated_object__status=ModeratedObject.STATUS_APPROVED), Q.AND) + + if category_name: + trending_communities_query.add(Q(categories__name=category_name), Q.AND) + + return trending_communities_query + + @classmethod + def get_trending_communities_by_members_for_user_with_id(cls, user_id, category_name=None): + trending_communities_query = cls._make_trending_communities_by_members_query(category_name=category_name) + trending_communities_query.add(~Q(banned_users__id=user_id), Q.AND) + return cls._get_trending_communities_by_members_with_query(query=trending_communities_query) + + @classmethod + def get_trending_communities_by_members(cls, category_name=None): + trending_communities_query = cls._make_trending_communities_by_members_query(category_name=category_name) + return cls._get_trending_communities_by_members_with_query(query=trending_communities_query) + + @classmethod + def _get_trending_communities_by_members_with_query(cls, query): return cls.objects.annotate(Count('memberships')).filter(query).order_by( '-memberships__count', '-created') @classmethod - def _make_trending_communities_query(cls, category_name=None): + def _make_trending_communities_by_members_query(cls, category_name=None): trending_communities_query = Q(type=cls.COMMUNITY_TYPE_PUBLIC, is_deleted=False) if category_name: diff --git a/openbook_communities/tests/views/communities/test_views.py b/openbook_communities/tests/views/communities/test_views.py index 685e0580..38f63a3e 100644 --- a/openbook_communities/tests/views/communities/test_views.py +++ b/openbook_communities/tests/views/communities/test_views.py @@ -5,7 +5,10 @@ from django.conf import settings from faker import Faker from rest_framework 
import status -from openbook_common.tests.models import OpenbookAPITestCase +from django_rq import get_worker, get_scheduler +from rq import SimpleWorker + +from openbook_common.tests.models import OpenbookAPITestCase, OpenbookAPITransactionTestCase from mixer.backend.django import mixer import logging @@ -13,7 +16,7 @@ from openbook_common.tests.helpers import make_user, make_authentication_headers_for_user, \ make_community_avatar, make_community_cover, make_category, make_community_users_adjective, \ - make_community_user_adjective, make_community + make_community_user_adjective, make_community, make_fake_post_text from openbook_common.utils.model_loaders import get_community_model from openbook_communities.models import Community @@ -1548,6 +1551,158 @@ def _get_url(self): return reverse('trending-communities') +class TrendingCommunitiesTransactionAPITests(OpenbookAPITransactionTestCase): + """ + TrendingCommunitiesTransactionAPITests + """ + + fixtures = [ + 'openbook_circles/fixtures/circles.json' + ] + + def test_displays_public_communities(self): + """ + should display public communities and return 200 + """ + user = make_user() + + amount_of_communities = 5 + communities_ids = [] + + for i in range(0, amount_of_communities): + community_owner = make_user() + community = make_community(creator=community_owner) + communities_ids.append(community.pk) + post = community_owner.create_community_post(text=make_fake_post_text(), community_name=community.name) + + # update activity scores + get_worker('process-activity-score', worker_class=SimpleWorker).work(burst=True) + + headers = make_authentication_headers_for_user(user) + headers = self._add_version_header(headers) + + url = self._get_url() + response = self.client.get(url, **headers, format='multipart') + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + response_communities = json.loads(response.content) + + self.assertEqual(len(response_communities), len(communities_ids)) + self._clear_jobs_in_scheduler() + for response_community in response_communities: + response_community_id = response_community.get('id') + self.assertIn(response_community_id, communities_ids) + + def test_displays_only_public_communities_with_min_activity_score(self): + """ + should display only public communities with minimum activity score and return 200 + """ + user = make_user() + + amount_of_communities = 5 + communities_ids = [] + + for i in range(0, amount_of_communities): + community_owner = make_user() + community = make_community(creator=community_owner) + if i % 2 == 0: + communities_ids.append(community.pk) + post = community_owner.create_community_post(text=make_fake_post_text(), community_name=community.name) + + # update activity scores + get_worker('process-activity-score', worker_class=SimpleWorker).work(burst=True) + + headers = make_authentication_headers_for_user(user) + headers = self._add_version_header(headers) + + url = self._get_url() + response = self.client.get(url, **headers, format='multipart') + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + response_communities = json.loads(response.content) + + self.assertEqual(len(response_communities), len(communities_ids)) + self._clear_jobs_in_scheduler() + for response_community in response_communities: + response_community_id = response_community.get('id') + self.assertIn(response_community_id, communities_ids) + + def test_not_displays_private_communities(self): + """ + should not display private communities and return 200 + """ + user = make_user() + + 
amount_of_communities = 5 + + Community = get_community_model() + + for i in range(0, amount_of_communities): + community_owner = make_user() + community = make_community(creator=community_owner, type=Community.COMMUNITY_TYPE_PRIVATE) + post = community_owner.create_community_post(text=make_fake_post_text(), community_name=community.name) + + # update activity scores + get_worker('process-activity-score', worker_class=SimpleWorker).work(burst=True) + + headers = make_authentication_headers_for_user(user) + headers = self._add_version_header(headers) + + url = self._get_url() + + response = self.client.get(url, **headers, format='multipart') + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + response_communities = json.loads(response.content) + + self._clear_jobs_in_scheduler() + self.assertEqual(len(response_communities), 0) + + def test_does_not_display_community_banned_from(self): + """ + should not display a community banned from and return 200 + """ + user = make_user() + community_owner = make_user() + + community = make_community(creator=community_owner) + post = community_owner.create_community_post(text=make_fake_post_text(), community_name=community.name) + user.join_community_with_name(community_name=community.name) + + community_owner.ban_user_with_username_from_community_with_name(username=user.username, community_name=community.name) + + # update activity scores + get_worker('process-activity-score', worker_class=SimpleWorker).work(burst=True) + + headers = make_authentication_headers_for_user(user) + headers = self._add_version_header(headers) + + url = self._get_url() + + response = self.client.get(url, **headers, format='multipart') + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + response_communities = json.loads(response.content) + self._clear_jobs_in_scheduler() + self.assertEqual(0, len(response_communities)) + + def _clear_jobs_in_scheduler(self): + default_scheduler = get_scheduler('process-activity-score') + for job in default_scheduler.get_jobs(): + default_scheduler.cancel(job.get_id()) + + def _add_version_header(self, headers): + headers['HTTP_ACCEPT'] = 'application/json; version=2.0' + return headers + + def _get_url(self): + return reverse('trending-communities') + + class TopPostsExcludedCommunitiesAPITests(OpenbookAPITestCase): """ TopPostsExcludedCommunitiesAPI diff --git a/openbook_communities/tests/views/community/posts/test_views.py b/openbook_communities/tests/views/community/posts/test_views.py index 4dc5c626..767c96dd 100644 --- a/openbook_communities/tests/views/community/posts/test_views.py +++ b/openbook_communities/tests/views/community/posts/test_views.py @@ -1,6 +1,10 @@ from django.urls import reverse +from django_rq import get_worker, get_scheduler +from django.conf import settings from faker import Faker -from openbook_common.tests.models import OpenbookAPITestCase +from rq import SimpleWorker + +from openbook_common.tests.models import OpenbookAPITestCase, OpenbookAPITransactionTestCase from rest_framework import status import logging @@ -855,6 +859,74 @@ def _get_url(self, community_name): }) +class CommunityPostsTransactionAPITests(OpenbookAPITransactionTestCase): + """ + CommunityPostsTransactionAPI + """ + + def test_create_community_post_updates_community_activity_score(self): + """ + should update community activity score when creating community post + """ + user = make_user() + + headers = make_authentication_headers_for_user(user=user) + community = make_community(creator=user) + + data = { + 'text': 
make_fake_post_text(), + } + + url = self._get_url(community_name=community.name) + response = self.client.put(url, data, **headers, format='multipart') + get_worker('process-activity-score', worker_class=SimpleWorker).work(burst=True) + + community.refresh_from_db() + + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + expected_weight = settings.ACTIVITY_UNIQUE_POST_WEIGHT + settings.ACTIVITY_COUNT_POSTS_WEIGHT + + self._clear_jobs_in_scheduler() + self.assertEqual(community.activity_score, expected_weight) + + def test_create_second_community_post_updates_community_activity_score_appropriately(self): + """ + should update community activity score by only count weight when creating a second community post by same user + """ + user = make_user() + + headers = make_authentication_headers_for_user(user=user) + community = make_community(creator=user) + + data = { + 'text': make_fake_post_text(), + } + + url = self._get_url(community_name=community.name) + response = self.client.put(url, data, **headers, format='multipart') + + # create one more post + response = self.client.put(url, data, **headers, format='multipart') + + get_worker('process-activity-score', worker_class=SimpleWorker).work(burst=True) + community.refresh_from_db() + + expected_weight = settings.ACTIVITY_UNIQUE_POST_WEIGHT + (2 * settings.ACTIVITY_COUNT_POSTS_WEIGHT) + + self._clear_jobs_in_scheduler() + self.assertEqual(community.activity_score, expected_weight) + + def _clear_jobs_in_scheduler(self): + default_scheduler = get_scheduler('process-activity-score') + for job in default_scheduler.get_jobs(): + default_scheduler.cancel(job.get_id()) + + def _get_url(self, community_name): + return reverse('community-posts', kwargs={ + 'community_name': community_name + }) + + class CommunityClosedPostsAPITest(OpenbookAPITestCase): def test_can_retrieve_closed_posts_from_community_if_administrator(self): diff --git a/openbook_communities/views/communities/serializers.py b/openbook_communities/views/communities/serializers.py index 36efb74b..2ffcb795 100644 --- a/openbook_communities/views/communities/serializers.py +++ b/openbook_communities/views/communities/serializers.py @@ -89,11 +89,22 @@ class GetFavoriteCommunitiesSerializer(serializers.Serializer): ) +class TrendingCommunitiesSerializerLegacy(serializers.Serializer): + category = serializers.CharField(max_length=settings.CATEGORY_NAME_MAX_LENGTH, + allow_blank=True, + required=False, + validators=[category_name_exists]) + + class TrendingCommunitiesSerializer(serializers.Serializer): category = serializers.CharField(max_length=settings.CATEGORY_NAME_MAX_LENGTH, allow_blank=True, required=False, validators=[category_name_exists]) + count = serializers.IntegerField( + required=False, + max_value=20 + ) class GetCommunitiesCommunityCategorySerializer(serializers.ModelSerializer): diff --git a/openbook_communities/views/communities/views.py b/openbook_communities/views/communities/views.py index 962bf567..9133584f 100644 --- a/openbook_communities/views/communities/views.py +++ b/openbook_communities/views/communities/views.py @@ -13,7 +13,8 @@ from openbook_communities.views.communities.serializers import CreateCommunitySerializer, \ CommunitiesCommunitySerializer, CommunityNameCheckSerializer, \ GetFavoriteCommunitiesSerializer, GetJoinedCommunitiesSerializer, TrendingCommunitiesSerializer, \ - GetModeratedCommunitiesSerializer, GetAdministratedCommunitiesSerializer, SuggestedCommunitiesCommunitySerializer + GetModeratedCommunitiesSerializer, 
GetAdministratedCommunitiesSerializer, SuggestedCommunitiesCommunitySerializer, \ + TrendingCommunitiesSerializerLegacy class Communities(APIView): @@ -233,11 +234,20 @@ def get(self, request): class TrendingCommunities(APIView): permission_classes = (IsAuthenticated, IsNotSuspended) + serializer_class_legacy = TrendingCommunitiesSerializerLegacy serializer_class = TrendingCommunitiesSerializer def get(self, request): + version = request.version + + if version == '2.0': + return self.get_trending_communities(request) + else: + return self.get_trending_communities_legacy(request) + + def get_trending_communities_legacy(self, request): query_params = request.query_params.dict() - serializer = self.serializer_class(data=query_params) + serializer = self.serializer_class_legacy(data=query_params) serializer.is_valid(raise_exception=True) data = serializer.data @@ -245,12 +255,29 @@ def get(self, request): user = request.user - communities = user.get_trending_communities(category_name=category_name)[:30] + communities = user.get_trending_communities_by_members(category_name=category_name)[:30] posts_serializer = CommonSearchCommunitiesCommunitySerializer(communities, many=True, context={"request": request}) return Response(posts_serializer.data, status=status.HTTP_200_OK) + def get_trending_communities(self, request): + query_params = request.query_params.dict() + serializer = self.serializer_class(data=query_params) + serializer.is_valid(raise_exception=True) + + data = serializer.data + category_name = data.get('category') + count = data.get('count', 10) + user = request.user + + trending_communities = user.get_trending_communities(category_name=category_name)[:count] + + trending_communities_serializer = CommonSearchCommunitiesCommunitySerializer( + trending_communities, many=True, context={"request": request}) + + return Response(trending_communities_serializer.data, status=status.HTTP_200_OK) + class FavoriteCommunities(APIView): permission_classes = (IsAuthenticated, IsNotSuspended) diff --git a/openbook_moderation/tests/views/checks.py b/openbook_moderation/tests/views/checks.py index 67e59b89..3c604560 100644 --- a/openbook_moderation/tests/views/checks.py +++ b/openbook_moderation/tests/views/checks.py @@ -41,8 +41,6 @@ def test_suspension_penalties_prevent_access(self): headers = make_authentication_headers_for_user(user) response = self.client.get(url, **headers) - print(response.content) - self.assertEqual(status.HTTP_403_FORBIDDEN, response.status_code) def test_expired_suspension_penalty_does_not_prevent_access(self): diff --git a/openbook_posts/jobs.py b/openbook_posts/jobs.py index 2032d009..812a1d27 100644 --- a/openbook_posts/jobs.py +++ b/openbook_posts/jobs.py @@ -1,13 +1,15 @@ from django.utils import timezone -from django_rq import job +from django_rq import job, get_scheduler, get_queue + +from openbook_common.utils.helpers import chunked_queryset_iterator from video_encoding import tasks -from datetime import timedelta -from django.db.models import Q, Count +from datetime import timedelta, datetime +from django.db.models import Q, Count, F from django.conf import settings -from cursor_pagination import CursorPaginator from openbook_common.utils.model_loaders import get_post_model, get_post_media_model, get_community_model, \ - get_top_post_model, get_post_comment_model, get_moderated_object_model, get_trending_post_model + get_top_post_model, get_post_comment_model, get_moderated_object_model, get_trending_post_model, \ + get_post_reaction_model import logging logger = 
logging.getLogger(__name__) @@ -53,6 +55,311 @@ def process_post_media(post_id): logger.info('Processed media of post with id: %d' % post_id) +def _reduce_atomic_community_activity_score(community_id, multiplier=1): + Community = get_community_model() + community = Community.objects.filter(id=community_id).first() + if community: + community.activity_score = F('activity_score') - (multiplier * settings.ACTIVITY_ATOMIC_WEIGHT) + community.save() + + +def _process_community_activity_score_reaction_added(community, post_reaction_id): + default_scheduler = get_scheduler('process-activity-score') + expire_datetime = datetime.utcnow() + timedelta(hours=settings.ACTIVITY_SCORE_EXPIRY_IN_HOURS) + community.activity_score = F('activity_score') + settings.ACTIVITY_UNIQUE_REACTION_WEIGHT + community.save() + + # schedule reduction of activity scores + default_scheduler.enqueue_at(expire_datetime, _reduce_atomic_community_activity_score, + community.pk, + multiplier=settings.ACTIVITY_UNIQUE_REACTION_MULTIPLIER, + job_id='expire_community_{0}_rid_{1}_unique_reaction'.format( + community.pk, post_reaction_id)) + + +def _process_community_activity_score_reaction_deleted(community, post_reaction_id): + default_scheduler = get_scheduler('process-activity-score') + reaction_job_id = 'expire_community_{0}_rid_{1}_unique_reaction'.format(community.pk, post_reaction_id) + + if reaction_job_id in default_scheduler: + default_scheduler.cancel(reaction_job_id) + community.activity_score = F('activity_score') - settings.ACTIVITY_UNIQUE_REACTION_WEIGHT + community.save() + + +def process_activity_score_post_reaction(post_id, post_reaction_id): + """ + This job is called to process activity score on a post after add/remove reaction + """ + remove_reaction_job_id = 'process_remove_unique_reaction_pid_{0}_rid_{1}'.format(post_id, post_reaction_id) + default_queue = get_queue('process-activity-score') + remove_job = default_queue.fetch_job(remove_reaction_job_id) + + if remove_reaction_job_id in default_queue.job_ids: + # remove job is also queued, jobs cancel each other, return + remove_job.cancel() + return + + Post = get_post_model() + PostReaction = get_post_reaction_model() + Community = get_community_model() + + if not Post.objects.filter(pk=post_id).exists(): + # if post was deleted, return + return + + post = Post.objects.get(pk=post_id) + logger.info('Processing activity score for reaction of post with id: %d' % post_id) + + if post.community is not None and post.community.type == Community.COMMUNITY_TYPE_PUBLIC: + if not PostReaction.objects.filter(pk=post_reaction_id).exists(): + # reaction was deleted + post.activity_score = F('activity_score') - settings.ACTIVITY_UNIQUE_REACTION_WEIGHT + _process_community_activity_score_reaction_deleted(post.community, post_reaction_id) + else: + # reaction was added + post.activity_score = F('activity_score') + settings.ACTIVITY_UNIQUE_REACTION_WEIGHT + _process_community_activity_score_reaction_added(post.community, post_reaction_id) + + elif post.community is None and not PostReaction.objects.filter(pk=post_reaction_id).exists(): + # reaction was deleted + post.activity_score = F('activity_score') - settings.ACTIVITY_UNIQUE_REACTION_WEIGHT + else: + # reaction was added + post.activity_score = F('activity_score') + settings.ACTIVITY_UNIQUE_REACTION_WEIGHT + + post.save() + logger.info('Processed activity score for reaction of post with id: %d' % post_id) + + +def _process_post_activity_score_comment_deleted(post, commenter_comments_count): + if commenter_comments_count > 0: + # there are still other comments by this user + post.activity_score = F('activity_score') - settings.ACTIVITY_COUNT_COMMENTS_WEIGHT + else: + # no comments remain by this user, subtract the unique comment weight too + post.activity_score = F('activity_score') - \ + settings.ACTIVITY_UNIQUE_COMMENT_WEIGHT - \ + settings.ACTIVITY_COUNT_COMMENTS_WEIGHT + + +def _process_post_activity_score_comment_added(post, commenter_comments_count): + if commenter_comments_count > 1: + post.activity_score = F('activity_score') + settings.ACTIVITY_COUNT_COMMENTS_WEIGHT + elif commenter_comments_count == 1: + post.activity_score = F('activity_score') + \ + settings.ACTIVITY_UNIQUE_COMMENT_WEIGHT + \ + settings.ACTIVITY_COUNT_COMMENTS_WEIGHT + + +def _process_community_activity_score_comment_deleted(community, + post_id, + post_comment_id, + post_commenter_id, + commenter_comments_count): + + default_scheduler = get_scheduler('process-activity-score') + job_id = 'expire_community_{0}_pid_{1}_uid_{2}_cid_{3}'.format(community.pk, post_id, + post_commenter_id, post_comment_id) + unique_comment_job_id = 'expire_community_{0}_pid_{1}_uid_{2}_unique_comment'.format(community.pk, + post_id, post_commenter_id) + + if job_id in default_scheduler: + # there are still other comments by this user + default_scheduler.cancel(job_id) + community.activity_score = F('activity_score') - settings.ACTIVITY_COUNT_COMMENTS_WEIGHT + community.save() + + if commenter_comments_count == 0 and unique_comment_job_id in default_scheduler: + # no comments remain by this user, subtract the unique comment weight too + community.activity_score = F('activity_score') - settings.ACTIVITY_UNIQUE_COMMENT_WEIGHT + default_scheduler.cancel(unique_comment_job_id) + community.save() + + +def _process_community_activity_score_comment_added(community, + post_id, + post_comment_id, + post_commenter_id): + default_scheduler = get_scheduler('process-activity-score') + unique_comment_job_id = 'expire_community_{0}_pid_{1}_uid_{2}_unique_comment'.format(community.pk, + post_id, post_commenter_id) + expire_datetime = timezone.now() + timedelta(hours=settings.ACTIVITY_SCORE_EXPIRY_IN_HOURS) + + if unique_comment_job_id in default_scheduler: + community.activity_score = F('activity_score') + settings.ACTIVITY_COUNT_COMMENTS_WEIGHT + else: + community.activity_score = F('activity_score') + \ + settings.ACTIVITY_UNIQUE_COMMENT_WEIGHT + \ + settings.ACTIVITY_COUNT_COMMENTS_WEIGHT + community.save() + if unique_comment_job_id in default_scheduler: + default_scheduler.cancel(unique_comment_job_id) + + # schedule reduction of activity scores + default_scheduler.enqueue_at(expire_datetime, _reduce_atomic_community_activity_score, + community.pk, + multiplier=settings.ACTIVITY_UNIQUE_COMMENT_MULTIPLIER, + job_id=unique_comment_job_id) + default_scheduler.enqueue_at(expire_datetime, _reduce_atomic_community_activity_score, + community.pk, + multiplier=settings.ACTIVITY_COUNT_COMMENTS_MULTIPLIER, + job_id='expire_community_{0}_pid_{1}_uid_{2}_cid_{3}'.format( + community.pk, post_id, post_commenter_id, post_comment_id) + ) + + +def process_activity_score_post_comment(post_id, post_comment_id, post_commenter_id): + """ + This job is called to process activity score on a post after add/remove comment + """ + delete_comment_job_id = 'process_delete_comment_pid_{0}_cid_{1}'.format(post_id, post_comment_id) + default_queue = get_queue('process-activity-score') + delete_job = default_queue.fetch_job(delete_comment_job_id) + + if delete_comment_job_id in default_queue.job_ids: + # delete job is also queued, jobs cancel each other, return + delete_job.cancel() + return + + Post = get_post_model() + PostComment = get_post_comment_model() + Community = get_community_model() + + if not Post.objects.filter(pk=post_id, is_deleted=False, is_closed=False).exists(): + # if post was deleted, soft deleted or closed, return + return + + post = Post.objects.get(pk=post_id) + logger.info('Processing activity score for comment with id: %d' % post_comment_id) + + commenter_comments_count = PostComment.objects.filter(post_id=post_id, + is_deleted=False, + commenter_id=post_commenter_id).count() + + if post.community is not None and post.community.type == Community.COMMUNITY_TYPE_PUBLIC: + if not PostComment.objects.filter(pk=post_comment_id).exists(): + # comment was deleted + _process_post_activity_score_comment_deleted(post, commenter_comments_count) + _process_community_activity_score_comment_deleted(post.community, + post_id, + post_comment_id, + post_commenter_id, + commenter_comments_count) + else: + # comment was added + _process_post_activity_score_comment_added(post, commenter_comments_count) + _process_community_activity_score_comment_added(post.community, + post_id, + post_comment_id, + post_commenter_id) + else: + if not PostComment.objects.filter(pk=post_comment_id).exists(): + # comment was deleted + _process_post_activity_score_comment_deleted(post, commenter_comments_count) + else: + # comment was added + _process_post_activity_score_comment_added(post, commenter_comments_count) + + post.save() + logger.info('Processed activity score for comment with id: %d' % post_comment_id) + + +def _process_community_activity_score_post_added(post, total_posts_by_creator): + default_scheduler = get_scheduler('process-activity-score') + expire_datetime = timezone.now() + timedelta(hours=settings.ACTIVITY_SCORE_EXPIRY_IN_HOURS) + unique_post_job_id = 'expire_community_{0}_uid_{1}_unique_post'.format( + post.community.pk, + post.creator.pk) + + if unique_post_job_id in default_scheduler: + post.community.activity_score = F('activity_score') + settings.ACTIVITY_COUNT_POSTS_WEIGHT + else: + post.community.activity_score = F('activity_score') + \ + settings.ACTIVITY_UNIQUE_POST_WEIGHT + \ + settings.ACTIVITY_COUNT_POSTS_WEIGHT + + post.community.save() + if unique_post_job_id in default_scheduler: + default_scheduler.cancel(unique_post_job_id) + + # schedule reduction of activity scores + default_scheduler.enqueue_at(expire_datetime, _reduce_atomic_community_activity_score, + post.community.pk, + multiplier=settings.ACTIVITY_UNIQUE_POST_MULTIPLIER, + job_id=unique_post_job_id) + default_scheduler.enqueue_at(expire_datetime, _reduce_atomic_community_activity_score, + post.community.pk, + multiplier=settings.ACTIVITY_COUNT_POSTS_MULTIPLIER, + job_id='expire_community_{0}_pid_{1}'.format( + post.community.pk, post.pk) + ) + + +def _process_community_activity_score_post_deleted(post_id, post_creator_id, + post_community_id, total_posts_by_creator): + + default_scheduler = get_scheduler('process-activity-score') + job_id = 'expire_community_{0}_pid_{1}'.format(post_community_id, post_id) + unique_post_job_id = 'expire_community_{0}_uid_{1}_unique_post'.format(post_community_id, + post_creator_id) + + Community = get_community_model() + community = Community.objects.get(id=post_community_id) + if job_id in default_scheduler: + default_scheduler.cancel(job_id) + community.activity_score = F('activity_score') - settings.ACTIVITY_COUNT_POSTS_WEIGHT + community.save() + 
+    if total_posts_by_creator == 0 and unique_post_job_id in default_scheduler:
+        community.activity_score = F('activity_score') - settings.ACTIVITY_UNIQUE_POST_WEIGHT
+        default_scheduler.cancel(unique_post_job_id)
+        community.save()
+
+
+def process_community_activity_score_post(post_id, post_creator_id, post_community_id):
+    """
+    This job processes a community's activity score after a post is added or removed.
+    """
+    logger.info('Processing community activity score for create/delete post with id: %d' % post_id)
+
+    delete_post_job_id = 'process_delete_community_post_community_{0}_pid_{1}_uid_{2}'.format(post_community_id,
+                                                                                              post_id,
+                                                                                              post_creator_id)
+    default_queue = get_queue('process-activity-score')
+    delete_post_job = default_queue.fetch_job(delete_post_job_id)
+
+    if delete_post_job is not None and delete_post_job.is_queued:
+        # the delete-post job is also queued; the two jobs cancel out, so return
+        delete_post_job.cancel()
+        return
+
+    Post = get_post_model()
+    Community = get_community_model()
+    if not Community.objects.filter(id=post_community_id, is_deleted=False).exists():
+        # the community was deleted or soft deleted; nothing to process
+        return
+
+    creator_posts_query = Q(created__gte=timezone.now() - timedelta(hours=settings.ACTIVITY_SCORE_EXPIRY_IN_HOURS))
+    creator_posts_query.add(Q(creator_id=post_creator_id,
+                              community_id=post_community_id,
+                              is_closed=False, is_deleted=False), Q.AND)
+
+    total_posts_by_creator = Post.objects.filter(creator_posts_query).count()
+
+    if Post.objects.filter(pk=post_id, is_closed=False, is_deleted=False).exists():
+        # post was added
+        post = Post.objects.get(pk=post_id)
+        _process_community_activity_score_post_added(post, total_posts_by_creator)
+    else:
+        # post was removed
+        _process_community_activity_score_post_deleted(post_id,
+                                                       post_creator_id,
+                                                       post_community_id,
+                                                       total_posts_by_creator)
+    logger.info('Processed community activity score for create/delete post with id: %d' % post_id)
+
+
 @job('low')
 def curate_top_posts():
     """
@@ -91,7 +398,7 @@ def curate_top_posts():
     total_checked_posts = 0
     total_curated_posts = 0
 
-    for post in _chunked_queryset_iterator(posts, 1000):
+    for post in chunked_queryset_iterator(posts, 1000):
         total_checked_posts = total_checked_posts + 1
         if not post.reactions_count >= settings.MIN_UNIQUE_TOP_POST_REACTIONS_COUNT:
             unique_comments_count = PostComment.objects.filter(post=post). \
@@ -178,7 +485,7 @@ def clean_top_posts():
 
     delete_ids = []
 
-    for top_post in _chunked_queryset_iterator(top_posts, 1000):
+    for top_post in chunked_queryset_iterator(top_posts, 1000):
         if not top_post.reactions_count >= settings.MIN_UNIQUE_TOP_POST_REACTIONS_COUNT:
             unique_comments_count = PostComment.objects.filter(post=top_post.post). \
                 values('commenter_id'). \
@@ -220,20 +527,18 @@ def curate_trending_posts():
 
     trending_posts_query.add(trending_posts_community_query, Q.AND)
 
-    posts_select_related = 'community'
-    posts_prefetch_related = 'reactions__reactor'
-    posts_only = ('id', 'status', 'is_deleted', 'is_closed', 'community__type')
+    trending_posts_criteria_query = Q(activity_score__gte=settings.MIN_ACTIVITY_SCORE_FOR_POST_TRENDING)
+
+    trending_posts_query.add(trending_posts_criteria_query, Q.AND)
 
-    trending_posts_criteria_query = Q(reactions_count__gte=settings.MIN_UNIQUE_TRENDING_POST_REACTIONS_COUNT)
+    posts_select_related = 'community'
+    posts_only = ('id', 'status', 'activity_score', 'is_deleted', 'is_closed', 'community__type')
 
     posts = Post.objects. \
         select_related(posts_select_related). \
-        prefetch_related(posts_prefetch_related). 
\ only(*posts_only). \ filter(trending_posts_query). \ - annotate(reactions_count=Count('reactions__reactor_id')).\ - filter(trending_posts_criteria_query).\ - order_by('-reactions_count', '-created')[:30] + order_by('-activity_score', '-created')[:30] trending_posts_objects = [] @@ -267,26 +572,24 @@ def bootstrap_trending_posts(): trending_posts_community_query.add(~Q(moderated_object__status=ModeratedObject.STATUS_APPROVED), Q.AND) - posts_select_related = 'community' - posts_prefetch_related = 'reactions__reactor' - posts_only = ('id', 'status', 'is_deleted', 'is_closed', 'community__type') + trending_posts_criteria_query = Q(activity_score__gte=settings.MIN_ACTIVITY_SCORE_FOR_POST_TRENDING) + + trending_posts_community_query.add(trending_posts_criteria_query, Q.AND) - trending_posts_criteria_query = Q(reactions_count__gte=settings.MIN_UNIQUE_TRENDING_POST_REACTIONS_COUNT) + posts_select_related = 'community' + posts_only = ('id', 'status', 'activity_score', 'is_deleted', 'is_closed', 'community__type') posts = Post.objects. \ select_related(posts_select_related). \ - prefetch_related(posts_prefetch_related). \ only(*posts_only). \ filter(trending_posts_community_query). \ - annotate(reactions_count=Count('reactions__reactor_id')). \ - filter(trending_posts_criteria_query). \ order_by('-created') trending_posts_objects = [] total_curated_posts = 0 total_checked_posts = 0 - for post in _chunked_queryset_iterator(posts, 1000): + for post in chunked_queryset_iterator(posts, 1000): total_checked_posts += 1 trending_post = TrendingPost(post=post, created=timezone.now()) trending_posts_objects.append(trending_post) @@ -321,9 +624,11 @@ def clean_trending_posts(): trending_posts_community_query.add(Q(post__status=Post.STATUS_DRAFT), Q.OR) trending_posts_community_query.add(Q(post__status=Post.STATUS_PROCESSING), Q.OR) trending_posts_community_query.add(Q(post__moderated_object__status=ModeratedObject.STATUS_APPROVED), Q.OR) + trending_posts_community_query.add(Q(post__activity_score__lt=settings.MIN_ACTIVITY_SCORE_FOR_POST_TRENDING), Q.OR) posts_select_related = 'post__community' - posts_only = ('post__id', 'post__status', 'post__is_deleted', 'post__is_closed', 'post__community__type') + posts_only = ('post__id', 'post__status', 'post__activity_score', 'post__is_deleted', 'post__is_closed', + 'post__community__type') removable_trending_posts = TrendingPost.objects.select_related(posts_select_related). \ only(*posts_only). \ @@ -333,38 +638,3 @@ def clean_trending_posts(): # delete posts TrendingPost.objects.filter(id__in=direct_removable_delete_ids).delete() - - # Now we filter trending posts that do not meet criteria anymore - trending_posts_criteria_query = Q(reactions_count__lt=settings.MIN_UNIQUE_TRENDING_POST_REACTIONS_COUNT) - - less_than_min_reactions_trending_posts = TrendingPost.objects.\ - prefetch_related('post__reactions__reactor'). \ - only('id'). \ - annotate(reactions_count=Count('post__reactions__reactor_id')). \ - filter(trending_posts_criteria_query) - - delete_ids = [trending_post.pk for trending_post in less_than_min_reactions_trending_posts] - TrendingPost.objects.filter(id__in=delete_ids).delete() - - -def _chunked_queryset_iterator(queryset, size, *, ordering=('id',)): - """ - Split a queryset into chunks. - This can be used instead of `queryset.iterator()`, - so `.prefetch_related()` also works - Note:: - The ordering must uniquely identify the object, - and be in the same order (ASC/DESC). 
See https://github.com/photocrowd/django-cursor-pagination - """ - pager = CursorPaginator(queryset, ordering) - after = None - while True: - page = pager.page(after=after, first=size) - if page: - yield from page.items - else: - return - if not page.has_next: - break - # take last item, next page starts after this. - after = pager.cursor(instance=page[-1]) diff --git a/openbook_posts/migrations/0068_post_activity_score.py b/openbook_posts/migrations/0068_post_activity_score.py new file mode 100644 index 00000000..f8d3fa90 --- /dev/null +++ b/openbook_posts/migrations/0068_post_activity_score.py @@ -0,0 +1,18 @@ +# Generated by Django 2.2.5 on 2020-01-28 12:30 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('openbook_posts', '0067_merge_20191202_1731'), + ] + + operations = [ + migrations.AddField( + model_name='post', + name='activity_score', + field=models.FloatField(default=0.0), + ), + ] diff --git a/openbook_posts/migrations/0069_auto_20200130_1529.py b/openbook_posts/migrations/0069_auto_20200130_1529.py new file mode 100644 index 00000000..35a02660 --- /dev/null +++ b/openbook_posts/migrations/0069_auto_20200130_1529.py @@ -0,0 +1,17 @@ +# Generated by Django 2.2.5 on 2020-01-30 14:29 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('openbook_posts', '0068_post_activity_score'), + ] + + operations = [ + migrations.AddIndex( + model_name='post', + index=models.Index(fields=['activity_score'], name='openbook_po_activit_4b4ca7_idx'), + ), + ] diff --git a/openbook_posts/migrations/0070_merge_20200222_1342.py b/openbook_posts/migrations/0070_merge_20200222_1342.py new file mode 100644 index 00000000..70b0496e --- /dev/null +++ b/openbook_posts/migrations/0070_merge_20200222_1342.py @@ -0,0 +1,14 @@ +# Generated by Django 2.2.5 on 2020-02-22 12:42 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('openbook_posts', '0069_auto_20200130_1529'), + ('openbook_posts', '0068_profilepostscommunityexclusion'), + ] + + operations = [ + ] diff --git a/openbook_posts/migrations/0071_auto_20200224_1615.py b/openbook_posts/migrations/0071_auto_20200224_1615.py new file mode 100644 index 00000000..87c9e909 --- /dev/null +++ b/openbook_posts/migrations/0071_auto_20200224_1615.py @@ -0,0 +1,18 @@ +# Generated by Django 2.2.5 on 2020-02-24 15:15 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('openbook_posts', '0070_merge_20200222_1342'), + ] + + operations = [ + migrations.AlterField( + model_name='post', + name='activity_score', + field=models.DecimalField(decimal_places=10, default=0.0, max_digits=10), + ), + ] diff --git a/openbook_posts/models.py b/openbook_posts/models.py index 7918e090..6326bc02 100644 --- a/openbook_posts/models.py +++ b/openbook_posts/models.py @@ -14,6 +14,8 @@ from django.utils import timezone from django.utils.translation import ugettext_lazy as _ from django.db.models import Count +from django.db import transaction +import django_rq import ffmpy # Create your views here. 
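Note: the model and view changes below hand all activity-score work to RQ via
transaction.on_commit, so a job is only enqueued once the surrounding database
transaction commits and workers never observe uncommitted rows. A minimal
sketch of the recurring pattern, assuming the 'process-activity-score' queue
is configured for django_rq (the helper name here is hypothetical, not part of
the diff):

    import django_rq
    from django.db import transaction

    def enqueue_after_commit(job_func, job_id, **job_kwargs):
        # Resolve the queue eagerly, but defer the enqueue until commit;
        # if the transaction rolls back, no job is ever queued.
        queue = django_rq.get_queue('process-activity-score')
        transaction.on_commit(lambda: queue.enqueue(job_func, job_id=job_id, **job_kwargs))

This also explains why the new tests below use OpenbookAPITransactionTestCase:
Django's default TestCase wraps each test in a transaction that is rolled
back, so on_commit callbacks never fire there, and a TransactionTestCase
variant is required to exercise these jobs.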
@@ -52,7 +54,8 @@ check_mimetype_is_supported_media_mimetypes from openbook_posts.helpers import upload_to_post_image_directory, upload_to_post_video_directory, \ upload_to_post_directory -from openbook_posts.jobs import process_post_media +from openbook_posts.jobs import process_post_media, process_activity_score_post_reaction, \ + process_activity_score_post_comment, process_community_activity_score_post magic = get_magic() from openbook_common.helpers import get_language_for_text @@ -92,11 +95,15 @@ class Post(models.Model): upload_to=upload_to_post_directory, blank=False, null=True, format='JPEG', options={'quality': 30}, processors=[ResizeToFit(width=512, upscale=False)]) + activity_score = models.DecimalField(default=0.0, decimal_places=10, max_digits=10) class Meta: index_together = [ ('creator', 'community'), ] + indexes = [ + models.Index(fields=['activity_score']), + ] @classmethod def get_post_id_for_post_with_uuid(cls, post_uuid): @@ -206,42 +213,6 @@ def get_trending_posts_for_user_with_id(cls, user_id, max_id=None, min_id=None): return trending_community_posts_queryset - @classmethod - def get_trending_posts_old_for_user_with_id(cls, user_id): - """ - For backwards compatibility reasons - """ - trending_posts_query = cls._get_trending_posts_old_query() - trending_posts_query.add(~Q(community__banned_users__id=user_id), Q.AND) - - trending_posts_query.add(~Q(Q(creator__blocked_by_users__blocker_id=user_id) | Q( - creator__user_blocks__blocked_user_id=user_id)), Q.AND) - - trending_posts_query.add(~Q(moderated_object__reports__reporter_id=user_id), Q.AND) - - trending_posts_query.add(~Q(moderated_object__status=ModeratedObject.STATUS_APPROVED), Q.AND) - - return cls._get_trending_posts_old_with_query(query=trending_posts_query) - - @classmethod - def _get_trending_posts_old_with_query(cls, query): - return cls.objects.filter(query).annotate(Count('reactions')).order_by( - '-reactions__count', '-created') - - @classmethod - def _get_trending_posts_old_query(cls): - trending_posts_query = Q(created__gte=timezone.now() - timedelta( - hours=12)) - - Community = get_community_model() - - trending_posts_sources_query = Q(community__type=Community.COMMUNITY_TYPE_PUBLIC, status=cls.STATUS_PUBLISHED, - is_closed=False, is_deleted=False) - - trending_posts_query.add(trending_posts_sources_query, Q.AND) - - return trending_posts_query - @classmethod def get_post_comment_notification_target_users(cls, post, post_commenter): """ @@ -528,6 +499,7 @@ def _publish(self): self.status = Post.STATUS_PUBLISHED self.created = timezone.now() self._process_post_subscribers() + self._enqueue_process_activity_score_add_community_post() self.save() def is_draft(self): @@ -555,6 +527,7 @@ def save(self, *args, **kwargs): def delete(self, *args, **kwargs): self.delete_media() + self._enqueue_process_activity_score_delete_community_post() super(Post, self).delete(*args, **kwargs) def delete_media(self): @@ -563,6 +536,7 @@ def delete_media(self): def soft_delete(self): self.delete_notifications() + self._enqueue_process_activity_score_delete_community_post() for comment in self.comments.all().iterator(): comment.soft_delete() self.is_deleted = True @@ -789,6 +763,37 @@ def _process_post_subscribers(self): user_notifications_subscription_id=subscription.pk) send_user_new_post_push_notification(user_notifications_subscription=subscription, post=self) + def _enqueue_process_activity_score_add_community_post(self): + if self.community is None: + return + + add_post_job_id = 
'process_add_community_post_community_{0}_pid_{1}_uid_{2}'.format(self.community.pk, + self.pk, + self.creator.pk) + queue = django_rq.get_queue('process-activity-score') + transaction.on_commit(lambda: queue.enqueue(process_community_activity_score_post, + post_id=self.pk, + post_creator_id=self.creator.pk, + post_community_id=self.community.pk, + job_id=add_post_job_id)) + + def _enqueue_process_activity_score_delete_community_post(self): + if self.community is None: + return + + delete_post_job_id = 'process_delete_community_post_community_{0}_pid_{1}_uid_{2}'.format(self.community.pk, + self.pk, + self.creator.pk) + queue = django_rq.get_queue('process-activity-score') + post_id = self.pk + post_creator_id = self.creator.pk + post_community_id = self.community.pk + transaction.on_commit(lambda: queue.enqueue(process_community_activity_score_post, + post_id=post_id, + post_creator_id=post_creator_id, + post_community_id=post_community_id, + job_id=delete_post_job_id)) + class TopPost(models.Model): post = models.OneToOneField(Post, on_delete=models.CASCADE, related_name='top_post') @@ -975,8 +980,12 @@ def create_comment(cls, text, commenter, post, parent_comment=None): post_comment = PostComment.objects.create(text=text, commenter=commenter, post=post, parent_comment=parent_comment) post_comment.language = get_language_for_text(text) + add_comment_job_id = 'process_add_comment_pid_{0}_cid_{1}'.format(post.pk, post_comment.pk) + transaction.on_commit(lambda: PostComment.enqueue_process_activity_score_job(post_id=post.pk, + post_comment_id=post_comment.pk, + post_commenter_id=commenter.pk, + job_id=add_comment_job_id)) post_comment.save() - return post_comment @classmethod @@ -990,6 +999,15 @@ def get_emoji_counts_for_post_comment_with_id(cls, post_comment_id, emoji_id=Non return Emoji.get_emoji_counts_for_post_comment_with_id(post_comment_id=post_comment_id, emoji_id=emoji_id, reactor_id=reactor_id) + @classmethod + def enqueue_process_activity_score_job(cls, post_id, post_comment_id, post_commenter_id, job_id): + queue = django_rq.get_queue('process-activity-score') + queue.enqueue(process_activity_score_post_comment, + post_id=post_id, + post_comment_id=post_comment_id, + post_commenter_id=post_commenter_id, + job_id=job_id) + def count_replies(self): return self.replies.count() @@ -1128,6 +1146,25 @@ def soft_delete(self): self.is_deleted = True self.delete_notifications() self.save() + delete_comment_job_id = 'process_delete_comment_pid_{0}_cid_{1}'.format(self.post.pk, self.pk) + transaction.on_commit(lambda: PostComment.enqueue_process_activity_score_job(post_id=self.post.pk, + post_comment_id=self.pk, + post_commenter_id=self.commenter.pk, + job_id=delete_comment_job_id)) + + def delete(self, *args, **kwargs): + if not self.is_deleted: + post_id = self.post.pk + post_comment_id = self.pk + post_commenter_id = self.commenter.pk + delete_comment_job_id = 'process_delete_comment_pid_{0}_cid_{1}'.format(post_id, post_comment_id) + transaction.on_commit(lambda: + PostComment.enqueue_process_activity_score_job(post_id=post_id, + post_comment_id=post_comment_id, + post_commenter_id=post_commenter_id, + job_id=delete_comment_job_id)) + + super(PostComment, self).delete(*args, **kwargs) def unsoft_delete(self): self.is_deleted = False @@ -1194,9 +1231,23 @@ class PostReaction(models.Model): class Meta: unique_together = ('reactor', 'post',) + @classmethod + def enqueue_process_activity_score_job(cls, post_id, post_reaction_id, job_id): + queue = 
django_rq.get_queue('process-activity-score') + queue.enqueue(process_activity_score_post_reaction, + post_id=post_id, + post_reaction_id=post_reaction_id, + job_id=job_id) + @classmethod def create_reaction(cls, reactor, emoji_id, post): - return PostReaction.objects.create(reactor=reactor, emoji_id=emoji_id, post=post) + post_reaction = PostReaction.objects.create(reactor=reactor, emoji_id=emoji_id, post=post) + job_id = 'process_add_unique_reaction_pid_{0}_rid_{1}'.format(post.pk, post_reaction.pk) + transaction.on_commit(lambda: PostReaction.enqueue_process_activity_score_job(post_id=post.pk, + post_reaction_id=post_reaction.pk, + job_id=job_id)) + + return post_reaction @classmethod def count_reactions_for_post_with_id(cls, post_id, reactor_id=None): @@ -1207,6 +1258,17 @@ def count_reactions_for_post_with_id(cls, post_id, reactor_id=None): return cls.objects.filter(count_query).count() + def delete(self, *args, **kwargs): + reaction_id = self.pk + post_id = self.post.pk + job_id = 'process_remove_unique_reaction_pid_{0}_rid_{1}'.format(post_id, reaction_id) + transaction.on_commit(lambda: PostReaction.enqueue_process_activity_score_job( + post_id=post_id, + post_reaction_id=reaction_id, + job_id=job_id)) + + super(PostReaction, self).delete(*args, **kwargs) + def save(self, *args, **kwargs): ''' On save, update timestamps ''' if not self.id: diff --git a/openbook_posts/tests/views/test_post.py b/openbook_posts/tests/views/test_post.py index 1aebf7d8..b7b1fa42 100644 --- a/openbook_posts/tests/views/test_post.py +++ b/openbook_posts/tests/views/test_post.py @@ -5,34 +5,33 @@ from PIL import Image from django.urls import reverse -from django_rq import get_worker -from django_rq.queues import get_queues +from django_rq import get_worker, get_scheduler from faker import Faker +from django.db import transaction from rest_framework import status -from openbook_common.tests.models import OpenbookAPITestCase +from openbook_common.tests.models import OpenbookAPITestCase, OpenbookAPITransactionTestCase from django.core.files.images import ImageFile from django.core.files import File -from django.core.cache import cache from django.conf import settings from unittest import mock import logging -from rq import SimpleWorker, Worker +from rq import SimpleWorker from openbook_common.tests.helpers import make_authentication_headers_for_user, make_fake_post_text, \ make_fake_post_comment_text, make_user, make_circle, make_community, make_moderation_category, \ - get_test_videos, get_test_image, make_proxy_blacklisted_domain, make_hashtag, make_hashtag_name, \ + get_test_videos, get_test_image, make_hashtag, make_hashtag_name, \ make_reactions_emoji_group, make_emoji -from openbook_common.utils.model_loaders import get_language_model, get_community_new_post_notification_model, \ +from openbook_common.utils.model_loaders import get_community_new_post_notification_model, \ get_post_comment_notification_model, get_post_comment_user_mention_notification_model, \ get_post_user_mention_notification_model, get_post_comment_reaction_notification_model, \ get_post_comment_reply_notification_model +from openbook_common.utils.model_loaders import get_language_model from openbook_communities.models import Community from openbook_hashtags.models import Hashtag from openbook_notifications.models import PostUserMentionNotification, Notification from openbook_posts.models import Post, PostUserMention, PostMedia -from openbook_common.models import ProxyBlacklistedDomain logger = logging.getLogger(__name__) fake = 
Faker() @@ -1304,6 +1303,82 @@ def _get_url(self, post): }) +class PostItemTransactionAPITests(OpenbookAPITransactionTestCase): + + def test_reduces_community_activity_score_on_delete_post(self): + """ + should reduce community activity score on delete post and return 200 + """ + self._clear_jobs_in_scheduler() + user = make_user() + headers = make_authentication_headers_for_user(user) + community = make_community(creator=user) + with transaction.atomic(): + post = user.create_community_post(text=make_fake_post_text(), community_name=community.name) + + get_worker('process-activity-score', worker_class=SimpleWorker).work(burst=True) + community.refresh_from_db() + + activity_score_before_delete = community.activity_score + + url = self._get_url(post) + response = self.client.delete(url, **headers) + + get_worker('process-activity-score', worker_class=SimpleWorker).work(burst=True) + community.refresh_from_db() + + expected_weight = activity_score_before_delete - \ + settings.ACTIVITY_UNIQUE_POST_WEIGHT - \ + settings.ACTIVITY_COUNT_POSTS_WEIGHT + + self._clear_jobs_in_scheduler() + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(community.activity_score, expected_weight) + + def test_reduces_community_activity_score_appropriately_on_delete_two_posts_by_same_creator(self): + """ + should reduce community activity score correctly on delete two posts by same creator + """ + user = make_user() + headers = make_authentication_headers_for_user(user) + community = make_community(creator=user) + with transaction.atomic(): + post_1 = user.create_community_post(text=make_fake_post_text(), community_name=community.name) + post_2 = user.create_community_post(text=make_fake_post_text(), community_name=community.name) + + get_worker('process-activity-score', worker_class=SimpleWorker).work(burst=True) + community.refresh_from_db() + + activity_score_before_delete = community.activity_score + + url_1 = self._get_url(post_1) + response_1 = self.client.delete(url_1, **headers) + url_2 = self._get_url(post_2) + response_2 = self.client.delete(url_2, **headers) + + get_worker('process-activity-score', worker_class=SimpleWorker).work(burst=True) + community.refresh_from_db() + + expected_weight = activity_score_before_delete - \ + settings.ACTIVITY_UNIQUE_POST_WEIGHT - \ + (2 * settings.ACTIVITY_COUNT_POSTS_WEIGHT) + + self._clear_jobs_in_scheduler() + self.assertEqual(response_1.status_code, status.HTTP_200_OK) + self.assertEqual(response_2.status_code, status.HTTP_200_OK) + self.assertEqual(community.activity_score, expected_weight) + + def _clear_jobs_in_scheduler(self): + default_scheduler = get_scheduler('process-activity-score') + for job in default_scheduler.get_jobs(): + default_scheduler.cancel(job.get_id()) + + def _get_url(self, post): + return reverse('post', kwargs={ + 'post_uuid': post.uuid + }) + + class MutePostAPITests(OpenbookAPITestCase): """ MutePostAPI diff --git a/openbook_posts/tests/views/test_post_comment.py b/openbook_posts/tests/views/test_post_comment.py index da433e01..63fd67a7 100644 --- a/openbook_posts/tests/views/test_post_comment.py +++ b/openbook_posts/tests/views/test_post_comment.py @@ -1,7 +1,11 @@ from django.urls import reverse +from django_rq import get_worker, get_scheduler +from django.conf import settings from faker import Faker from rest_framework import status -from openbook_common.tests.models import OpenbookAPITestCase +from rq import SimpleWorker + +from openbook_common.tests.models import OpenbookAPITestCase, 
OpenbookAPITransactionTestCase from unittest import mock import json @@ -2163,6 +2167,110 @@ def _get_url(self, post, post_comment): }) +class PostCommentItemTransactionAPITests(OpenbookAPITransactionTestCase): + + fixtures = [ + 'openbook_circles/fixtures/circles.json' + ] + + def test_delete_comment_in_post_reduces_post_activity_score(self): + """ + should reduce post activity score on delete comment in post and return 200 + """ + user = make_user() + commenter = make_user() + + post = user.create_public_post(text=make_fake_post_text()) + post_comment = commenter.comment_post_with_id(post.pk, text=make_fake_post_comment_text()) + + url = self._get_url(post_comment=post_comment, post=post) + + headers = make_authentication_headers_for_user(user) + response = self.client.delete(url, **headers) + + get_worker('process-activity-score', worker_class=SimpleWorker).work(burst=True) + post.refresh_from_db() + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self._clear_jobs_in_scheduler() + self.assertEqual(post.activity_score, 0.0) + + def test_delete_comment_in_community_post_reduces_community_activity_score(self): + """ + should reduce community activity score on delete comment in community post and return 200 + """ + user = make_user() + community = make_community(creator=user) + post = user.create_community_post(text=make_fake_post_text(), community_name=community.name) + + get_worker('process-activity-score', worker_class=SimpleWorker).work(burst=True) + community.refresh_from_db() + activity_score_before_delete = community.activity_score + + post_comment = user.comment_post_with_id(post.pk, text=make_fake_post_comment_text()) + + url = self._get_url(post_comment=post_comment, post=post) + + headers = make_authentication_headers_for_user(user) + response = self.client.delete(url, **headers) + + get_worker('process-activity-score', worker_class=SimpleWorker).work(burst=True) + community.refresh_from_db() + self._clear_jobs_in_scheduler() + self.assertEqual(community.activity_score, activity_score_before_delete) + + def test_delete_comment_in_community_post_one_by_one_reduces_community_activity_score_correctly(self): + """ + should reduce community activity score correctly on sequential job runs of delete comment in community post and return 200 + """ + user = make_user() + community = make_community(creator=user) + post = user.create_community_post(text=make_fake_post_text(), community_name=community.name) + + post_comment_1 = user.comment_post_with_id(post.pk, text=make_fake_post_comment_text()) + post_comment_2 = user.comment_post_with_id(post.pk, text=make_fake_post_comment_text()) + + get_worker('process-activity-score', worker_class=SimpleWorker).work(burst=True) + community.refresh_from_db() + activity_score_before_delete = community.activity_score + + # delete comment one + url = self._get_url(post_comment=post_comment_1, post=post) + headers = make_authentication_headers_for_user(user) + response = self.client.delete(url, **headers) + + get_worker('process-activity-score', worker_class=SimpleWorker).work(burst=True) + community.refresh_from_db() + + expected_activity_score_1 = activity_score_before_delete - settings.ACTIVITY_COUNT_COMMENTS_WEIGHT + self.assertEqual(community.activity_score, expected_activity_score_1) + + # delete comment two + url_2 = self._get_url(post_comment=post_comment_2, post=post) + headers = make_authentication_headers_for_user(user) + response = self.client.delete(url_2, **headers) + + get_worker('process-activity-score', 
worker_class=SimpleWorker).work(burst=True) + community.refresh_from_db() + expected_activity_score_2 = expected_activity_score_1 - \ + settings.ACTIVITY_COUNT_COMMENTS_WEIGHT - \ + settings.ACTIVITY_UNIQUE_COMMENT_WEIGHT + + self._clear_jobs_in_scheduler() + self.assertEqual(community.activity_score, expected_activity_score_2) + + def _clear_jobs_in_scheduler(self): + default_scheduler = get_scheduler('process-activity-score') + for job in default_scheduler.get_jobs(): + default_scheduler.cancel(job.get_id()) + + def _get_url(self, post, post_comment): + return reverse('post-comment', kwargs={ + 'post_uuid': post.uuid, + 'post_comment_id': post_comment.pk + }) + + class MutePostCommentAPITests(OpenbookAPITestCase): """ MutePostCommentAPI diff --git a/openbook_posts/tests/views/test_post_comments.py b/openbook_posts/tests/views/test_post_comments.py index c1405298..89ebdb98 100644 --- a/openbook_posts/tests/views/test_post_comments.py +++ b/openbook_posts/tests/views/test_post_comments.py @@ -1,11 +1,17 @@ # Create your tests here. import json from django.urls import reverse +from django_rq import get_worker, get_scheduler +from django.conf import settings from faker import Faker from rest_framework import status from unittest import mock from unittest.mock import ANY -from openbook_common.tests.models import OpenbookAPITestCase + +from rq import SimpleWorker + +from openbook_common.tests.models import OpenbookAPITestCase, OpenbookAPITransactionTestCase +from unittest.mock import call import logging import random diff --git a/openbook_posts/tests/views/test_post_reaction.py b/openbook_posts/tests/views/test_post_reaction.py index 798bcea7..c87aaab7 100644 --- a/openbook_posts/tests/views/test_post_reaction.py +++ b/openbook_posts/tests/views/test_post_reaction.py @@ -1,8 +1,13 @@ # Create your tests here. 
from django.urls import reverse +from django.db import transaction +from django.conf import settings +from django_rq import get_worker from faker import Faker from rest_framework import status -from openbook_common.tests.models import OpenbookAPITestCase +from rq import SimpleWorker + +from openbook_common.tests.models import OpenbookAPITestCase, OpenbookAPITransactionTestCase import logging @@ -437,3 +442,80 @@ def _get_url(self, post, post_reaction): 'post_uuid': post.uuid, 'post_reaction_id': post_reaction.pk }) + + +class PostReactionItemTransactionAPITests(OpenbookAPITransactionTestCase): + """ + PostReactionItemTransactionsAPI + """ + + fixtures = [ + 'openbook_circles/fixtures/circles.json' + ] + + def test_delete_own_reaction_reduces_post_activity_score(self): + """ + should reduce activity score on delete own reaction in public post and return 200 + """ + user = make_user() + + foreign_user = make_user() + + post = foreign_user.create_public_post(text=make_fake_post_text()) + + emoji_group = make_reactions_emoji_group() + + post_reaction_emoji_id = make_emoji(group=emoji_group).pk + + with transaction.atomic(): + post_reaction = user.react_to_post_with_id(post.pk, emoji_id=post_reaction_emoji_id) + + url = self._get_url(post_reaction=post_reaction, post=post) + headers = make_authentication_headers_for_user(user) + response = self.client.delete(url, **headers) + + # run job to reduce activity score + get_worker('process-activity-score', worker_class=SimpleWorker).work(burst=True) + + post.refresh_from_db() + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(post.activity_score, 0.0) + + def test_delete_own_post_reaction_reduces_community_activity_score(self): + """ + should reduce community activity score on delete own reaction in community post and return 200 + """ + user = make_user() + + community = make_community(creator=user) + post = user.create_community_post(text=make_fake_post_text(), community_name=community.name) + + emoji_group = make_reactions_emoji_group() + + post_reaction_emoji_id = make_emoji(group=emoji_group).pk + + with transaction.atomic(): + post_reaction = user.react_to_post_with_id(post.pk, emoji_id=post_reaction_emoji_id) + + get_worker('process-activity-score', worker_class=SimpleWorker).work(burst=True) + community.refresh_from_db() + activity_score_before_delete = community.activity_score + + url = self._get_url(post_reaction=post_reaction, post=post) + headers = make_authentication_headers_for_user(user) + response = self.client.delete(url, **headers) + + # run job to reduce activity score + get_worker('process-activity-score', worker_class=SimpleWorker).work(burst=True) + community.refresh_from_db() + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(community.activity_score, + activity_score_before_delete - settings.ACTIVITY_UNIQUE_REACTION_WEIGHT) + + def _get_url(self, post, post_reaction): + return reverse('post-reaction', kwargs={ + 'post_uuid': post.uuid, + 'post_reaction_id': post_reaction.pk + }) diff --git a/openbook_posts/tests/views/test_post_reactions.py b/openbook_posts/tests/views/test_post_reactions.py index a78d167c..7598f88b 100644 --- a/openbook_posts/tests/views/test_post_reactions.py +++ b/openbook_posts/tests/views/test_post_reactions.py @@ -1,16 +1,20 @@ # Create your tests here. 
import json from django.urls import reverse +from django.conf import settings +from django_rq import get_worker from faker import Faker from rest_framework import status -from openbook_common.tests.models import OpenbookAPITestCase +from rq import SimpleWorker + +from openbook_common.tests.models import OpenbookAPITestCase, OpenbookAPITransactionTestCase import logging from openbook_common.tests.helpers import make_authentication_headers_for_user, make_fake_post_text, \ make_fake_post_comment_text, make_user, make_circle, make_emoji, make_emoji_group, make_reactions_emoji_group, \ make_community from openbook_notifications.models import PostReactionNotification -from openbook_posts.models import PostReaction +from openbook_posts.models import PostReaction, Post logger = logging.getLogger(__name__) fake = Faker() @@ -594,6 +598,77 @@ def _get_url(self, post): }) +class PostReactionsTransactionAPITests(OpenbookAPITransactionTestCase): + """ + PostReactionsTransactionAPI + """ + + fixtures = [ + 'openbook_circles/fixtures/circles.json' + ] + + def test_creating_post_reaction_updates_post_activity_score(self): + """ + should update activity score in post after successful reaction + """ + user = make_user() + headers = make_authentication_headers_for_user(user) + post = user.create_public_post(text=make_fake_post_text()) + + emoji_group = make_reactions_emoji_group() + + post_reaction_emoji_id = make_emoji(group=emoji_group).pk + + data = self._get_create_post_reaction_request_data(post_reaction_emoji_id, emoji_group.pk) + + url = self._get_url(post) + response = self.client.put(url, data, **headers) + get_worker('process-activity-score', worker_class=SimpleWorker).work(burst=True) + + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + post.refresh_from_db() + self.assertEqual(post.activity_score, settings.ACTIVITY_UNIQUE_REACTION_WEIGHT) + + def test_creating_post_reaction_updates_community_activity_score(self): + """ + should update activity score in community after successful community post reaction + """ + user = make_user() + headers = make_authentication_headers_for_user(user) + community = make_community(creator=user) + post = user.create_community_post(text=make_fake_post_text(), community_name=community.name) + + get_worker('process-activity-score', worker_class=SimpleWorker).work(burst=True) + community.refresh_from_db() + + activity_score_before_reaction = community.activity_score + + emoji_group = make_reactions_emoji_group() + + post_reaction_emoji_id = make_emoji(group=emoji_group).pk + + data = self._get_create_post_reaction_request_data(post_reaction_emoji_id, emoji_group.pk) + + url = self._get_url(post) + response = self.client.put(url, data, **headers) + get_worker('process-activity-score', worker_class=SimpleWorker).work(burst=True) + + community.refresh_from_db() + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + self.assertEqual(community.activity_score, activity_score_before_reaction + settings.ACTIVITY_UNIQUE_REACTION_WEIGHT) + + def _get_create_post_reaction_request_data(self, emoji_id, emoji_group_id): + return { + 'emoji_id': emoji_id, + 'group_id': emoji_group_id + } + + def _get_url(self, post): + return reverse('post-reactions', kwargs={ + 'post_uuid': post.uuid + }) + + class PostReactionsEmojiCountAPITests(OpenbookAPITestCase): """ PostReactionsEmojiCountAPI diff --git a/openbook_posts/tests/views/test_posts.py b/openbook_posts/tests/views/test_posts.py index 6d5b4bc7..48c9f6a8 100644 --- a/openbook_posts/tests/views/test_posts.py 
+++ b/openbook_posts/tests/views/test_posts.py @@ -8,11 +8,12 @@ from django.core.files.uploadedfile import SimpleUploadedFile from django.urls import reverse from django_rq import get_worker +from django.db import transaction from faker import Faker from rest_framework import status from rq import SimpleWorker -from openbook_common.tests.models import OpenbookAPITestCase +from openbook_common.tests.models import OpenbookAPITestCase, OpenbookAPITransactionTestCase from mixer.backend.django import mixer from openbook.settings import POST_MAX_LENGTH @@ -3784,41 +3785,6 @@ class TrendingPostsAPITests(OpenbookAPITestCase): 'openbook_circles/fixtures/circles.json' ] - def test_displays_community_posts_only(self): - """ - should display community posts only and return 200 - """ - user = make_user() - community = make_community(creator=user) - - user.create_public_post(text=make_fake_post_text()) - post = user.create_community_post(community_name=community.name, text=make_fake_post_text()) - - headers = make_authentication_headers_for_user(user) - - emoji_group = make_reactions_emoji_group() - emoji = make_emoji(group=emoji_group) - - # react once, min required while testing - user.react_to_post_with_id(post_id=post.pk, emoji_id=emoji.pk) - - curate_trending_posts() - - url = self._get_url() - - response = self.client.get(url, **headers, format='multipart') - - self.assertEqual(response.status_code, status.HTTP_200_OK) - - response_posts = json.loads(response.content) - - self.assertEqual(1, len(response_posts)) - - response_post = response_posts[0] - - self.assertEqual(response_post['post']['id'], post.pk) - self.assertTrue(TrendingPost.objects.filter(post__id=post.pk).exists()) - def test_does_not_curate_community_posts_with_less_than_min_reactions(self): """ should not curate community posts with less than minimum reactions and return 200 @@ -3830,6 +3796,7 @@ def test_does_not_curate_community_posts_with_less_than_min_reactions(self): post = user.create_community_post(community_name=community.name, text=make_fake_post_text()) headers = make_authentication_headers_for_user(user) + headers = self._add_version_header(headers) curate_trending_posts() @@ -3844,46 +3811,6 @@ def test_does_not_curate_community_posts_with_less_than_min_reactions(self): self.assertEqual(0, len(response_posts)) self.assertFalse(TrendingPost.objects.filter(post__id=post.pk).exists()) - def test_does_not_display_closed_community_posts(self): - """ - should not display community posts that are closed - """ - user = make_user() - community = make_community(creator=user) - - user.create_public_post(text=make_fake_post_text()) - post = user.create_community_post(community_name=community.name, text=make_fake_post_text()) - post_two = user.create_community_post(community_name=community.name, text=make_fake_post_text()) - post_two.is_closed = True - post_two.save() - - headers = make_authentication_headers_for_user(user) - - emoji_group = make_reactions_emoji_group() - emoji = make_emoji(group=emoji_group) - - # react once, min required while testing - user.react_to_post_with_id(post_id=post.pk, emoji_id=emoji.pk) - user.react_to_post_with_id(post_id=post_two.pk, emoji_id=emoji.pk) - - curate_trending_posts() - - url = self._get_url() - - response = self.client.get(url, **headers, format='multipart') - - self.assertEqual(response.status_code, status.HTTP_200_OK) - - response_posts = json.loads(response.content) - - self.assertEqual(1, len(response_posts)) - - response_post = response_posts[0] - - 
self.assertEqual(response_post['post']['id'], post.pk) - self.assertFalse(TrendingPost.objects.filter(post__id=post_two.pk).exists()) - self.assertTrue(TrendingPost.objects.filter(post__id=post.pk).exists()) - def test_does_not_display_post_from_community_banned_from(self): """ should not display posts from a community banned from and return 200 @@ -3906,6 +3833,7 @@ def test_does_not_display_post_from_community_banned_from(self): community_name=community.name) headers = make_authentication_headers_for_user(user) + headers = self._add_version_header(headers) curate_trending_posts() @@ -3943,6 +3871,7 @@ def test_cant_retrieve_post_of_blocked_user(self): url = self._get_url() headers = make_authentication_headers_for_user(user) + headers = self._add_version_header(headers) response = self.client.get(url, **headers) @@ -3976,6 +3905,7 @@ def test_cant_retrieve_post_of_blocking_user(self): url = self._get_url() headers = make_authentication_headers_for_user(user) + headers = self._add_version_header(headers) response = self.client.get(url, **headers) @@ -4010,6 +3940,7 @@ def test_cant_retrieve_post_of_blocked_community_staff_member(self): url = self._get_url() headers = make_authentication_headers_for_user(user) + headers = self._add_version_header(headers) response = self.client.get(url, **headers) @@ -4044,6 +3975,7 @@ def test_does_not_curate_encircled_posts(self): curate_trending_posts() headers = make_authentication_headers_for_user(user) + headers = self._add_version_header(headers) url = self._get_url() @@ -4077,6 +4009,7 @@ def test_does_not_curate_private_community_posts(self): curate_trending_posts() headers = make_authentication_headers_for_user(user) + headers = self._add_version_header(headers) url = self._get_url() @@ -4091,28 +4024,83 @@ def test_does_not_curate_private_community_posts(self): self.assertEqual(0, len(trending_posts)) self.assertFalse(TrendingPost.objects.filter(post__id=post.pk).exists()) - def test_does_not_return_recently_turned_private_community_posts(self): + def _add_version_header(self, headers): + headers['HTTP_ACCEPT'] = 'application/json; version=2.0' + return headers + + def _get_url(self): + return reverse('trending-posts') + + +class TrendingPostsTransactionAPITests(OpenbookAPITransactionTestCase): + fixtures = [ + 'openbook_circles/fixtures/circles.json' + ] + + def test_displays_community_posts_only(self): """ - should not return recently turned private community posts in trending posts + should display community posts only and return 200 """ user = make_user() + community = make_community(creator=user) - community = make_community(creator=user, type=Community.COMMUNITY_TYPE_PUBLIC) + user.create_public_post(text=make_fake_post_text()) post = user.create_community_post(community_name=community.name, text=make_fake_post_text()) + headers = make_authentication_headers_for_user(user) + headers = self._add_version_header(headers) + emoji_group = make_reactions_emoji_group() emoji = make_emoji(group=emoji_group) # react once, min required while testing - user.react_to_post_with_id(post_id=post.pk, emoji_id=emoji.pk) + with transaction.atomic(): + user.react_to_post_with_id(post_id=post.pk, emoji_id=emoji.pk) - # curate trending posts + get_worker('process-activity-score', worker_class=SimpleWorker).work(burst=True) curate_trending_posts() - community.type = Community.COMMUNITY_TYPE_PRIVATE - community.save() + url = self._get_url() + + response = self.client.get(url, **headers, format='multipart') + + self.assertEqual(response.status_code, 
status.HTTP_200_OK) + + response_posts = json.loads(response.content) + + self.assertEqual(1, len(response_posts)) + + response_post = response_posts[0] + + self.assertEqual(response_post['post']['id'], post.pk) + self.assertTrue(TrendingPost.objects.filter(post__id=post.pk).exists()) + + def test_does_not_display_closed_community_posts(self): + """ + should not display community posts that are closed + """ + user = make_user() + community = make_community(creator=user) + + user.create_public_post(text=make_fake_post_text()) + post = user.create_community_post(community_name=community.name, text=make_fake_post_text()) + post_two = user.create_community_post(community_name=community.name, text=make_fake_post_text()) + post_two.is_closed = True + post_two.save() headers = make_authentication_headers_for_user(user) + headers = self._add_version_header(headers) + + emoji_group = make_reactions_emoji_group() + emoji = make_emoji(group=emoji_group) + + # react once, min required while testing + with transaction.atomic(): + user.react_to_post_with_id(post_id=post.pk, emoji_id=emoji.pk) + user.react_to_post_with_id(post_id=post_two.pk, emoji_id=emoji.pk) + + get_worker('process-activity-score', worker_class=SimpleWorker).work(burst=True) + curate_trending_posts() url = self._get_url() @@ -4121,10 +4109,13 @@ def test_does_not_return_recently_turned_private_community_posts(self): self.assertEqual(response.status_code, status.HTTP_200_OK) response_posts = json.loads(response.content) - self.assertEqual(0, len(response_posts)) - trending_posts = TrendingPost.objects.all() - self.assertEqual(1, len(trending_posts)) + self.assertEqual(1, len(response_posts)) + + response_post = response_posts[0] + + self.assertEqual(response_post['post']['id'], post.pk) + self.assertFalse(TrendingPost.objects.filter(post__id=post_two.pk).exists()) self.assertTrue(TrendingPost.objects.filter(post__id=post.pk).exists()) def test_does_not_display_curated_closed_community_posts(self): @@ -4142,9 +4133,11 @@ def test_does_not_display_curated_closed_community_posts(self): emoji = make_emoji(group=emoji_group) # react once, min required while testing - user.react_to_post_with_id(post_id=post.pk, emoji_id=emoji.pk) - user.react_to_post_with_id(post_id=post_two.pk, emoji_id=emoji.pk) + with transaction.atomic(): + user.react_to_post_with_id(post_id=post.pk, emoji_id=emoji.pk) + user.react_to_post_with_id(post_id=post_two.pk, emoji_id=emoji.pk) + get_worker('process-activity-score', worker_class=SimpleWorker).work(burst=True) # curate trending posts curate_trending_posts() @@ -4152,6 +4145,7 @@ def test_does_not_display_curated_closed_community_posts(self): post_two.save() headers = make_authentication_headers_for_user(user) + headers = self._add_version_header(headers) url = self._get_url() @@ -4188,13 +4182,17 @@ def test_does_not_display_reported_community_posts_that_are_approved(self): emoji = make_emoji(group=emoji_group) # react once, min required while testing - user.react_to_post_with_id(post_id=post.pk, emoji_id=emoji.pk) - user.react_to_post_with_id(post_id=post_two.pk, emoji_id=emoji.pk) + with transaction.atomic(): + user.react_to_post_with_id(post_id=post.pk, emoji_id=emoji.pk) + user.react_to_post_with_id(post_id=post_two.pk, emoji_id=emoji.pk) + + get_worker('process-activity-score', worker_class=SimpleWorker).work(burst=True) # curate trending posts curate_trending_posts() headers = make_authentication_headers_for_user(user) + headers = self._add_version_header(headers) url = self._get_url() @@ -4234,8 +4232,11 
@@ def test_does_not_display_reported_community_posts_that_are_approved_after_curat emoji = make_emoji(group=emoji_group) # react once, min required while testing - user.react_to_post_with_id(post_id=post.pk, emoji_id=emoji.pk) - user.react_to_post_with_id(post_id=post_two.pk, emoji_id=emoji.pk) + with transaction.atomic(): + user.react_to_post_with_id(post_id=post.pk, emoji_id=emoji.pk) + user.react_to_post_with_id(post_id=post_two.pk, emoji_id=emoji.pk) + + get_worker('process-activity-score', worker_class=SimpleWorker).work(burst=True) # curate trending posts curate_trending_posts() @@ -4243,6 +4244,7 @@ def test_does_not_display_reported_community_posts_that_are_approved_after_curat user.approve_moderated_object(moderated_object=moderated_object) headers = make_authentication_headers_for_user(user) + headers = self._add_version_header(headers) url = self._get_url() @@ -4254,8 +4256,52 @@ def test_does_not_display_reported_community_posts_that_are_approved_after_curat response_post = response_posts[0] self.assertEqual(response_post['post']['id'], post_two.pk) + def test_does_not_return_recently_turned_private_community_posts(self): + """ + should not return recently turned private community posts in trending posts + """ + user = make_user() + + community = make_community(creator=user, type=Community.COMMUNITY_TYPE_PUBLIC) + post = user.create_community_post(community_name=community.name, text=make_fake_post_text()) + + emoji_group = make_reactions_emoji_group() + emoji = make_emoji(group=emoji_group) + + # react once, min required while testing + with transaction.atomic(): + user.react_to_post_with_id(post_id=post.pk, emoji_id=emoji.pk) + + get_worker('process-activity-score', worker_class=SimpleWorker).work(burst=True) + + # curate trending posts + curate_trending_posts() + + community.type = Community.COMMUNITY_TYPE_PRIVATE + community.save() + + headers = make_authentication_headers_for_user(user) + headers = self._add_version_header(headers) + + url = self._get_url() + + response = self.client.get(url, **headers, format='multipart') + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + response_posts = json.loads(response.content) + self.assertEqual(0, len(response_posts)) + + trending_posts = TrendingPost.objects.all() + self.assertEqual(1, len(trending_posts)) + self.assertTrue(TrendingPost.objects.filter(post__id=post.pk).exists()) + + def _add_version_header(self, headers): + headers['HTTP_ACCEPT'] = 'application/json; version=2.0' + return headers + def _get_url(self): - return reverse('trending-posts-new') + return reverse('trending-posts') class TopPostsAPITests(OpenbookAPITestCase): diff --git a/openbook_posts/views/posts/views.py b/openbook_posts/views/posts/views.py index cae49189..89fdd7e5 100644 --- a/openbook_posts/views/posts/views.py +++ b/openbook_posts/views/posts/views.py @@ -114,18 +114,27 @@ def get_posts_for_unauthenticated_user(self, request): return Response(post_serializer.data, status=status.HTTP_200_OK) -class TrendingPosts(APIView): +class TrendingPostsLegacy(APIView): permission_classes = (IsAuthenticated, IsNotSuspended) def get(self, request): + query_params = request.query_params.dict() + + serializer = GetTrendingPostsSerializer(data=query_params) + serializer.is_valid(raise_exception=True) + data = serializer.validated_data + + max_id = data.get('max_id') + min_id = data.get('min_id') + count = data.get('count', 30) user = request.user - posts = user.get_trending_posts_old()[:30] - posts_serializer = 
AuthenticatedUserPostSerializer(posts, many=True, context={"request": request}) + trending_posts = user.get_trending_posts(max_id=max_id, min_id=min_id).order_by('-id')[:count] + posts_serializer = AuthenticatedUserTrendingPostSerializer(trending_posts, many=True, context={"request": request}) return Response(posts_serializer.data, status=status.HTTP_200_OK) -class TrendingPostsNew(APIView): +class TrendingPosts(APIView): permission_classes = (IsAuthenticated, IsNotSuspended) def get(self, request):
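Note on TrendingPostsLegacy above: the handler now validates max_id, min_id
and count from the query string before slicing the queryset. The serializer
it uses, GetTrendingPostsSerializer, is not included in this diff; judging
purely from its usage here, it is presumably a plain DRF Serializer along
these lines (the field definitions are an assumption, not the project's
actual source):

    from rest_framework import serializers

    class GetTrendingPostsSerializer(serializers.Serializer):
        # every field is optional; the view falls back to count=30
        max_id = serializers.IntegerField(required=False)
        min_id = serializers.IntegerField(required=False)
        count = serializers.IntegerField(required=False, max_value=30)

Together with the renamed view classes, the 'application/json; version=2.0'
Accept header added throughout the tests suggests DRF's Accept-header
versioning selects the activity-score-based TrendingPosts view for version
2.0 clients at the 'trending-posts' URL name, while unversioned clients keep
the legacy reaction-count behaviour.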