From 7e42fa9783b457dfaa9b1472ad5c942688bef52b Mon Sep 17 00:00:00 2001 From: Rodrigo Colao Merlo Date: Fri, 14 Feb 2025 12:46:07 -0300 Subject: [PATCH 01/14] feature: Add new patch script to update activity items strings to greek (M2-8568) (#1735) * Update dependencies in Pipfile and lock - Bumped pymongo to 4.11.* and pyOpenSSL to 25.0.* - Updated dev packages: gevent to 24.11.*, greenlet to 3.1.*, mypy to 1.15.*, types-pytz to 2025.1.0.* - Adjusted Python version markers for some packages - Refreshed hash values in the lock file Update deps * Add new patch script to update activity item to greek * Logging when age_screen or gender_screen item are created * Update src/apps/activities/services/activity.py Co-authored-by: Marty * Update README to add instructions to running patch --------- Co-authored-by: Marty --- Pipfile | 12 +- Pipfile.lock | 1173 ++++++++--------- README.md | 16 +- src/apps/activities/services/activity.py | 11 + src/apps/shared/commands/patch_commands.py | 17 +- .../m2_8568_update_subscale_items_to_greek.py | 115 ++ 6 files changed, 741 insertions(+), 603 deletions(-) create mode 100644 src/apps/shared/commands/patches/m2_8568_update_subscale_items_to_greek.py diff --git a/Pipfile b/Pipfile index dcc39651404..83313f629cf 100644 --- a/Pipfile +++ b/Pipfile @@ -21,8 +21,8 @@ more-itertools = "==10.6.0" nh3 = "==0.2.20" pydantic = { extras = ["email"], version = "==1.10.18" } pyjwt = "==2.10.1" -pymongo = "==4.10.*" -pyOpenSSL = "==24.3.*" +pymongo = "==4.11.*" +pyOpenSSL = "==25.0.*" python-multipart = "==0.0.20" redis = "==5.2.*" sentry-sdk = "~=2.13" @@ -43,10 +43,10 @@ asgi-correlation-id = "==4.3.4" [dev-packages] allure-pytest = "==2.13.5" cachetools = "==5.3.0" -gevent = "==24.2.1" -greenlet = "==3.1.0" +gevent = "==24.11.*" +greenlet = "==3.1.*" ipdb = "==0.13.13" -mypy = "==1.14.*" +mypy = "==1.15.*" nest-asyncio = "==1.6.0" pre-commit = "==4.1.*" pudb = "==2024.1.3" @@ -63,7 +63,7 @@ ruff = "==0.9.*" types-aiofiles = "==24.1.0.*" 
types-cachetools = "==5.5.0.*" types-python-dateutil = "==2.9.0.*" -types-pytz = "==2024.2.0.*" +types-pytz = "==2025.1.0.*" types-requests = "==2.32.0.*" typing-extensions = "==4.12.2" diff --git a/Pipfile.lock b/Pipfile.lock index 59efbb16bc1..f53426068fa 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "586603f2208e3db6273f22cd99d4481bcb484bd23a82a73f213c4e0fad1d9b73" + "sha256": "b43b099cf0adf87ce4b9f1b19f00aefc85b9f93a63e10611066f8f4c82accc1f" }, "pipfile-spec": 6, "requires": { @@ -36,94 +36,99 @@ }, "aiohappyeyeballs": { "hashes": [ - "sha256:5fdd7d87889c63183afc18ce9271f9b0a7d32c2303e394468dd45d514a757745", - "sha256:a980909d50efcd44795c4afeca523296716d50cd756ddca6af8c65b996e27de8" + "sha256:147ec992cf873d74f5062644332c539fcd42956dc69453fe5204195e560517e1", + "sha256:9b05052f9042985d32ecbe4b59a77ae19c006a78f1344d7fdad69d28ded3d0b0" ], - "markers": "python_version >= '3.8'", - "version": "==2.4.4" + "markers": "python_version >= '3.9'", + "version": "==2.4.6" }, "aiohttp": { "hashes": [ - "sha256:0882c2820fd0132240edbb4a51eb8ceb6eef8181db9ad5291ab3332e0d71df5f", - "sha256:0a6d3fbf2232e3a08c41eca81ae4f1dff3d8f1a30bae415ebe0af2d2458b8a33", - "sha256:0b7fb429ab1aafa1f48578eb315ca45bd46e9c37de11fe45c7f5f4138091e2f1", - "sha256:0eb98d90b6690827dcc84c246811feeb4e1eea683c0eac6caed7549be9c84665", - "sha256:0fd82b8e9c383af11d2b26f27a478640b6b83d669440c0a71481f7c865a51da9", - "sha256:10b4ff0ad793d98605958089fabfa350e8e62bd5d40aa65cdc69d6785859f94e", - "sha256:1642eceeaa5ab6c9b6dfeaaa626ae314d808188ab23ae196a34c9d97efb68350", - "sha256:1dac54e8ce2ed83b1f6b1a54005c87dfed139cf3f777fdc8afc76e7841101226", - "sha256:1e69966ea6ef0c14ee53ef7a3d68b564cc408121ea56c0caa2dc918c1b2f553d", - "sha256:1f21bb8d0235fc10c09ce1d11ffbd40fc50d3f08a89e4cf3a0c503dc2562247a", - "sha256:2170816e34e10f2fd120f603e951630f8a112e1be3b60963a1f159f5699059a6", - "sha256:21fef42317cf02e05d3b09c028712e1d73a9606f02467fd803f7c1f39cc59add", - 
"sha256:249cc6912405917344192b9f9ea5cd5b139d49e0d2f5c7f70bdfaf6b4dbf3a2e", - "sha256:3499c7ffbfd9c6a3d8d6a2b01c26639da7e43d47c7b4f788016226b1e711caa8", - "sha256:3af41686ccec6a0f2bdc66686dc0f403c41ac2089f80e2214a0f82d001052c03", - "sha256:3e23419d832d969f659c208557de4a123e30a10d26e1e14b73431d3c13444c2e", - "sha256:3ea1b59dc06396b0b424740a10a0a63974c725b1c64736ff788a3689d36c02d2", - "sha256:44167fc6a763d534a6908bdb2592269b4bf30a03239bcb1654781adf5e49caf1", - "sha256:479b8c6ebd12aedfe64563b85920525d05d394b85f166b7873c8bde6da612f9c", - "sha256:4af57160800b7a815f3fe0eba9b46bf28aafc195555f1824555fa2cfab6c1538", - "sha256:4b4fa1cb5f270fb3eab079536b764ad740bb749ce69a94d4ec30ceee1b5940d5", - "sha256:4eed954b161e6b9b65f6be446ed448ed3921763cc432053ceb606f89d793927e", - "sha256:541d823548ab69d13d23730a06f97460f4238ad2e5ed966aaf850d7c369782d9", - "sha256:568c1236b2fde93b7720f95a890741854c1200fba4a3471ff48b2934d2d93fd3", - "sha256:5854be2f3e5a729800bac57a8d76af464e160f19676ab6aea74bde18ad19d438", - "sha256:620598717fce1b3bd14dd09947ea53e1ad510317c85dda2c9c65b622edc96b12", - "sha256:6526e5fb4e14f4bbf30411216780c9967c20c5a55f2f51d3abd6de68320cc2f3", - "sha256:6fba278063559acc730abf49845d0e9a9e1ba74f85f0ee6efd5803f08b285853", - "sha256:70d1f9dde0e5dd9e292a6d4d00058737052b01f3532f69c0c65818dac26dc287", - "sha256:731468f555656767cda219ab42e033355fe48c85fbe3ba83a349631541715ba2", - "sha256:81b8fe282183e4a3c7a1b72f5ade1094ed1c6345a8f153506d114af5bf8accd9", - "sha256:84a585799c58b795573c7fa9b84c455adf3e1d72f19a2bf498b54a95ae0d194c", - "sha256:85992ee30a31835fc482468637b3e5bd085fa8fe9392ba0bdcbdc1ef5e9e3c55", - "sha256:8811f3f098a78ffa16e0ea36dffd577eb031aea797cbdba81be039a4169e242c", - "sha256:88a12ad8ccf325a8a5ed80e6d7c3bdc247d66175afedbe104ee2aaca72960d8e", - "sha256:8be8508d110d93061197fd2d6a74f7401f73b6d12f8822bbcd6d74f2b55d71b1", - "sha256:8e2bf8029dbf0810c7bfbc3e594b51c4cc9101fbffb583a3923aea184724203c", - "sha256:929f3ed33743a49ab127c58c3e0a827de0664bfcda566108989a14068f820194", 
- "sha256:92cde43018a2e17d48bb09c79e4d4cb0e236de5063ce897a5e40ac7cb4878773", - "sha256:92fc484e34b733704ad77210c7957679c5c3877bd1e6b6d74b185e9320cc716e", - "sha256:943a8b052e54dfd6439fd7989f67fc6a7f2138d0a2cf0a7de5f18aa4fe7eb3b1", - "sha256:9d73ee3725b7a737ad86c2eac5c57a4a97793d9f442599bea5ec67ac9f4bdc3d", - "sha256:9f5b3c1ed63c8fa937a920b6c1bec78b74ee09593b3f5b979ab2ae5ef60d7600", - "sha256:9fd46ce0845cfe28f108888b3ab17abff84ff695e01e73657eec3f96d72eef34", - "sha256:a344d5dc18074e3872777b62f5f7d584ae4344cd6006c17ba12103759d407af3", - "sha256:a60804bff28662cbcf340a4d61598891f12eea3a66af48ecfdc975ceec21e3c8", - "sha256:a8f5f7515f3552d899c61202d99dcb17d6e3b0de777900405611cd747cecd1b8", - "sha256:a9b7371665d4f00deb8f32208c7c5e652059b0fda41cf6dbcac6114a041f1cc2", - "sha256:aa54f8ef31d23c506910c21163f22b124facb573bff73930735cf9fe38bf7dff", - "sha256:aba807f9569455cba566882c8938f1a549f205ee43c27b126e5450dc9f83cc62", - "sha256:ae545f31489548c87b0cced5755cfe5a5308d00407000e72c4fa30b19c3220ac", - "sha256:af01e42ad87ae24932138f154105e88da13ce7d202a6de93fafdafb2883a00ef", - "sha256:b540bd67cfb54e6f0865ceccd9979687210d7ed1a1cc8c01f8e67e2f1e883d28", - "sha256:b6212a60e5c482ef90f2d788835387070a88d52cf6241d3916733c9176d39eab", - "sha256:b63de12e44935d5aca7ed7ed98a255a11e5cb47f83a9fded7a5e41c40277d104", - "sha256:ba74ec819177af1ef7f59063c6d35a214a8fde6f987f7661f4f0eecc468a8f76", - "sha256:bb49c7f1e6ebf3821a42d81d494f538107610c3a705987f53068546b0e90303e", - "sha256:bd176afcf8f5d2aed50c3647d4925d0db0579d96f75a31e77cbaf67d8a87742d", - "sha256:bd7227b87a355ce1f4bf83bfae4399b1f5bb42e0259cb9405824bd03d2f4336a", - "sha256:bf8d9bfee991d8acc72d060d53860f356e07a50f0e0d09a8dfedea1c554dd0d5", - "sha256:bfde76a8f430cf5c5584553adf9926534352251d379dcb266ad2b93c54a29745", - "sha256:c341c7d868750e31961d6d8e60ff040fb9d3d3a46d77fd85e1ab8e76c3e9a5c4", - "sha256:c7a06301c2fb096bdb0bd25fe2011531c1453b9f2c163c8031600ec73af1cc99", - 
"sha256:cb23d8bb86282b342481cad4370ea0853a39e4a32a0042bb52ca6bdde132df43", - "sha256:d119fafe7b634dbfa25a8c597718e69a930e4847f0b88e172744be24515140da", - "sha256:d40f9da8cabbf295d3a9dae1295c69975b86d941bc20f0a087f0477fa0a66231", - "sha256:d6c9af134da4bc9b3bd3e6a70072509f295d10ee60c697826225b60b9959acdd", - "sha256:dd7659baae9ccf94ae5fe8bfaa2c7bc2e94d24611528395ce88d009107e00c6d", - "sha256:de8d38f1c2810fa2a4f1d995a2e9c70bb8737b18da04ac2afbf3971f65781d87", - "sha256:e595c591a48bbc295ebf47cb91aebf9bd32f3ff76749ecf282ea7f9f6bb73886", - "sha256:ec2aa89305006fba9ffb98970db6c8221541be7bee4c1d027421d6f6df7d1ce2", - "sha256:ec82bf1fda6cecce7f7b915f9196601a1bd1a3079796b76d16ae4cce6d0ef89b", - "sha256:ed9ee95614a71e87f1a70bc81603f6c6760128b140bc4030abe6abaa988f1c3d", - "sha256:f047569d655f81cb70ea5be942ee5d4421b6219c3f05d131f64088c73bb0917f", - "sha256:ffa336210cf9cd8ed117011085817d00abe4c08f99968deef0013ea283547204", - "sha256:ffb3dc385f6bb1568aa974fe65da84723210e5d9707e360e9ecb51f59406cd2e" + "sha256:0450ada317a65383b7cce9576096150fdb97396dcfe559109b403c7242faffef", + "sha256:0b5263dcede17b6b0c41ef0c3ccce847d82a7da98709e75cf7efde3e9e3b5cae", + "sha256:0d5176f310a7fe6f65608213cc74f4228e4f4ce9fd10bcb2bb6da8fc66991462", + "sha256:0ed49efcd0dc1611378beadbd97beb5d9ca8fe48579fc04a6ed0844072261b6a", + "sha256:145a73850926018ec1681e734cedcf2716d6a8697d90da11284043b745c286d5", + "sha256:1987770fb4887560363b0e1a9b75aa303e447433c41284d3af2840a2f226d6e0", + "sha256:246067ba0cf5560cf42e775069c5d80a8989d14a7ded21af529a4e10e3e0f0e6", + "sha256:2c311e2f63e42c1bf86361d11e2c4a59f25d9e7aabdbdf53dc38b885c5435cdb", + "sha256:2cee3b117a8d13ab98b38d5b6bdcd040cfb4181068d05ce0c474ec9db5f3c5bb", + "sha256:2de1378f72def7dfb5dbd73d86c19eda0ea7b0a6873910cc37d57e80f10d64e1", + "sha256:30f546358dfa0953db92ba620101fefc81574f87b2346556b90b5f3ef16e55ce", + "sha256:34245498eeb9ae54c687a07ad7f160053911b5745e186afe2d0c0f2898a1ab8a", + "sha256:392432a2dde22b86f70dd4a0e9671a349446c93965f261dbaecfaf28813e5c42", 
+ "sha256:3c0600bcc1adfaaac321422d615939ef300df81e165f6522ad096b73439c0f58", + "sha256:4016e383f91f2814e48ed61e6bda7d24c4d7f2402c75dd28f7e1027ae44ea204", + "sha256:40cd36749a1035c34ba8d8aaf221b91ca3d111532e5ccb5fa8c3703ab1b967ed", + "sha256:413ad794dccb19453e2b97c2375f2ca3cdf34dc50d18cc2693bd5aed7d16f4b9", + "sha256:4a93d28ed4b4b39e6f46fd240896c29b686b75e39cc6992692e3922ff6982b4c", + "sha256:4ee84c2a22a809c4f868153b178fe59e71423e1f3d6a8cd416134bb231fbf6d3", + "sha256:50c5c7b8aa5443304c55c262c5693b108c35a3b61ef961f1e782dd52a2f559c7", + "sha256:525410e0790aab036492eeea913858989c4cb070ff373ec3bc322d700bdf47c1", + "sha256:526c900397f3bbc2db9cb360ce9c35134c908961cdd0ac25b1ae6ffcaa2507ff", + "sha256:54775858c7f2f214476773ce785a19ee81d1294a6bedc5cc17225355aab74802", + "sha256:584096938a001378484aa4ee54e05dc79c7b9dd933e271c744a97b3b6f644957", + "sha256:6130459189e61baac5a88c10019b21e1f0c6d00ebc770e9ce269475650ff7f73", + "sha256:67453e603cea8e85ed566b2700efa1f6916aefbc0c9fcb2e86aaffc08ec38e78", + "sha256:68d54234c8d76d8ef74744f9f9fc6324f1508129e23da8883771cdbb5818cbef", + "sha256:6dfe7f984f28a8ae94ff3a7953cd9678550dbd2a1f9bda5dd9c5ae627744c78e", + "sha256:74bd573dde27e58c760d9ca8615c41a57e719bff315c9adb6f2a4281a28e8798", + "sha256:7603ca26d75b1b86160ce1bbe2787a0b706e592af5b2504e12caa88a217767b0", + "sha256:76719dd521c20a58a6c256d058547b3a9595d1d885b830013366e27011ffe804", + "sha256:7c3623053b85b4296cd3925eeb725e386644fd5bc67250b3bb08b0f144803e7b", + "sha256:7e44eba534381dd2687be50cbd5f2daded21575242ecfdaf86bbeecbc38dae8e", + "sha256:7fe3d65279bfbee8de0fb4f8c17fc4e893eed2dba21b2f680e930cc2b09075c5", + "sha256:8340def6737118f5429a5df4e88f440746b791f8f1c4ce4ad8a595f42c980bd5", + "sha256:84ede78acde96ca57f6cf8ccb8a13fbaf569f6011b9a52f870c662d4dc8cd854", + "sha256:850ff6155371fd802a280f8d369d4e15d69434651b844bde566ce97ee2277420", + "sha256:87a2e00bf17da098d90d4145375f1d985a81605267e7f9377ff94e55c5d769eb", + 
"sha256:88d385b8e7f3a870146bf5ea31786ef7463e99eb59e31db56e2315535d811f55", + "sha256:8a2fb742ef378284a50766e985804bd6adb5adb5aa781100b09befdbfa757b65", + "sha256:8dc0fba9a74b471c45ca1a3cb6e6913ebfae416678d90529d188886278e7f3f6", + "sha256:8fa1510b96c08aaad49303ab11f8803787c99222288f310a62f493faf883ede1", + "sha256:8fd12d0f989c6099e7b0f30dc6e0d1e05499f3337461f0b2b0dadea6c64b89df", + "sha256:9060addfa4ff753b09392efe41e6af06ea5dd257829199747b9f15bfad819460", + "sha256:930ffa1925393381e1e0a9b82137fa7b34c92a019b521cf9f41263976666a0d6", + "sha256:936d8a4f0f7081327014742cd51d320296b56aa6d324461a13724ab05f4b2933", + "sha256:97fe431f2ed646a3b56142fc81d238abcbaff08548d6912acb0b19a0cadc146b", + "sha256:9bd8695be2c80b665ae3f05cb584093a1e59c35ecb7d794d1edd96e8cc9201d7", + "sha256:9dec0000d2d8621d8015c293e24589d46fa218637d820894cb7356c77eca3259", + "sha256:a478aa11b328983c4444dacb947d4513cb371cd323f3845e53caeda6be5589d5", + "sha256:a481a574af914b6e84624412666cbfbe531a05667ca197804ecc19c97b8ab1b0", + "sha256:a4ac6a0f0f6402854adca4e3259a623f5c82ec3f0c049374133bcb243132baf9", + "sha256:a5e69046f83c0d3cb8f0d5bd9b8838271b1bc898e01562a04398e160953e8eb9", + "sha256:a7442662afebbf7b4c6d28cb7aab9e9ce3a5df055fc4116cc7228192ad6cb484", + "sha256:aa8a8caca81c0a3e765f19c6953416c58e2f4cc1b84829af01dd1c771bb2f91f", + "sha256:ab3247d58b393bda5b1c8f31c9edece7162fc13265334217785518dd770792b8", + "sha256:b10a47e5390c4b30a0d58ee12581003be52eedd506862ab7f97da7a66805befb", + "sha256:b34508f1cd928ce915ed09682d11307ba4b37d0708d1f28e5774c07a7674cac9", + "sha256:b8d3bb96c147b39c02d3db086899679f31958c5d81c494ef0fc9ef5bb1359b3d", + "sha256:b9d45dbb3aaec05cf01525ee1a7ac72de46a8c425cb75c003acd29f76b1ffe94", + "sha256:bf4480a5438f80e0f1539e15a7eb8b5f97a26fe087e9828e2c0ec2be119a9f72", + "sha256:c160a04283c8c6f55b5bf6d4cad59bb9c5b9c9cd08903841b25f1f7109ef1259", + "sha256:c96a43822f1f9f69cc5c3706af33239489a6294be486a0447fb71380070d4d5f", + "sha256:c9fd9dcf9c91affe71654ef77426f5cf8489305e1c66ed4816f5a21874b094b9", 
+ "sha256:cddb31f8474695cd61fc9455c644fc1606c164b93bff2490390d90464b4655df", + "sha256:ce1bb21fc7d753b5f8a5d5a4bae99566386b15e716ebdb410154c16c91494d7f", + "sha256:d1c031a7572f62f66f1257db37ddab4cb98bfaf9b9434a3b4840bf3560f5e788", + "sha256:d589264dbba3b16e8951b6f145d1e6b883094075283dafcab4cdd564a9e353a0", + "sha256:dc065a4285307607df3f3686363e7f8bdd0d8ab35f12226362a847731516e42c", + "sha256:e10c440d142fa8b32cfdb194caf60ceeceb3e49807072e0dc3a8887ea80e8c16", + "sha256:e3552fe98e90fdf5918c04769f338a87fa4f00f3b28830ea9b78b1bdc6140e0d", + "sha256:e392804a38353900c3fd8b7cacbea5132888f7129f8e241915e90b85f00e3250", + "sha256:e4cecdb52aaa9994fbed6b81d4568427b6002f0a91c322697a4bfcc2b2363f5a", + "sha256:e5148ca8955affdfeb864aca158ecae11030e952b25b3ae15d4e2b5ba299bad2", + "sha256:e6b2732ef3bafc759f653a98881b5b9cdef0716d98f013d376ee8dfd7285abf1", + "sha256:ea756b5a7bac046d202a9a3889b9a92219f885481d78cd318db85b15cc0b7bcf", + "sha256:edb69b9589324bdc40961cdf0657815df674f1743a8d5ad9ab56a99e4833cfdd", + "sha256:f0203433121484b32646a5f5ea93ae86f3d9559d7243f07e8c0eab5ff8e3f70e", + "sha256:f6a19bcab7fbd8f8649d6595624856635159a6527861b9cdc3447af288a00c00", + "sha256:f752e80606b132140883bb262a457c475d219d7163d996dc9072434ffb0784c4", + "sha256:f7914ab70d2ee8ab91c13e5402122edbc77821c66d2758abb53aabe87f013287" ], "index": "pypi", "markers": "python_version >= '3.9'", - "version": "==3.11.11" + "version": "==3.11.12" }, "aiormq": { "hashes": [ @@ -298,20 +303,20 @@ }, "boto3": { "hashes": [ - "sha256:641dd772eac111d9443258f0f5491c57c2af47bddae94a8d32de19edb5bf7b1c", - "sha256:b40fbf2c0f22e55b67df95475a68bb72be5169097180a875726b6b884339ac8b" + "sha256:084ff25af2d7bda3102d6367f5453e2e83f8cde1da73079ea144595b03cb9400", + "sha256:be8e32c34d7b103a64fafdd277fa1ec136733b4bbfc11dcfa597efa36a820b37" ], "index": "pypi", "markers": "python_version >= '3.8'", - "version": "==1.36.11" + "version": "==1.36.18" }, "botocore": { "hashes": [ - 
"sha256:82c5660027f696608d0e55feb08c146c11c7ebeba7615961c7765dcf6009a00d", - "sha256:c919be883f95b9e0c3021429a365d40cd7944b8345a07af30dc8d891ceefe07a" + "sha256:7898d109affd9231c555e71fda88308c1da2db8d39e83d33eb8cb40ebf1ba82f", + "sha256:ddadafe460e91f11677720a2fcc3ea09c4abb914de2b000da7ba46b4c97da3d7" ], "markers": "python_version >= '3.8'", - "version": "==1.36.11" + "version": "==1.36.18" }, "bytecode": { "hashes": [ @@ -527,111 +532,115 @@ }, "cryptography": { "hashes": [ - "sha256:1923cb251c04be85eec9fda837661c67c1049063305d6be5721643c22dd4e2b7", - "sha256:37d76e6863da3774cd9db5b409a9ecfd2c71c981c38788d3fcfaf177f447b731", - "sha256:3c672a53c0fb4725a29c303be906d3c1fa99c32f58abe008a82705f9ee96f40b", - "sha256:404fdc66ee5f83a1388be54300ae978b2efd538018de18556dde92575e05defc", - "sha256:4ac4c9f37eba52cb6fbeaf5b59c152ea976726b865bd4cf87883a7e7006cc543", - "sha256:62901fb618f74d7d81bf408c8719e9ec14d863086efe4185afd07c352aee1d2c", - "sha256:660cb7312a08bc38be15b696462fa7cc7cd85c3ed9c576e81f4dc4d8b2b31591", - "sha256:708ee5f1bafe76d041b53a4f95eb28cdeb8d18da17e597d46d7833ee59b97ede", - "sha256:761817a3377ef15ac23cd7834715081791d4ec77f9297ee694ca1ee9c2c7e5eb", - "sha256:831c3c4d0774e488fdc83a1923b49b9957d33287de923d58ebd3cec47a0ae43f", - "sha256:84111ad4ff3f6253820e6d3e58be2cc2a00adb29335d4cacb5ab4d4d34f2a123", - "sha256:8b3e6eae66cf54701ee7d9c83c30ac0a1e3fa17be486033000f2a73a12ab507c", - "sha256:9e6fc8a08e116fb7c7dd1f040074c9d7b51d74a8ea40d4df2fc7aa08b76b9e6c", - "sha256:a01956ddfa0a6790d594f5b34fc1bfa6098aca434696a03cfdbe469b8ed79285", - "sha256:abc998e0c0eee3c8a1904221d3f67dcfa76422b23620173e28c11d3e626c21bd", - "sha256:b15492a11f9e1b62ba9d73c210e2416724633167de94607ec6069ef724fad092", - "sha256:be4ce505894d15d5c5037167ffb7f0ae90b7be6f2a98f9a5c3442395501c32fa", - "sha256:c5eb858beed7835e5ad1faba59e865109f3e52b3783b9ac21e7e47dc5554e289", - "sha256:cd4e834f340b4293430701e772ec543b0fbe6c2dea510a5286fe0acabe153a02", - 
"sha256:d2436114e46b36d00f8b72ff57e598978b37399d2786fd39793c36c6d5cb1c64", - "sha256:eb33480f1bad5b78233b0ad3e1b0be21e8ef1da745d8d2aecbb20671658b9053", - "sha256:eca27345e1214d1b9f9490d200f9db5a874479be914199194e746c893788d417", - "sha256:ed3534eb1090483c96178fcb0f8893719d96d5274dfde98aa6add34614e97c8e", - "sha256:f3f6fdfa89ee2d9d496e2c087cebef9d4fcbb0ad63c40e821b39f74bf48d9c5e", - "sha256:f53c2c87e0fb4b0c00fa9571082a057e37690a8f12233306161c8f4b819960b7", - "sha256:f5e7cb1e5e56ca0933b4873c0220a78b773b24d40d186b6738080b73d3d0a756", - "sha256:f677e1268c4e23420c3acade68fac427fffcb8d19d7df95ed7ad17cdef8404f4" + "sha256:00918d859aa4e57db8299607086f793fa7813ae2ff5a4637e318a25ef82730f7", + "sha256:1e8d181e90a777b63f3f0caa836844a1182f1f265687fac2115fcf245f5fbec3", + "sha256:1f9a92144fa0c877117e9748c74501bea842f93d21ee00b0cf922846d9d0b183", + "sha256:21377472ca4ada2906bc313168c9dc7b1d7ca417b63c1c3011d0c74b7de9ae69", + "sha256:24979e9f2040c953a94bf3c6782e67795a4c260734e5264dceea65c8f4bae64a", + "sha256:2a46a89ad3e6176223b632056f321bc7de36b9f9b93b2cc1cccf935a3849dc62", + "sha256:322eb03ecc62784536bc173f1483e76747aafeb69c8728df48537eb431cd1911", + "sha256:436df4f203482f41aad60ed1813811ac4ab102765ecae7a2bbb1dbb66dcff5a7", + "sha256:4f422e8c6a28cf8b7f883eb790695d6d45b0c385a2583073f3cec434cc705e1a", + "sha256:53f23339864b617a3dfc2b0ac8d5c432625c80014c25caac9082314e9de56f41", + "sha256:5fed5cd6102bb4eb843e3315d2bf25fede494509bddadb81e03a859c1bc17b83", + "sha256:610a83540765a8d8ce0f351ce42e26e53e1f774a6efb71eb1b41eb01d01c3d12", + "sha256:6c8acf6f3d1f47acb2248ec3ea261171a671f3d9428e34ad0357148d492c7864", + "sha256:6f76fdd6fd048576a04c5210d53aa04ca34d2ed63336d4abd306d0cbe298fddf", + "sha256:72198e2b5925155497a5a3e8c216c7fb3e64c16ccee11f0e7da272fa93b35c4c", + "sha256:887143b9ff6bad2b7570da75a7fe8bbf5f65276365ac259a5d2d5147a73775f2", + "sha256:888fcc3fce0c888785a4876ca55f9f43787f4c5c1cc1e2e0da71ad481ff82c5b", + "sha256:8e6a85a93d0642bd774460a86513c5d9d80b5c002ca9693e63f6e540f1815ed0", 
+ "sha256:94f99f2b943b354a5b6307d7e8d19f5c423a794462bde2bf310c770ba052b1c4", + "sha256:9b336599e2cb77b1008cb2ac264b290803ec5e8e89d618a5e978ff5eb6f715d9", + "sha256:a2d8a7045e1ab9b9f803f0d9531ead85f90c5f2859e653b61497228b18452008", + "sha256:b8272f257cf1cbd3f2e120f14c68bff2b6bdfcc157fafdee84a1b795efd72862", + "sha256:bf688f615c29bfe9dfc44312ca470989279f0e94bb9f631f85e3459af8efc009", + "sha256:d9c5b9f698a83c8bd71e0f4d3f9f839ef244798e5ffe96febfa9714717db7af7", + "sha256:dd7c7e2d71d908dc0f8d2027e1604102140d84b155e658c20e8ad1304317691f", + "sha256:df978682c1504fc93b3209de21aeabf2375cb1571d4e61907b3e7a2540e83026", + "sha256:e403f7f766ded778ecdb790da786b418a9f2394f36e8cc8b796cc056ab05f44f", + "sha256:eb3889330f2a4a148abead555399ec9a32b13b7c8ba969b72d8e500eb7ef84cd", + "sha256:f4daefc971c2d1f82f03097dc6f216744a6cd2ac0f04c68fb935ea2ba2a0d420", + "sha256:f51f5705ab27898afda1aaa430f34ad90dc117421057782022edf0600bec5f14", + "sha256:fd0ee90072861e276b0ff08bd627abec29e32a53b2be44e41dbcdf87cbee2b00" ], "markers": "python_version >= '3.7' and python_full_version not in '3.9.0, 3.9.1'", - "version": "==44.0.0" + "version": "==44.0.1" }, "ddtrace": { "hashes": [ - "sha256:01f76fc9bf1413a188ddd59851eb3b668d3de936afed73a5914a817f36e11005", - "sha256:05b68ba6fe4da5317396100669edf91b3d54b95ae979a2a22ca880cfcc6c249d", - "sha256:0a1d7eaf6c9a5e0eabb8396f7d19faffc8f76a1ae37f34814c3432a9ca6f31da", - "sha256:0a5f092df1ee4d0afe96502866ff0fb446a07c6a23d445ed616f1302c883e1d3", - "sha256:0af78a424e9d87250a8648a35b7de5653f27b3f6f6803c1b33780816a07e6d26", - "sha256:0b121285459693ae9f3c9ce54cc4be981a2e73d4c52b8a5eb038cf41df9974dd", - "sha256:1197e89fcaaca4ce419e3c812d04c73b8d16973202472cf3f674b4698b967697", - "sha256:13e9b9a87c9df8a029fd3583ac660538dfa9d53207fee82b7f04749148bf8a3b", - "sha256:14fe4f583bec105c40e233c74fcbaea185658651c626ce1609750d83d93a00ae", - "sha256:1512d07e6c7cde13ae251906d57d31613fe5ee99fab2894e90679053b4256953", - 
"sha256:194973afa1e357c68b6a6eceaabbddcef01130d167775126c2a15b3c1827f683", - "sha256:1cd1b240f465c98e226ab896b1b1c3696752d5eb1051a0aafb8a3db701d2ddc1", - "sha256:2723378e670d27927f7d1ab878c8668fc392a5656a66453b9808e7c4025431fd", - "sha256:301a7787a34113b56cc9067a593c4717f6e158f2393883c30b59d0a37ebc06fa", - "sha256:33e4406a436278722df193c93d49e662a8891e8a440fddb273dca9a56fa27947", - "sha256:37693555704a0cbd4a925e4ffe9c6df696146c85557c5f66ce3a7a88406772d9", - "sha256:37995326df811236c9d92d1b5713378a7f11270bf1e21c64914653a3e12d7d01", - "sha256:3a300515c3327af4fd5c6c83e6ca63cd0a20e4243381d4b712e3f406d9ddf201", - "sha256:3a615ae95ef8f889304b2958655ac8cda23cf2f2c8faf5d8ff88bd14bdcf3fb4", - "sha256:3f526e75d1b61019db2cd715e8c7298325e21c3584f8677a7decf37aa81e7284", - "sha256:44bcd604a1d9095f4eb6813db6a677a208bd47884aff9ddc5aa46f596765f38e", - "sha256:4902b64ba89a8e4008228e7a5007e20b2bb8071c6c7689abd47dddc159e2baf1", - "sha256:4c840dc91c622138a4a6abdbcbee56897d4c55d9e7bf16b1902ee676f20b22f2", - "sha256:4d31d769f222c9a5bde1d4a594da1341014bf02db1a7194f5a41ed7e5c4c9fa8", - "sha256:4fca20220bf668ebcac7051d28648b62aa95d2afeb5036ecad167cb454c7dcf1", - "sha256:4fd7ec6410225e64ca875d1bc9bd6bd8489f52dd8558462fbb52447fb8f66ad3", - "sha256:54068dbf034e1f607ef5d58a9fa1b26bd78b4f3db0618ce0a3e9c4b04fff7209", - "sha256:567054d0c01dc552a8a24c4c9eeb98d778be720d2c4c9536acf1b86d6969a5e4", - "sha256:569d7a92a88ba9b2a203addea688b0585901534c92c2d148ef9f971b6d7b3805", - "sha256:57864590ddb84da4eb99332fe4b521e6fd747992178e3eabcf4f87406e908bb1", - "sha256:62e86d9b00277fe2b9bdfbc51ca1bc34aa5c1200aa6bc5084c7eaaab28b022de", - "sha256:698207d88941ea3e4e5f3add6071e6651caa12fcffe079359507391382251759", - "sha256:6f89e6003a738800beeb9948c847366976c73de2e24cc469b644a125f418c0a8", - "sha256:758ba828eddd144a4804af692869e7278376efa740932a7453e8fdc0ed6ef6a7", - "sha256:7f37966012078151713d61382de2a7ed710a8f375820e0db41930436b813b651", - "sha256:879b963baf0b97d5ddd26185ab496b79efd25bbfdc2c25dbb1113ec19dc5abf9", 
- "sha256:8d76ce49eb42588766db6756240d746441a66f03932547bfca9c62a8aecdbb38", - "sha256:91ab68c1facc6cf04085fdeea53210ed9928e95116809fd9bfe2dea54e83375d", - "sha256:96441189ccc772ef4871e23a35cec58a748c16ebfb2293eccaaa719dcbc368fd", - "sha256:9d209bef14caafcd53be8c14e04741d86c08f76496c1bf755e2eaa38605ce3e0", - "sha256:9f95a251f84725df055202d933b4a8fb39cefd51341e10cad17d2a8b4d64700e", - "sha256:a4a1da150e92b6d43047f2a91a7d3b7133207f94b82613625abf82662359b30e", - "sha256:a523924382d2f8f87731e4ad86bbf4721fba9eb807ed3b0c862db6d768e1e81c", - "sha256:a71d2fc753b8fa2c3435b6f4d5f820d8deb51f49df59a4886b4da68b67f923d3", - "sha256:a84d2e2411496c1b4ca3ce0cfb407d186cb3d13f1769c321fec30e677d815cd8", - "sha256:ab27596e82bdfe9c0c5580f6886ec943ae2fe615a446c22470f6a1f9742dec62", - "sha256:ab7f1babd7a8e73edf70c957a5b3bbeb4c615b232a078a0fe4da566e1663d1aa", - "sha256:ae5e802aaefc0cd3b63e3fd46e051a39e9f960a02e89f44a5bb820f445b736f9", - "sha256:b90333661ffd3460bae6dbbd7a5f35d8467cff36bd7a689a47b014edb19c0fe6", - "sha256:b95f14f0634fe3f02dcebb7b8a124207b3d44168fd0dfc6bfff1e4db93978089", - "sha256:b9d358bc7b4b1daa2d6e6bc697244b39db653ddd5ae139045941d3db28950bfe", - "sha256:bb77464849b092f93839d5c257df9eaeb018521ddea2deef97dfc6e3501a2516", - "sha256:c74d69e6a4cbd91f6fe411519d753f34893d6d40a68829b43485690df8a7f30f", - "sha256:c83c2458de1cf1bbac48689c6541a85d54ad94ae6608961e1089cc2959a8c77a", - "sha256:cfbc926ddfeece0312e82f9e00a68001647666d11ceb050a5bbe66ca8931e3d2", - "sha256:d1cec952ce2ca9efbb34c8a9ee522e1cc588fe454b9115120b30fd5d0f821512", - "sha256:d39f96e2fdfdf7ab43ee89a20d914f5ab12f8f361c390663eacb0e5da6e6e7fb", - "sha256:da47746a24242d805a800bca0a10b1354353e18b0bc4b6caf9c9c1724ba286b0", - "sha256:dcadc60aa60e11f2db56065a834aaa5e52a9be02e8edc8d14aa7015fb54092ce", - "sha256:df413d646fc14b4be51a15ed8e484bcdf3b21e370e2644a586283bcc7e0b2355", - "sha256:e1dee099099b95acf7d0e552179925cfec58a52315cc914d153506367b195bc4", - 
"sha256:e2de7371291283cba1afdd7e919a577637099da0d6872d33b4008e1cad6e1b8b", - "sha256:eb76547a4bbf285d03ffc6e065bbad61761954741da2df0e4683d68d46ef2159", - "sha256:eeca6b6bd48794d48de438981dccbc96a06366edde798c12b2b3348ca5e03717", - "sha256:f185c6dd88cd04884f0ad27b37f14d837274e8fc4dc43407781334d92d41c3bc", - "sha256:f1ce886b112e0e8aa66ba8cf3593f00f7f6ae6b48dd09bd8ce90c59adad59d66", - "sha256:f77103d36e6ab52cb45781766620d3874d0a728565afa7fd57f8ce2d5087e8e7", - "sha256:fa4f90f472768aef8ce023a924505c9d1d09428fc9d6ab81bc0e3ab183e3ff48" + "sha256:04f47dc02bc08040a700863bb2ee148538aab90444dfffd648dcb03ed61dbc94", + "sha256:055cc0469c5b7ffa7215d7a319665396e409cdf2e3654356c750db9936996f84", + "sha256:06d6c6f1353c7f8316dc3cecab4ac0878153b8cf6dd681290888fadbab08e23b", + "sha256:0c5912dd02492a40f6248893e5c530862e407091ed13a4348e6d0f890b96613d", + "sha256:0d6f8d1b55c1247e75d35989c7851e3f3a602fdf0f5da4c3707c337c5e98e705", + "sha256:0fb6eab37784e2fe6fad2a8ca7ad057a037a0e2fff69b006760d03823bcbd623", + "sha256:1063a79090f96507764de7a20416b58f74d77ff9e9b11b3a26da06387eb6bfac", + "sha256:11d4c7df61b614359b690cbde2cc9e53ae69a14af1cb18b8f5e9de637255f062", + "sha256:13a3e8cefcc4573f1b73d79c1b457da0510cd179db992ee3129decdda80d2a32", + "sha256:1569090a99f73e367019a35720ec3305e0514589bedf20bd54486ab8977f04d8", + "sha256:1db91f8c7b49f21ad47aa151431f971c1f6952a29dce0d8d6e68ef81f0c08bcf", + "sha256:27e5b3b51774e02335f4d3c772b2ce645f610279322fcf18ca7ee27d3e57b411", + "sha256:2a3d48f43015e7083a987123a455d96a674f22367c609bc363cdd3698fdf74a8", + "sha256:33025cb8db1ce46702ad02fa62fe02aee68624b076ee9af5f7c8c08e364c25a3", + "sha256:337cf3421afb384c5677afdbd8cbc31d2341a81f7c9a35196b7e3c0ec2d17c98", + "sha256:366bcda11eed3a61e3c4e3eb374f54acd5d6a719ca1afaa3a47479da6d8b3a59", + "sha256:381e399c2997bed25d39fa4dee24f5a43678a5a560f7e8c00bd5e5bed1617da4", + "sha256:39e1ee073f66b770af4933a0c77aa7acb2b73eabe16a6738f38f671b2ffbdaa7", + "sha256:3bfc86ab116b7239e52edc80abee6ae48e80ad8bed265097ad0ab167bded22cc", 
+ "sha256:3d40a523eeefb0798f9a2f3550b98d5ac46365188a59e566c8dd57f5535d9184", + "sha256:3e43fd98394fe66b1429098aa1313087745f402a343d7d28b00246874cf6a181", + "sha256:4de0a6c26edb2edb08112421b53064de923fa4ba79ad2396cbd01e5f605229fc", + "sha256:509452db2634b7d811b73a66dd4fc0acd4450ff2c11616ae43161d0da7c6ad7f", + "sha256:50cb9899e0aa1bbd09c4e02ea7c9411fd5bfea2c2738f201b02c9437cb279bc0", + "sha256:56258219495e567e558fcfbcb73a07c7aaa9d4b7f18ec889fb5b52ccf4bf5323", + "sha256:56e397c5bae1a1d4e46de2ebe310b8be2d80dc45c3b985149bfa0a9e0f473616", + "sha256:5ad2a4d6e9e22f1f6fbcfbaeb7936d9863edfbd65d348d5926c8c41c707bf498", + "sha256:62aebca787a8f327f47631f42cdf7637fc23a292c1dcd6b98d7534ebbb4f2c92", + "sha256:631c23642e3685831f4486ac17661c232e8adf18146f1cd3511b54e60e46f81a", + "sha256:6580938362752889e08b10e07d9d148c20de62faf6fa55fbe93893d889f848f6", + "sha256:6e21e16c3c4d9521764358e274375cd48417b25d7c5ea34ef4eb87af0049cedb", + "sha256:6f189f0bdf939efdb6d5d0240bd49f4cc767525dd2591bd229d9aea199b60ba8", + "sha256:737e1a185637a5593869ab89b4a781b8a7c0dfa6f6e413fdecad6a3a67e41e55", + "sha256:73ad9b316fcba7731aa80793dfdeddc0ca213058af0a0d8e371ee3b6602086d8", + "sha256:73bc42499a4dafaddc4b8d1f489492ec92e1b9616cf6bc2e17d17ef037ee866b", + "sha256:771f3e00c866a45a3a73fec6da114fa209e3d186f0b3f9d10fd772c242c47bb3", + "sha256:7917c79aa07485985320dc51a9c7e66dca2ae85c1070e1c6f8163d2cc33d4ec3", + "sha256:7a9129a253a01071ab71145de9e3ba0aaf0b1d6394c88cfd8b853c30638a3280", + "sha256:7d11366501569a5617afe8fd92da048dac4ccbef01238c66e2bb5707ce8a280a", + "sha256:80942e5eabb2b1e048bdc5a66da0e5ab9b2524357a2a16a056af3cfe57e16c35", + "sha256:8138c50dad1fc0ee71fb41fb1caefb0097655b1aa05467d01cfd736753d360f1", + "sha256:856591738259c1f4e6ffcdc08dc06f7793c28f89a25a1393879f4d622e045a81", + "sha256:85980bdb1c433186d16518ec3b0a0e9f4a7e46599d7a24a0d68dab091d717421", + "sha256:8836ce031c11bdeda3e14f19fcfeac6c23dba8cdc82d880ddac64bc2f53ec503", + 
"sha256:8900953c6c86fc5ec403a31970e94f01a9bb506a5c6b5588cd64537e47d5b9eb", + "sha256:8bfd861190636c98e722937a22ee7af297c8e35591f7d5c5fb5b4c0b9bb928aa", + "sha256:8dbe3736571f4443033de880e9025824c3cabe2c4bcce7590cb8d43007d0218d", + "sha256:9131801116ddb88fad2f8df5dd18162c27a1243243899c9dd8248fe3b9d09011", + "sha256:9550ea3857a6b0b99983c8d395d3c2ef0d96e208a172f99b2b7c3aae31d4e6a2", + "sha256:9c38c5072d8c77495c9bc483bab2c28f7c53faf8d38c215605e1a3177a952cdb", + "sha256:a03ebb1d49702d9560bc7962cd6a82965c5ddea181e8d1379de7ecde94a60555", + "sha256:a4f2b49f920e4f9abed6b0117cdc8f94314a67bd3b670908f5cb26c22f34c64f", + "sha256:b4504ed689217dfb2ebc50b6285f13060150caa883efeb75d3cedf547670f499", + "sha256:bd2d68155fac1c52eba24fdbcd57e9cdb361ae8ba73f703162375937cb19848d", + "sha256:be543a6cfe1e12e09df1b9c1f9908f327a401e4f8715e4c2d592665188f863fd", + "sha256:c4bb0ab10cb4bcf57adf48de42c4c442ca7fd9b4bae35fde1d3b786c5f892404", + "sha256:c4c36d64c8ba5a9d5a22b3d8f4c5dcddee55515055bb51d587bff176d74a81fd", + "sha256:c757449b77c51f8d34ee77937e54930e026a3cd0f26bf4bc12b9e311918906eb", + "sha256:ca89814e75db5dfc8e149a676c83926bb89ab3a66699bf4503159d4de0401a97", + "sha256:ce870a1e02b314a53bd91330e9f3b2e8c7f77e76b8ca498aaf00ffea7dc32a4a", + "sha256:dd2d4a688dbe5b9e2f51bc6925329aaad2fce6ada5584a72ec3732c41d680699", + "sha256:dd5fe63ca7ae57cc577ad5a106b78c68d0ab0cfb855c9c949304a909f8ab4391", + "sha256:de8b77eb3f1149956ef8cc44f287e44870dd8bf3d39ddbf9bd60319ec5db4831", + "sha256:df3ad61d27e87433d60bbca635d46823876f6dc6d77a27dc71fbce354c337b57", + "sha256:e563f13b4e575a8c518036bd97db280adba8314ed65a1297f135b8d10456a8f1", + "sha256:e5727eef2af0c0de0376fe293b1e0b1db03f21f0fbf5614b29f3a1982fd11bc9", + "sha256:e828cb561ba89d442dc3ddf472a0ccaf30e0148919da9adb5bc16890f2173b27", + "sha256:f71aed982ce37949a60bda007f59cea2bfa23ecf8f2a03c805614885ca2ce645" ], "index": "pypi", "markers": "python_version >= '3.7'", - "version": "==2.20.0" + "version": "==2.20.1" }, "deprecated": { "hashes": [ @@ -1192,11 
+1201,11 @@ }, "mako": { "hashes": [ - "sha256:42f48953c7eb91332040ff567eb7eea69b22e7a4affbc5ba8e845e8f730f6627", - "sha256:577b97e414580d3e088d47c2dbbe9594aa7a5146ed2875d4dfa9075af2dd3cc8" + "sha256:95920acccb578427a9aa38e37a186b1e43156c87260d7ba18ca63aa4c7cbd3a1", + "sha256:b5d65ff3462870feec922dbccf38f6efb44e5714d7b593a656be86663d8600ac" ], "markers": "python_version >= '3.8'", - "version": "==1.3.8" + "version": "==1.3.9" }, "markdown-it-py": { "hashes": [ @@ -1491,11 +1500,11 @@ }, "opentelemetry-api": { "hashes": [ - "sha256:5fcd94c4141cc49c736271f3e1efb777bebe9cc535759c54c936cca4f1b312b8", - "sha256:d04a6cf78aad09614f52964ecb38021e248f5714dc32c2e0d8fd99517b4d69cf" + "sha256:375893400c1435bf623f7dfb3bcd44825fe6b56c34d0667c542ea8257b1a1240", + "sha256:d5f5284890d73fdf47f843dda3210edf37a38d66f44f2b5aedc1e89ed455dc09" ], "markers": "python_version >= '3.8'", - "version": "==1.29.0" + "version": "==1.30.0" }, "packaging": { "hashes": [ @@ -1732,78 +1741,76 @@ }, "pymongo": { "hashes": [ - "sha256:0783e0c8e95397c84e9cf8ab092ab1e5dd7c769aec0ef3a5838ae7173b98dea0", - "sha256:0f56707497323150bd2ed5d63067f4ffce940d0549d4ea2dfae180deec7f9363", - "sha256:11280809e5dacaef4971113f0b4ff4696ee94cfdb720019ff4fa4f9635138252", - "sha256:15a624d752dd3c89d10deb0ef6431559b6d074703cab90a70bb849ece02adc6b", - "sha256:15b1492cc5c7cd260229590be7218261e81684b8da6d6de2660cf743445500ce", - "sha256:1a970fd3117ab40a4001c3dad333bbf3c43687d90f35287a6237149b5ccae61d", - "sha256:1ec3fa88b541e0481aff3c35194c9fac96e4d57ec5d1c122376000eb28c01431", - "sha256:1ecc2455e3974a6c429687b395a0bc59636f2d6aedf5785098cf4e1f180f1c71", - "sha256:23e1d62df5592518204943b507be7b457fb8a4ad95a349440406fd42db5d0923", - "sha256:29e1c323c28a4584b7095378ff046815e39ff82cdb8dc4cc6dfe3acf6f9ad1f8", - "sha256:2e3a593333e20c87415420a4fb76c00b7aae49b6361d2e2205b6fece0563bf40", - "sha256:345f8d340802ebce509f49d5833cc913da40c82f2e0daf9f60149cacc9ca680f", - 
"sha256:3a70d5efdc0387ac8cd50f9a5f379648ecfc322d14ec9e1ba8ec957e5d08c372", - "sha256:409ab7d6c4223e5c85881697f365239dd3ed1b58f28e4124b846d9d488c86880", - "sha256:442ca247f53ad24870a01e80a71cd81b3f2318655fd9d66748ee2bd1b1569d9e", - "sha256:45ee87a4e12337353242bc758accc7fb47a2f2d9ecc0382a61e64c8f01e86708", - "sha256:4924355245a9c79f77b5cda2db36e0f75ece5faf9f84d16014c0a297f6d66786", - "sha256:544890085d9641f271d4f7a47684450ed4a7344d6b72d5968bfae32203b1bb7c", - "sha256:57ee6becae534e6d47848c97f6a6dff69e3cce7c70648d6049bd586764febe59", - "sha256:594dd721b81f301f33e843453638e02d92f63c198358e5a0fa8b8d0b1218dabc", - "sha256:5ded27a4a5374dae03a92e084a60cdbcecd595306555bda553b833baf3fc4868", - "sha256:6131bc6568b26e7495a9f3ef2b1700566b76bbecd919f4472bfe90038a61f425", - "sha256:6f437a612f4d4f7aca1812311b1e84477145e950fdafe3285b687ab8c52541f3", - "sha256:6fb6a72e88df46d1c1040fd32cd2d2c5e58722e5d3e31060a0393f04ad3283de", - "sha256:70645abc714f06b4ad6b72d5bf73792eaad14e3a2cfe29c62a9c81ada69d9e4b", - "sha256:72e2ace7456167c71cfeca7dcb47bd5dceda7db2231265b80fc625c5e8073186", - "sha256:778ac646ce6ac1e469664062dfe9ae1f5c9961f7790682809f5ec3b8fda29d65", - "sha256:7bd26b2aec8ceeb95a5d948d5cc0f62b0eb6d66f3f4230705c1e3d3d2c04ec76", - "sha256:7c4d0e7cd08ef9f8fbf2d15ba281ed55604368a32752e476250724c3ce36c72e", - "sha256:88dc4aa45f8744ccfb45164aedb9a4179c93567bbd98a33109d7dc400b00eb08", - "sha256:8ad05eb9c97e4f589ed9e74a00fcaac0d443ccd14f38d1258eb4c39a35dd722b", - "sha256:90bc6912948dfc8c363f4ead54d54a02a15a7fee6cfafb36dc450fc8962d2cb7", - "sha256:9235fa319993405ae5505bf1333366388add2e06848db7b3deee8f990b69808e", - "sha256:93a0833c10a967effcd823b4e7445ec491f0bf6da5de0ca33629c0528f42b748", - "sha256:95207503c41b97e7ecc7e596d84a61f441b4935f11aa8332828a754e7ada8c82", - "sha256:9df4ab5594fdd208dcba81be815fa8a8a5d8dedaf3b346cbf8b61c7296246a7a", - "sha256:a920fee41f7d0259f5f72c1f1eb331bc26ffbdc952846f9bd8c3b119013bb52c", - "sha256:a9de02be53b6bb98efe0b9eda84ffa1ec027fcb23a2de62c4f941d9a2f2f3330", 
- "sha256:ae2fd94c9fe048c94838badcc6e992d033cb9473eb31e5710b3707cba5e8aee2", - "sha256:b3337804ea0394a06e916add4e5fac1c89902f1b6f33936074a12505cab4ff05", - "sha256:ba164e73fdade9b4614a2497321c5b7512ddf749ed508950bdecc28d8d76a2d9", - "sha256:bb99f003c720c6d83be02c8f1a7787c22384a8ca9a4181e406174db47a048619", - "sha256:ca6f700cff6833de4872a4e738f43123db34400173558b558ae079b5535857a4", - "sha256:cec237c305fcbeef75c0bcbe9d223d1e22a6e3ba1b53b2f0b79d3d29c742b45b", - "sha256:dabe8bf1ad644e6b93f3acf90ff18536d94538ca4d27e583c6db49889e98e48f", - "sha256:dac78a650dc0637d610905fd06b5fa6419ae9028cf4d04d6a2657bc18a66bbce", - "sha256:dcc07b1277e8b4bf4d7382ca133850e323b7ab048b8353af496d050671c7ac52", - "sha256:e0a15665b2d6cf364f4cd114d62452ce01d71abfbd9c564ba8c74dcd7bbd6822", - "sha256:e0e961923a7b8a1c801c43552dcb8153e45afa41749d9efbd3a6d33f45489f7a", - "sha256:e4a65567bd17d19f03157c7ec992c6530eafd8191a4e5ede25566792c4fe3fa2", - "sha256:e5d55f2a82e5eb23795f724991cac2bffbb1c0f219c0ba3bf73a835f97f1bb2e", - "sha256:e699aa68c4a7dea2ab5a27067f7d3e08555f8d2c0dc6a0c8c60cfd9ff2e6a4b1", - "sha256:e974ab16a60be71a8dfad4e5afccf8dd05d41c758060f5d5bda9a758605d9a5d", - "sha256:ee4c86d8e6872a61f7888fc96577b0ea165eb3bdb0d841962b444fa36001e2bb", - "sha256:f1945d48fb9b8a87d515da07f37e5b2c35b364a435f534c122e92747881f4a7c", - "sha256:f2bc1ee4b1ca2c4e7e6b7a5e892126335ec8d9215bcd3ac2fe075870fefc3358", - "sha256:fb104c3c2a78d9d85571c8ac90ec4f95bca9b297c6eee5ada71fabf1129e1674", - "sha256:fbedc4617faa0edf423621bb0b3b8707836687161210d470e69a4184be9ca011", - "sha256:fdeba88c540c9ed0338c0b2062d9f81af42b18d6646b3e6dda05cf6edd46ada9" + "sha256:0a8aba4818350d2a463e084ae2426d395e725525fe86bd0219240b265dc1ca52", + "sha256:1518931a4a26d3cb31a97b9187087c6378cd0b0401d7a7cc160e92223a2a3059", + "sha256:157e6a722d051c4bab3e6bc34a1f80fc98101cf2d12139a94e51638d023198c5", + "sha256:15a88b25efcd61c5e539e9204932849b20f393efa330771676e860c4466fe8ad", + 
"sha256:163c887384cb9fd16e0463128600867138a5a9a5344fc0903db08494b39a2d6e", + "sha256:18b669e15922316e25a318cf9ba594eae5a6c24285a70f455ea01571d70a47d2", + "sha256:1cc6d48b74e9abe544dd71b000453ad06e65cbfcfd57c7342a9f012f65532eb2", + "sha256:1ed3c885ac221ddebd3e894aeae7b6bd84e7dbd4fd59f03e551d8f51455c7e9b", + "sha256:1f871efa14a1f368559edff39ec03799ca108bfa8e1ba330b7ffc05eb958661f", + "sha256:25b7cadae1d5287b2eed3d901a347f3fa9bc3f898532e1cb7f28a1c9237d824d", + "sha256:2737ad54f0cd38e19ebf76e6f34dbbc6927615a2973425e64475d15a65fc2f6b", + "sha256:27bc58e0b1bebb17d2426d0cc191c579f2eeaf9692be880f93fe4180cf850ca7", + "sha256:2d1d956c15dd05f1e41c61f0dbcaec59f274db4814cff2c3d9c2508f58004c39", + "sha256:2d7f291245c1688655aa308bbba7c9afa8116692c4fa6ad2646a835ed277a67b", + "sha256:34d8b0ee57ad2a07ecdccec06269a4530767c2befb68f4a185113c866ad20b00", + "sha256:3757ce9257c3486eead45680a8895a0ed9ba27efaf1791fc0cf854367c21c638", + "sha256:3854db4be39cb9e0c34add1fd7e515deab0b4ee30f3cc3978e057746d119ac12", + "sha256:3b01623eb4a7ac58706e1920a94fbb47465f8ee19e7fbbb077e1707e37678863", + "sha256:3fe9589d9a83f6e2abe88f32daa410276eddd038eb8f8f75975cf8ce834cea1f", + "sha256:488d1da6201e1350cfcd4deab599b32237ac2ac591180d44553a2c8e614f2c0e", + "sha256:490d3fd8006154894319af3a974764bf16baea87100222779f49c75cd8b16d3d", + "sha256:4aa2c40e391ca29a337bef2b46b495c3f24b5696a87a58f0a0676a8bf131f9f8", + "sha256:50210249a9bf67937e97205a312b96a4b1250b111cbaaff532d7a61bc2b1562d", + "sha256:61f9a7ca6eb47378809c94cd8fbdbc5ee90c4bbb0c18ddf5592d25ed95cf939c", + "sha256:681806d3ecaf29b11e16a45c1f4c28f99d9d8283238f7b6ea9eee93b5d7bc6d2", + "sha256:698fb3d13126c0719077c98b40378cb9a6f4ab4a72b7691779aa01f1f6c66493", + "sha256:7007669eef871079d39a9bbcda0fbcd4252f9b575592804343d0b5c05849d65b", + "sha256:7146ae04300ce6f83b75c639e97c3d0ce873f30edaac4b719ae173e886b9ff90", + "sha256:74503e853758e1eaa1cad2df9c08c8c35a3d26222cf6426d2cde4b2e8593b9b3", + "sha256:7751e6e99c79057b09441c6ab2a93fae10b4028478aac5b455db8b12f884a3c0", 
+ "sha256:7b3ea3494f3e166a524529bb05a4fdda97afd77031fed3a63862fd815288c9df", + "sha256:7dd7656794bfbfbe10723813332ec33eed29bd9bb7fc122c63829fd445eb8425", + "sha256:822a73d22970978a6e55751d53eb0948521fc8e1380e306b8644096b5230412f", + "sha256:889d20850d5aaa4f19814462c06488553e70ed4c62195dbaad5d5662884778af", + "sha256:892f2137282a0a993d342db6e4e6dc2f3db0b771831c2d505f7055c52c023198", + "sha256:8a4e82dce301c97bb132dec28a487c1a609dc67948e9db7cbd23485875367204", + "sha256:8ac125f2782d8fe3f3ff93a396af5482d694093b3be3e06052197096c83acadc", + "sha256:908e65ab42cd4bf1ffeaafe8f11bb86b3f804d54227058794e33fff2963ccc86", + "sha256:985a614ec24519f4a3d82aafb766c3f782a452fc46b32112d508a4e19b33fff3", + "sha256:9e7bac5fb1383a0df8b6881046207da20deb582a54e70c4c53ac9d4bbce323a3", + "sha256:a63348c850df796199abef7e9afbd86c34449f56731c7ec70b3901df1f5c135b", + "sha256:aadea45e01103f6ee4e80d76d4a27393a4e2bd93472ce4ebb894781f395e1053", + "sha256:b56dbb6883ce7adad8588464948e0723a3d881e5549f48c4767f1654e8e4cb7d", + "sha256:b630596089106c968ddd252bde3fe692c420e24f214dd39ca517d26343d81012", + "sha256:c71655f4188c70032ba56ac7ead688449e4f86a4ccd8e57201ee283f2f591e1d", + "sha256:cdd0e404d5c3b1203ee61fcfee40a1f062f3780ce272febdc2378797b00401d1", + "sha256:d12f4c4579076b7351c63378e22f43d4ce4ed4f2c93208b653c4752f18f47309", + "sha256:d293cec18624825937bd7f1d8bacf16104c79ced45a8ada93f08ec8a7a2ad17a", + "sha256:e147e08df329a7d23cbcb6213bc2fd360e51551626be828092fe2027f3473abc", + "sha256:e596caec72db62a3f438559dfa46d22faefea1967279f553f936ddcb873903df", + "sha256:e6e46bcd3c2f86f442b721551ed5e5812294e4a93fce42517e173bd41d4cd2d8", + "sha256:e7073a740aad257f9d2c12cb95a08f17db1f273d422e7ddfed9895738571cac7", + "sha256:f28d179e7d434869e23f4582c941cb400f75e996cfea472693ec756ee213c685", + "sha256:f415d9569720f408cc4dcc171f60299d454b0414f120666e6fdd349d414bf010", + "sha256:f845b46d77a5bcf0c9ee16f11c5bc84c63f4668d9ea4fc54cd923c8d48a1d521", + 
"sha256:f96683f1dec7d28f12fe43a4d5c0df35d6b80348a9fbf5aac47fa284332a1f92", + "sha256:f97f62e6edde15d1d3d08abd7e43f1787ee9e672b1bb8e9d9f5fd6ded24f5599" ], "index": "pypi", - "markers": "python_version >= '3.8'", - "version": "==4.10.1" + "markers": "python_version >= '3.9'", + "version": "==4.11.1" }, "pyopenssl": { "hashes": [ - "sha256:49f7a019577d834746bc55c5fce6ecbcec0f2b4ec5ce1cf43a9a173b8138bb36", - "sha256:e474f5a473cd7f92221cc04976e48f4d11502804657a08a989fb3be5514c904a" + "sha256:424c247065e46e76a37411b9ab1782541c23bb658bf003772c3405fbaa128e90", + "sha256:cd2cef799efa3936bb08e8ccb9433a575722b9dd986023f1cabc4ae64e9dac16" ], "index": "pypi", "markers": "python_version >= '3.7'", - "version": "==24.3.0" + "version": "==25.0.0" }, "pyparsing": { "hashes": [ @@ -1947,12 +1954,12 @@ }, "sentry-sdk": { "hashes": [ - "sha256:afa82713a92facf847df3c6f63cec71eb488d826a50965def3d7722aa6f0fdab", - "sha256:c359a1edf950eb5e80cffd7d9111f3dbeef57994cb4415df37d39fda2cf22364" + "sha256:7623cfa9e2c8150948a81ca253b8e2bfe4ce0b96ab12f8cd78e3ac9c490fd92f", + "sha256:a6d38e0fb35edda191acf80b188ec713c863aaa5ad8d5798decb8671d02077b6" ], "index": "pypi", "markers": "python_version >= '3.6'", - "version": "==2.20.0" + "version": "==2.21.0" }, "shellingham": { "hashes": [ @@ -2062,11 +2069,11 @@ "reload" ], "hashes": [ - "sha256:2b656c34231bf6947a6020098c94f93d6c45d15688d9e6bb8b3fbf54088e7717", - "sha256:d3ce40ccf399b38aa656614ecab6e5bbcc222b61fee654927822fc97ef4418a6" + "sha256:41c2f27355c90924e6fcd693aade7d21bb974a7bbc71bd54d3026cece9e806a2", + "sha256:c2c10eac817c230b338e0059873c75236efa38700495206424f0fae18228c426" ], "markers": "python_full_version >= '3.8.1' and python_full_version < '4.0.0'", - "version": "==0.11.10" + "version": "==0.11.11" }, "taskiq-aio-pika": { "hashes": [ @@ -2757,71 +2764,72 @@ "toml" ], "hashes": [ - "sha256:05fca8ba6a87aabdd2d30d0b6c838b50510b56cdcfc604d40760dae7153b73d9", - "sha256:0aa9692b4fdd83a4647eeb7db46410ea1322b5ed94cd1715ef09d1d5922ba87f", - 
"sha256:0c807ca74d5a5e64427c8805de15b9ca140bba13572d6d74e262f46f50b13273", - "sha256:0d7a2bf79378d8fb8afaa994f91bfd8215134f8631d27eba3e0e2c13546ce994", - "sha256:0f460286cb94036455e703c66988851d970fdfd8acc2a1122ab7f4f904e4029e", - "sha256:204a8238afe787323a8b47d8be4df89772d5c1e4651b9ffa808552bdf20e1d50", - "sha256:2396e8116db77789f819d2bc8a7e200232b7a282c66e0ae2d2cd84581a89757e", - "sha256:254f1a3b1eef5f7ed23ef265eaa89c65c8c5b6b257327c149db1ca9d4a35f25e", - "sha256:26bcf5c4df41cad1b19c84af71c22cbc9ea9a547fc973f1f2cc9a290002c8b3c", - "sha256:27c6e64726b307782fa5cbe531e7647aee385a29b2107cd87ba7c0105a5d3853", - "sha256:299e91b274c5c9cdb64cbdf1b3e4a8fe538a7a86acdd08fae52301b28ba297f8", - "sha256:2bcfa46d7709b5a7ffe089075799b902020b62e7ee56ebaed2f4bdac04c508d8", - "sha256:2ccf240eb719789cedbb9fd1338055de2761088202a9a0b73032857e53f612fe", - "sha256:32ee6d8491fcfc82652a37109f69dee9a830e9379166cb73c16d8dc5c2915165", - "sha256:3f7b444c42bbc533aaae6b5a2166fd1a797cdb5eb58ee51a92bee1eb94a1e1cb", - "sha256:457574f4599d2b00f7f637a0700a6422243b3565509457b2dbd3f50703e11f59", - "sha256:489a01f94aa581dbd961f306e37d75d4ba16104bbfa2b0edb21d29b73be83609", - "sha256:4bcc276261505d82f0ad426870c3b12cb177752834a633e737ec5ee79bbdff18", - "sha256:4e0de1e902669dccbf80b0415fb6b43d27edca2fbd48c74da378923b05316098", - "sha256:4e4630c26b6084c9b3cb53b15bd488f30ceb50b73c35c5ad7871b869cb7365fd", - "sha256:4eea95ef275de7abaef630c9b2c002ffbc01918b726a39f5a4353916ec72d2f3", - "sha256:507a20fc863cae1d5720797761b42d2d87a04b3e5aeb682ef3b7332e90598f43", - "sha256:54a5f0f43950a36312155dae55c505a76cd7f2b12d26abeebbe7a0b36dbc868d", - "sha256:55b201b97286cf61f5e76063f9e2a1d8d2972fc2fcfd2c1272530172fd28c359", - "sha256:59af35558ba08b758aec4d56182b222976330ef8d2feacbb93964f576a7e7a90", - "sha256:5c912978f7fbf47ef99cec50c4401340436d200d41d714c7a4766f377c5b7b78", - "sha256:656c82b8a0ead8bba147de9a89bda95064874c91a3ed43a00e687f23cc19d53a", - "sha256:6713ba4b4ebc330f3def51df1d5d38fad60b66720948112f114968feb52d3f99", 
- "sha256:675cefc4c06e3b4c876b85bfb7c59c5e2218167bbd4da5075cbe3b5790a28988", - "sha256:6f93531882a5f68c28090f901b1d135de61b56331bba82028489bc51bdd818d2", - "sha256:714f942b9c15c3a7a5fe6876ce30af831c2ad4ce902410b7466b662358c852c0", - "sha256:79109c70cc0882e4d2d002fe69a24aa504dec0cc17169b3c7f41a1d341a73694", - "sha256:7bbd8c8f1b115b892e34ba66a097b915d3871db7ce0e6b9901f462ff3a975377", - "sha256:7ed2f37cfce1ce101e6dffdfd1c99e729dd2ffc291d02d3e2d0af8b53d13840d", - "sha256:7fb105327c8f8f0682e29843e2ff96af9dcbe5bab8eeb4b398c6a33a16d80a23", - "sha256:89d76815a26197c858f53c7f6a656686ec392b25991f9e409bcef020cd532312", - "sha256:9a7cfb50515f87f7ed30bc882f68812fd98bc2852957df69f3003d22a2aa0abf", - "sha256:9e1747bab246d6ff2c4f28b4d186b205adced9f7bd9dc362051cc37c4a0c7bd6", - "sha256:9e80eba8801c386f72e0712a0453431259c45c3249f0009aff537a517b52942b", - "sha256:a01ec4af7dfeb96ff0078ad9a48810bb0cc8abcb0115180c6013a6b26237626c", - "sha256:a372c89c939d57abe09e08c0578c1d212e7a678135d53aa16eec4430adc5e690", - "sha256:a3b204c11e2b2d883946fe1d97f89403aa1811df28ce0447439178cc7463448a", - "sha256:a534738b47b0de1995f85f582d983d94031dffb48ab86c95bdf88dc62212142f", - "sha256:a5e37dc41d57ceba70956fa2fc5b63c26dba863c946ace9705f8eca99daecdc4", - "sha256:aa744da1820678b475e4ba3dfd994c321c5b13381d1041fe9c608620e6676e25", - "sha256:ab32947f481f7e8c763fa2c92fd9f44eeb143e7610c4ca9ecd6a36adab4081bd", - "sha256:abb02e2f5a3187b2ac4cd46b8ced85a0858230b577ccb2c62c81482ca7d18852", - "sha256:b330368cb99ef72fcd2dc3ed260adf67b31499584dc8a20225e85bfe6f6cfed0", - "sha256:bc67deb76bc3717f22e765ab3e07ee9c7a5e26b9019ca19a3b063d9f4b874244", - "sha256:c0b1818063dc9e9d838c09e3a473c1422f517889436dd980f5d721899e66f315", - "sha256:c56e097019e72c373bae32d946ecf9858fda841e48d82df7e81c63ac25554078", - "sha256:c7827a5bc7bdb197b9e066cdf650b2887597ad124dd99777332776f7b7c7d0d0", - "sha256:ccc2b70a7ed475c68ceb548bf69cec1e27305c1c2606a5eb7c3afff56a1b3b27", - 
"sha256:d37a84878285b903c0fe21ac8794c6dab58150e9359f1aaebbeddd6412d53132", - "sha256:e2f0280519e42b0a17550072861e0bc8a80a0870de260f9796157d3fca2733c5", - "sha256:e4ae5ac5e0d1e4edfc9b4b57b4cbecd5bc266a6915c500f358817a8496739247", - "sha256:e67926f51821b8e9deb6426ff3164870976fe414d033ad90ea75e7ed0c2e5022", - "sha256:e78b270eadb5702938c3dbe9367f878249b5ef9a2fcc5360ac7bff694310d17b", - "sha256:ea3c8f04b3e4af80e17bab607c386a830ffc2fb88a5484e1df756478cf70d1d3", - "sha256:ec22b5e7fe7a0fa8509181c4aac1db48f3dd4d3a566131b313d1efc102892c18", - "sha256:f4f620668dbc6f5e909a0946a877310fb3d57aea8198bde792aae369ee1c23b5", - "sha256:fd34e7b3405f0cc7ab03d54a334c17a9e802897580d964bd8c2001f4b9fd488f" + "sha256:00b2086892cf06c7c2d74983c9595dc511acca00665480b3ddff749ec4fb2a95", + "sha256:0533adc29adf6a69c1baa88c3d7dbcaadcffa21afbed3ca7a225a440e4744bf9", + "sha256:06097c7abfa611c91edb9e6920264e5be1d6ceb374efb4986f38b09eed4cb2fe", + "sha256:07e92ae5a289a4bc4c0aae710c0948d3c7892e20fd3588224ebe242039573bf0", + "sha256:0a9d8be07fb0832636a0f72b80d2a652fe665e80e720301fb22b191c3434d924", + "sha256:0e549f54ac5f301e8e04c569dfdb907f7be71b06b88b5063ce9d6953d2d58574", + "sha256:0ef01d70198431719af0b1f5dcbefc557d44a190e749004042927b2a3fed0702", + "sha256:0f16f44025c06792e0fb09571ae454bcc7a3ec75eeb3c36b025eccf501b1a4c3", + "sha256:14d47376a4f445e9743f6c83291e60adb1b127607a3618e3185bbc8091f0467b", + "sha256:1a936309a65cc5ca80fa9f20a442ff9e2d06927ec9a4f54bcba9c14c066323f2", + "sha256:1ceeb90c3eda1f2d8c4c578c14167dbd8c674ecd7d38e45647543f19839dd6ea", + "sha256:1f7ffa05da41754e20512202c866d0ebfc440bba3b0ed15133070e20bf5aeb5f", + "sha256:200e10beb6ddd7c3ded322a4186313d5ca9e63e33d8fab4faa67ef46d3460af3", + "sha256:220fa6c0ad7d9caef57f2c8771918324563ef0d8272c94974717c3909664e674", + "sha256:2251fabcfee0a55a8578a9d29cecfee5f2de02f11530e7d5c5a05859aa85aee9", + "sha256:2458f275944db8129f95d91aee32c828a408481ecde3b30af31d552c2ce284a0", + "sha256:299cf973a7abff87a30609879c10df0b3bfc33d021e1adabc29138a48888841e", 
+ "sha256:2b996819ced9f7dbb812c701485d58f261bef08f9b85304d41219b1496b591ef", + "sha256:3688b99604a24492bcfe1c106278c45586eb819bf66a654d8a9a1433022fb2eb", + "sha256:3a1e465f398c713f1b212400b4e79a09829cd42aebd360362cd89c5bdc44eb87", + "sha256:488c27b3db0ebee97a830e6b5a3ea930c4a6e2c07f27a5e67e1b3532e76b9ef1", + "sha256:48cfc4641d95d34766ad41d9573cc0f22a48aa88d22657a1fe01dca0dbae4de2", + "sha256:4b467a8c56974bf06e543e69ad803c6865249d7a5ccf6980457ed2bc50312703", + "sha256:53c56358d470fa507a2b6e67a68fd002364d23c83741dbc4c2e0680d80ca227e", + "sha256:5d1095bbee1851269f79fd8e0c9b5544e4c00c0c24965e66d8cba2eb5bb535fd", + "sha256:641dfe0ab73deb7069fb972d4d9725bf11c239c309ce694dd50b1473c0f641c3", + "sha256:64cbb1a3027c79ca6310bf101014614f6e6e18c226474606cf725238cf5bc2d4", + "sha256:66fe626fd7aa5982cdebad23e49e78ef7dbb3e3c2a5960a2b53632f1f703ea45", + "sha256:676f92141e3c5492d2a1596d52287d0d963df21bf5e55c8b03075a60e1ddf8aa", + "sha256:69e62c5034291c845fc4df7f8155e8544178b6c774f97a99e2734b05eb5bed31", + "sha256:704c8c8c6ce6569286ae9622e534b4f5b9759b6f2cd643f1c1a61f666d534fe8", + "sha256:78f5243bb6b1060aed6213d5107744c19f9571ec76d54c99cc15938eb69e0e86", + "sha256:79cac3390bfa9836bb795be377395f28410811c9066bc4eefd8015258a7578c6", + "sha256:7ae6eabf519bc7871ce117fb18bf14e0e343eeb96c377667e3e5dd12095e0288", + "sha256:7e39e845c4d764208e7b8f6a21c541ade741e2c41afabdfa1caa28687a3c98cf", + "sha256:8161d9fbc7e9fe2326de89cd0abb9f3599bccc1287db0aba285cb68d204ce929", + "sha256:8bec2ac5da793c2685ce5319ca9bcf4eee683b8a1679051f8e6ec04c4f2fd7dc", + "sha256:959244a17184515f8c52dcb65fb662808767c0bd233c1d8a166e7cf74c9ea985", + "sha256:9b148068e881faa26d878ff63e79650e208e95cf1c22bd3f77c3ca7b1d9821a3", + "sha256:aa6f302a3a0b5f240ee201297fff0bbfe2fa0d415a94aeb257d8b461032389bd", + "sha256:ace9048de91293e467b44bce0f0381345078389814ff6e18dbac8fdbf896360e", + "sha256:ad7525bf0241e5502168ae9c643a2f6c219fa0a283001cee4cf23a9b7da75879", + 
"sha256:b01a840ecc25dce235ae4c1b6a0daefb2a203dba0e6e980637ee9c2f6ee0df57", + "sha256:b076e625396e787448d27a411aefff867db2bffac8ed04e8f7056b07024eed5a", + "sha256:b172f8e030e8ef247b3104902cc671e20df80163b60a203653150d2fc204d1ad", + "sha256:b1f097878d74fe51e1ddd1be62d8e3682748875b461232cf4b52ddc6e6db0bba", + "sha256:b95574d06aa9d2bd6e5cc35a5bbe35696342c96760b69dc4287dbd5abd4ad51d", + "sha256:bda1c5f347550c359f841d6614fb8ca42ae5cb0b74d39f8a1e204815ebe25750", + "sha256:cec6b9ce3bd2b7853d4a4563801292bfee40b030c05a3d29555fd2a8ee9bd68c", + "sha256:d1a987778b9c71da2fc8948e6f2656da6ef68f59298b7e9786849634c35d2c3c", + "sha256:d74c08e9aaef995f8c4ef6d202dbd219c318450fe2a76da624f2ebb9c8ec5d9f", + "sha256:e18aafdfb3e9ec0d261c942d35bd7c28d031c5855dadb491d2723ba54f4c3015", + "sha256:e216c5c45f89ef8971373fd1c5d8d1164b81f7f5f06bbf23c37e7908d19e8558", + "sha256:e695df2c58ce526eeab11a2e915448d3eb76f75dffe338ea613c1201b33bab2f", + "sha256:e7575ab65ca8399c8c4f9a7d61bbd2d204c8b8e447aab9d355682205c9dd948d", + "sha256:e995b3b76ccedc27fe4f477b349b7d64597e53a43fc2961db9d3fbace085d69d", + "sha256:ea31689f05043d520113e0552f039603c4dd71fa4c287b64cb3606140c66f425", + "sha256:eb5507795caabd9b2ae3f1adc95f67b1104971c22c624bb354232d65c4fc90b3", + "sha256:eb8668cfbc279a536c633137deeb9435d2962caec279c3f8cf8b91fff6ff8953", + "sha256:ecea0c38c9079570163d663c0433a9af4094a60aafdca491c6a3d248c7432827", + "sha256:f25d8b92a4e31ff1bd873654ec367ae811b3a943583e05432ea29264782dc32c", + "sha256:f60a297c3987c6c02ffb29effc70eadcbb412fe76947d394a1091a3615948e2f", + "sha256:f973643ef532d4f9be71dd88cf7588936685fdb576d93a79fe9f65bc337d9d73" ], "markers": "python_version >= '3.9'", - "version": "==7.6.10" + "version": "==7.6.12" }, "decorator": { "hashes": [ @@ -2855,11 +2863,11 @@ }, "faker": { "hashes": [ - "sha256:28c24061780f83b45d9cb15a72b8f143b09d276c9ff52eb557744b7a89e8ba19", - "sha256:609abe555761ff31b0e5e16f958696e9b65c9224a7ac612ac96bfc2b8f09fe35" + 
"sha256:aa0b93487d3adf7cd89953d172e3df896cb7b35d8a5222c0da873edbe2f7adf5", + "sha256:f40510350aecfe006f45cb3f8879b35e861367cf347f51a7f2ca2c0571fdcc0b" ], - "markers": "python_version >= '3.8'", - "version": "==35.2.0" + "markers": "python_version >= '3.9'", + "version": "==36.1.0" }, "filelock": { "hashes": [ @@ -2916,51 +2924,48 @@ }, "gevent": { "hashes": [ - "sha256:03aa5879acd6b7076f6a2a307410fb1e0d288b84b03cdfd8c74db8b4bc882fc5", - "sha256:117e5837bc74a1673605fb53f8bfe22feb6e5afa411f524c835b2ddf768db0de", - "sha256:141a2b24ad14f7b9576965c0c84927fc85f824a9bb19f6ec1e61e845d87c9cd8", - "sha256:14532a67f7cb29fb055a0e9b39f16b88ed22c66b96641df8c04bdc38c26b9ea5", - "sha256:1dffb395e500613e0452b9503153f8f7ba587c67dd4a85fc7cd7aa7430cb02cc", - "sha256:2955eea9c44c842c626feebf4459c42ce168685aa99594e049d03bedf53c2800", - "sha256:2ae3a25ecce0a5b0cd0808ab716bfca180230112bb4bc89b46ae0061d62d4afe", - "sha256:2e9ac06f225b696cdedbb22f9e805e2dd87bf82e8fa5e17756f94e88a9d37cf7", - "sha256:368a277bd9278ddb0fde308e6a43f544222d76ed0c4166e0d9f6b036586819d9", - "sha256:3adfb96637f44010be8abd1b5e73b5070f851b817a0b182e601202f20fa06533", - "sha256:3d5325ccfadfd3dcf72ff88a92fb8fc0b56cacc7225f0f4b6dcf186c1a6eeabc", - "sha256:432fc76f680acf7cf188c2ee0f5d3ab73b63c1f03114c7cd8a34cebbe5aa2056", - "sha256:44098038d5e2749b0784aabb27f1fcbb3f43edebedf64d0af0d26955611be8d6", - "sha256:5a1df555431f5cd5cc189a6ee3544d24f8c52f2529134685f1e878c4972ab026", - "sha256:6c47ae7d1174617b3509f5d884935e788f325eb8f1a7efc95d295c68d83cce40", - "sha256:6f947a9abc1a129858391b3d9334c45041c08a0f23d14333d5b844b6e5c17a07", - "sha256:782a771424fe74bc7e75c228a1da671578c2ba4ddb2ca09b8f959abdf787331e", - "sha256:7899a38d0ae7e817e99adb217f586d0a4620e315e4de577444ebeeed2c5729be", - "sha256:7b00f8c9065de3ad226f7979154a7b27f3b9151c8055c162332369262fc025d8", - "sha256:8f4b8e777d39013595a7740b4463e61b1cfe5f462f1b609b28fbc1e4c4ff01e5", - "sha256:90cbac1ec05b305a1b90ede61ef73126afdeb5a804ae04480d6da12c56378df1", - 
"sha256:918cdf8751b24986f915d743225ad6b702f83e1106e08a63b736e3a4c6ead789", - "sha256:9202f22ef811053077d01f43cc02b4aaf4472792f9fd0f5081b0b05c926cca19", - "sha256:94138682e68ec197db42ad7442d3cf9b328069c3ad8e4e5022e6b5cd3e7ffae5", - "sha256:968581d1717bbcf170758580f5f97a2925854943c45a19be4d47299507db2eb7", - "sha256:9d8d0642c63d453179058abc4143e30718b19a85cbf58c2744c9a63f06a1d388", - "sha256:a7ceb59986456ce851160867ce4929edaffbd2f069ae25717150199f8e1548b8", - "sha256:b9913c45d1be52d7a5db0c63977eebb51f68a2d5e6fd922d1d9b5e5fd758cc98", - "sha256:bde283313daf0b34a8d1bab30325f5cb0f4e11b5869dbe5bc61f8fe09a8f66f3", - "sha256:bf5b9c72b884c6f0c4ed26ef204ee1f768b9437330422492c319470954bc4cc7", - "sha256:ca80b121bbec76d7794fcb45e65a7eca660a76cc1a104ed439cdbd7df5f0b060", - "sha256:cdf66977a976d6a3cfb006afdf825d1482f84f7b81179db33941f2fc9673bb1d", - "sha256:d4faf846ed132fd7ebfbbf4fde588a62d21faa0faa06e6f468b7faa6f436b661", - "sha256:d7f87c2c02e03d99b95cfa6f7a776409083a9e4d468912e18c7680437b29222c", - "sha256:dd23df885318391856415e20acfd51a985cba6919f0be78ed89f5db9ff3a31cb", - "sha256:f5de3c676e57177b38857f6e3cdfbe8f38d1cd754b63200c0615eaa31f514b4f", - "sha256:f5e8e8d60e18d5f7fd49983f0c4696deeddaf6e608fbab33397671e2fcc6cc91", - "sha256:f7cac622e11b4253ac4536a654fe221249065d9a69feb6cdcd4d9af3503602e0", - "sha256:f8a04cf0c5b7139bc6368b461257d4a757ea2fe89b3773e494d235b7dd51119f", - "sha256:f8bb35ce57a63c9a6896c71a285818a3922d8ca05d150fd1fe49a7f57287b836", - "sha256:fbfdce91239fe306772faab57597186710d5699213f4df099d1612da7320d682" + "sha256:1c3443b0ed23dcb7c36a748d42587168672953d368f2956b17fad36d43b58836", + "sha256:1d4fadc319b13ef0a3c44d2792f7918cf1bca27cacd4d41431c22e6b46668026", + "sha256:1ea50009ecb7f1327347c37e9eb6561bdbc7de290769ee1404107b9a9cba7cf1", + "sha256:2142704c2adce9cd92f6600f371afb2860a446bfd0be5bd86cca5b3e12130766", + "sha256:351d1c0e4ef2b618ace74c91b9b28b3eaa0dd45141878a964e03c7873af09f62", + "sha256:356b73d52a227d3313f8f828025b665deada57a43d02b1cf54e5d39028dbcf8d", 
+ "sha256:3d882faa24f347f761f934786dde6c73aa6c9187ee710189f12dcc3a63ed4a50", + "sha256:58851f23c4bdb70390f10fc020c973ffcf409eb1664086792c8b1e20f25eef43", + "sha256:68bee86b6e1c041a187347ef84cf03a792f0b6c7238378bf6ba4118af11feaae", + "sha256:7398c629d43b1b6fd785db8ebd46c0a353880a6fab03d1cf9b6788e7240ee32e", + "sha256:816b3883fa6842c1cf9d2786722014a0fd31b6312cca1f749890b9803000bad6", + "sha256:81d918e952954675f93fb39001da02113ec4d5f4921bf5a0cc29719af6824e5d", + "sha256:85329d556aaedced90a993226d7d1186a539c843100d393f2349b28c55131c85", + "sha256:8619d5c888cb7aebf9aec6703e410620ef5ad48cdc2d813dd606f8aa7ace675f", + "sha256:8bd1419114e9e4a3ed33a5bad766afff9a3cf765cb440a582a1b3a9bc80c1aca", + "sha256:92e0d7759de2450a501effd99374256b26359e801b2d8bf3eedd3751973e87f5", + "sha256:92fe5dfee4e671c74ffaa431fd7ffd0ebb4b339363d24d0d944de532409b935e", + "sha256:97e2f3999a5c0656f42065d02939d64fffaf55861f7d62b0107a08f52c984897", + "sha256:9d3b249e4e1f40c598ab8393fc01ae6a3b4d51fc1adae56d9ba5b315f6b2d758", + "sha256:a3d75fa387b69c751a3d7c5c3ce7092a171555126e136c1d21ecd8b50c7a6e46", + "sha256:a5f1701ce0f7832f333dd2faf624484cbac99e60656bfbb72504decd42970f0f", + "sha256:b24d800328c39456534e3bc3e1684a28747729082684634789c2f5a8febe7671", + "sha256:b5efe72e99b7243e222ba0c2c2ce9618d7d36644c166d63373af239da1036bab", + "sha256:b7bfcfe08d038e1fa6de458891bca65c1ada6d145474274285822896a858c870", + "sha256:beede1d1cff0c6fafae3ab58a0c470d7526196ef4cd6cc18e7769f207f2ea4eb", + "sha256:c6b775381f805ff5faf250e3a07c0819529571d19bb2a9d474bee8c3f90d66af", + "sha256:c9c935b83d40c748b6421625465b7308d87c7b3717275acd587eef2bd1c39546", + "sha256:ca845138965c8c56d1550499d6b923eb1a2331acfa9e13b817ad8305dde83d11", + "sha256:d618e118fdb7af1d6c1a96597a5cd6ac84a9f3732b5be8515c6a66e098d498b6", + "sha256:d6c0a065e31ef04658f799215dddae8752d636de2bed61365c358f9c91e7af61", + "sha256:d740206e69dfdfdcd34510c20adcb9777ce2cc18973b3441ab9767cd8948ca8a", + 
"sha256:d7886b63ebfb865178ab28784accd32f287d5349b3ed71094c86e4d3ca738af5", + "sha256:d9347690f4e53de2c4af74e62d6fabc940b6d4a6cad555b5a379f61e7d3f2a8e", + "sha256:d9ca80711e6553880974898d99357fb649e062f9058418a92120ca06c18c3c59", + "sha256:e24181d172f50097ac8fc272c8c5b030149b630df02d1c639ee9f878a470ba2b", + "sha256:ec68e270543ecd532c4c1d70fca020f90aa5486ad49c4f3b8b2e64a66f5c9274", + "sha256:f43f47e702d0c8e1b8b997c00f1601486f9f976f84ab704f8f11536e3fa144c9", + "sha256:ff96c5739834c9a594db0e12bf59cb3fa0e5102fc7b893972118a3166733d61c" ], "index": "pypi", - "markers": "python_version >= '3.8'", - "version": "==24.2.1" + "markers": "python_version >= '3.9'", + "version": "==24.11.1" }, "greenlet": { "hashes": [ @@ -3051,11 +3056,11 @@ }, "identify": { "hashes": [ - "sha256:7bec12768ed44ea4761efb47806f0a41f86e7c0a5fdf5950d4648c90eca7e251", - "sha256:cbd1810bce79f8b671ecb20f53ee0ae8e86ae84b557de31d89709dc2a48ba881" + "sha256:155931cb617a401807b09ecec6635d6c692d180090a1cedca8ef7d58ba5b6aa0", + "sha256:3fa266b42eba321ee0b2bb0936a6a6b9e36a1351cbb69055b3082f4193035684" ], "markers": "python_version >= '3.9'", - "version": "==2.6.6" + "version": "==2.6.7" }, "idna": { "hashes": [ @@ -3100,147 +3105,147 @@ }, "lxml": { "hashes": [ - "sha256:01220dca0d066d1349bd6a1726856a78f7929f3878f7e2ee83c296c69495309e", - "sha256:02ced472497b8362c8e902ade23e3300479f4f43e45f4105c85ef43b8db85229", - "sha256:052d99051e77a4f3e8482c65014cf6372e61b0a6f4fe9edb98503bb5364cfee3", - "sha256:07da23d7ee08577760f0a71d67a861019103e4812c87e2fab26b039054594cc5", - "sha256:094cb601ba9f55296774c2d57ad68730daa0b13dc260e1f941b4d13678239e70", - "sha256:0a7056921edbdd7560746f4221dca89bb7a3fe457d3d74267995253f46343f15", - "sha256:0c120f43553ec759f8de1fee2f4794452b0946773299d44c36bfe18e83caf002", - "sha256:0d7b36afa46c97875303a94e8f3ad932bf78bace9e18e603f2085b652422edcd", - "sha256:0fdf3a3059611f7585a78ee10399a15566356116a4288380921a4b598d807a22", - 
"sha256:109fa6fede314cc50eed29e6e56c540075e63d922455346f11e4d7a036d2b8cf", - "sha256:146173654d79eb1fc97498b4280c1d3e1e5d58c398fa530905c9ea50ea849b22", - "sha256:1473427aff3d66a3fa2199004c3e601e6c4500ab86696edffdbc84954c72d832", - "sha256:1483fd3358963cc5c1c9b122c80606a3a79ee0875bcac0204149fa09d6ff2727", - "sha256:168f2dfcfdedf611eb285efac1516c8454c8c99caf271dccda8943576b67552e", - "sha256:17e8d968d04a37c50ad9c456a286b525d78c4a1c15dd53aa46c1d8e06bf6fa30", - "sha256:18feb4b93302091b1541221196a2155aa296c363fd233814fa11e181adebc52f", - "sha256:1afe0a8c353746e610bd9031a630a95bcfb1a720684c3f2b36c4710a0a96528f", - "sha256:1d04f064bebdfef9240478f7a779e8c5dc32b8b7b0b2fc6a62e39b928d428e51", - "sha256:1fdc9fae8dd4c763e8a31e7630afef517eab9f5d5d31a278df087f307bf601f4", - "sha256:1ffc23010330c2ab67fac02781df60998ca8fe759e8efde6f8b756a20599c5de", - "sha256:20094fc3f21ea0a8669dc4c61ed7fa8263bd37d97d93b90f28fc613371e7a875", - "sha256:213261f168c5e1d9b7535a67e68b1f59f92398dd17a56d934550837143f79c42", - "sha256:218c1b2e17a710e363855594230f44060e2025b05c80d1f0661258142b2add2e", - "sha256:23e0553b8055600b3bf4a00b255ec5c92e1e4aebf8c2c09334f8368e8bd174d6", - "sha256:25f1b69d41656b05885aa185f5fdf822cb01a586d1b32739633679699f220391", - "sha256:2b3778cb38212f52fac9fe913017deea2fdf4eb1a4f8e4cfc6b009a13a6d3fcc", - "sha256:2bc9fd5ca4729af796f9f59cd8ff160fe06a474da40aca03fcc79655ddee1a8b", - "sha256:2c226a06ecb8cdef28845ae976da407917542c5e6e75dcac7cc33eb04aaeb237", - "sha256:2c3406b63232fc7e9b8783ab0b765d7c59e7c59ff96759d8ef9632fca27c7ee4", - "sha256:2c86bf781b12ba417f64f3422cfc302523ac9cd1d8ae8c0f92a1c66e56ef2e86", - "sha256:2d9b8d9177afaef80c53c0a9e30fa252ff3036fb1c6494d427c066a4ce6a282f", - "sha256:2dec2d1130a9cda5b904696cec33b2cfb451304ba9081eeda7f90f724097300a", - "sha256:2dfab5fa6a28a0b60a20638dc48e6343c02ea9933e3279ccb132f555a62323d8", - "sha256:2ecdd78ab768f844c7a1d4a03595038c166b609f6395e25af9b0f3f26ae1230f", - "sha256:315f9542011b2c4e1d280e4a20ddcca1761993dda3afc7a73b01235f8641e903", 
- "sha256:36aef61a1678cb778097b4a6eeae96a69875d51d1e8f4d4b491ab3cfb54b5a03", - "sha256:384aacddf2e5813a36495233b64cb96b1949da72bef933918ba5c84e06af8f0e", - "sha256:3879cc6ce938ff4eb4900d901ed63555c778731a96365e53fadb36437a131a99", - "sha256:3c174dc350d3ec52deb77f2faf05c439331d6ed5e702fc247ccb4e6b62d884b7", - "sha256:3eb44520c4724c2e1a57c0af33a379eee41792595023f367ba3952a2d96c2aab", - "sha256:406246b96d552e0503e17a1006fd27edac678b3fcc9f1be71a2f94b4ff61528d", - "sha256:41ce1f1e2c7755abfc7e759dc34d7d05fd221723ff822947132dc934d122fe22", - "sha256:423b121f7e6fa514ba0c7918e56955a1d4470ed35faa03e3d9f0e3baa4c7e492", - "sha256:44264ecae91b30e5633013fb66f6ddd05c006d3e0e884f75ce0b4755b3e3847b", - "sha256:482c2f67761868f0108b1743098640fbb2a28a8e15bf3f47ada9fa59d9fe08c3", - "sha256:4b0c7a688944891086ba192e21c5229dea54382f4836a209ff8d0a660fac06be", - "sha256:4c1fefd7e3d00921c44dc9ca80a775af49698bbfd92ea84498e56acffd4c5469", - "sha256:4e109ca30d1edec1ac60cdbe341905dc3b8f55b16855e03a54aaf59e51ec8c6f", - "sha256:501d0d7e26b4d261fca8132854d845e4988097611ba2531408ec91cf3fd9d20a", - "sha256:516f491c834eb320d6c843156440fe7fc0d50b33e44387fcec5b02f0bc118a4c", - "sha256:51806cfe0279e06ed8500ce19479d757db42a30fd509940b1701be9c86a5ff9a", - "sha256:562e7494778a69086f0312ec9689f6b6ac1c6b65670ed7d0267e49f57ffa08c4", - "sha256:56b9861a71575f5795bde89256e7467ece3d339c9b43141dbdd54544566b3b94", - "sha256:5b8f5db71b28b8c404956ddf79575ea77aa8b1538e8b2ef9ec877945b3f46442", - "sha256:5c2fb570d7823c2bbaf8b419ba6e5662137f8166e364a8b2b91051a1fb40ab8b", - "sha256:5c54afdcbb0182d06836cc3d1be921e540be3ebdf8b8a51ee3ef987537455f84", - "sha256:5d6a6972b93c426ace71e0be9a6f4b2cfae9b1baed2eed2006076a746692288c", - "sha256:609251a0ca4770e5a8768ff902aa02bf636339c5a93f9349b48eb1f606f7f3e9", - "sha256:62d172f358f33a26d6b41b28c170c63886742f5b6772a42b59b4f0fa10526cb1", - "sha256:62f7fdb0d1ed2065451f086519865b4c90aa19aed51081979ecd05a21eb4d1be", - 
"sha256:658f2aa69d31e09699705949b5fc4719cbecbd4a97f9656a232e7d6c7be1a367", - "sha256:65ab5685d56914b9a2a34d67dd5488b83213d680b0c5d10b47f81da5a16b0b0e", - "sha256:68934b242c51eb02907c5b81d138cb977b2129a0a75a8f8b60b01cb8586c7b21", - "sha256:68b87753c784d6acb8a25b05cb526c3406913c9d988d51f80adecc2b0775d6aa", - "sha256:69959bd3167b993e6e710b99051265654133a98f20cec1d9b493b931942e9c16", - "sha256:6a7095eeec6f89111d03dabfe5883a1fd54da319c94e0fb104ee8f23616b572d", - "sha256:6b038cc86b285e4f9fea2ba5ee76e89f21ed1ea898e287dc277a25884f3a7dfe", - "sha256:6ba0d3dcac281aad8a0e5b14c7ed6f9fa89c8612b47939fc94f80b16e2e9bc83", - "sha256:6e91cf736959057f7aac7adfc83481e03615a8e8dd5758aa1d95ea69e8931dba", - "sha256:6ee8c39582d2652dcd516d1b879451500f8db3fe3607ce45d7c5957ab2596040", - "sha256:6f651ebd0b21ec65dfca93aa629610a0dbc13dbc13554f19b0113da2e61a4763", - "sha256:71a8dd38fbd2f2319136d4ae855a7078c69c9a38ae06e0c17c73fd70fc6caad8", - "sha256:74068c601baff6ff021c70f0935b0c7bc528baa8ea210c202e03757c68c5a4ff", - "sha256:7437237c6a66b7ca341e868cda48be24b8701862757426852c9b3186de1da8a2", - "sha256:747a3d3e98e24597981ca0be0fd922aebd471fa99d0043a3842d00cdcad7ad6a", - "sha256:74bcb423462233bc5d6066e4e98b0264e7c1bed7541fff2f4e34fe6b21563c8b", - "sha256:78d9b952e07aed35fe2e1a7ad26e929595412db48535921c5013edc8aa4a35ce", - "sha256:7b1cd427cb0d5f7393c31b7496419da594fe600e6fdc4b105a54f82405e6626c", - "sha256:7d3d1ca42870cdb6d0d29939630dbe48fa511c203724820fc0fd507b2fb46577", - "sha256:7e2f58095acc211eb9d8b5771bf04df9ff37d6b87618d1cbf85f92399c98dae8", - "sha256:7f41026c1d64043a36fda21d64c5026762d53a77043e73e94b71f0521939cc71", - "sha256:81b4e48da4c69313192d8c8d4311e5d818b8be1afe68ee20f6385d0e96fc9512", - "sha256:86a6b24b19eaebc448dc56b87c4865527855145d851f9fc3891673ff97950540", - "sha256:874a216bf6afaf97c263b56371434e47e2c652d215788396f60477540298218f", - "sha256:89e043f1d9d341c52bf2af6d02e6adde62e0a46e6755d5eb60dc6e4f0b8aeca2", - "sha256:8c72e9563347c7395910de6a3100a4840a75a6f60e05af5e58566868d5eb2d6a", 
- "sha256:8dc2c0395bea8254d8daebc76dcf8eb3a95ec2a46fa6fae5eaccee366bfe02ce", - "sha256:8f0de2d390af441fe8b2c12626d103540b5d850d585b18fcada58d972b74a74e", - "sha256:92e67a0be1639c251d21e35fe74df6bcc40cba445c2cda7c4a967656733249e2", - "sha256:94d6c3782907b5e40e21cadf94b13b0842ac421192f26b84c45f13f3c9d5dc27", - "sha256:97acf1e1fd66ab53dacd2c35b319d7e548380c2e9e8c54525c6e76d21b1ae3b1", - "sha256:9ada35dd21dc6c039259596b358caab6b13f4db4d4a7f8665764d616daf9cc1d", - "sha256:9c52100e2c2dbb0649b90467935c4b0de5528833c76a35ea1a2691ec9f1ee7a1", - "sha256:9e41506fec7a7f9405b14aa2d5c8abbb4dbbd09d88f9496958b6d00cb4d45330", - "sha256:9e4b47ac0f5e749cfc618efdf4726269441014ae1d5583e047b452a32e221920", - "sha256:9fb81d2824dff4f2e297a276297e9031f46d2682cafc484f49de182aa5e5df99", - "sha256:a0eabd0a81625049c5df745209dc7fcef6e2aea7793e5f003ba363610aa0a3ff", - "sha256:a3d819eb6f9b8677f57f9664265d0a10dd6551d227afb4af2b9cd7bdc2ccbf18", - "sha256:a87de7dd873bf9a792bf1e58b1c3887b9264036629a5bf2d2e6579fe8e73edff", - "sha256:aa617107a410245b8660028a7483b68e7914304a6d4882b5ff3d2d3eb5948d8c", - "sha256:aac0bbd3e8dd2d9c45ceb82249e8bdd3ac99131a32b4d35c8af3cc9db1657179", - "sha256:ab6dd83b970dc97c2d10bc71aa925b84788c7c05de30241b9e96f9b6d9ea3080", - "sha256:ace2c2326a319a0bb8a8b0e5b570c764962e95818de9f259ce814ee666603f19", - "sha256:ae5fe5c4b525aa82b8076c1a59d642c17b6e8739ecf852522c6321852178119d", - "sha256:b11a5d918a6216e521c715b02749240fb07ae5a1fefd4b7bf12f833bc8b4fe70", - "sha256:b1c8c20847b9f34e98080da785bb2336ea982e7f913eed5809e5a3c872900f32", - "sha256:b369d3db3c22ed14c75ccd5af429086f166a19627e84a8fdade3f8f31426e52a", - "sha256:b710bc2b8292966b23a6a0121f7a6c51d45d2347edcc75f016ac123b8054d3f2", - "sha256:bd96517ef76c8654446fc3db9242d019a1bb5fe8b751ba414765d59f99210b79", - "sha256:c00f323cc00576df6165cc9d21a4c21285fa6b9989c5c39830c3903dc4303ef3", - "sha256:c162b216070f280fa7da844531169be0baf9ccb17263cf5a8bf876fcd3117fa5", - 
"sha256:c1a69e58a6bb2de65902051d57fde951febad631a20a64572677a1052690482f", - "sha256:c1f794c02903c2824fccce5b20c339a1a14b114e83b306ff11b597c5f71a1c8d", - "sha256:c24037349665434f375645fa9d1f5304800cec574d0310f618490c871fd902b3", - "sha256:c300306673aa0f3ed5ed9372b21867690a17dba38c68c44b287437c362ce486b", - "sha256:c56a1d43b2f9ee4786e4658c7903f05da35b923fb53c11025712562d5cc02753", - "sha256:c6379f35350b655fd817cd0d6cbeef7f265f3ae5fedb1caae2eb442bbeae9ab9", - "sha256:c802e1c2ed9f0c06a65bc4ed0189d000ada8049312cfeab6ca635e39c9608957", - "sha256:cb83f8a875b3d9b458cada4f880fa498646874ba4011dc974e071a0a84a1b033", - "sha256:cf120cce539453ae086eacc0130a324e7026113510efa83ab42ef3fcfccac7fb", - "sha256:dd36439be765e2dde7660212b5275641edbc813e7b24668831a5c8ac91180656", - "sha256:dd5350b55f9fecddc51385463a4f67a5da829bc741e38cf689f38ec9023f54ab", - "sha256:df5c7333167b9674aa8ae1d4008fa4bc17a313cc490b2cca27838bbdcc6bb15b", - "sha256:e63601ad5cd8f860aa99d109889b5ac34de571c7ee902d6812d5d9ddcc77fa7d", - "sha256:e92ce66cd919d18d14b3856906a61d3f6b6a8500e0794142338da644260595cd", - "sha256:e99f5507401436fdcc85036a2e7dc2e28d962550afe1cbfc07c40e454256a859", - "sha256:ea2e2f6f801696ad7de8aec061044d6c8c0dd4037608c7cab38a9a4d316bfb11", - "sha256:eafa2c8658f4e560b098fe9fc54539f86528651f61849b22111a9b107d18910c", - "sha256:ecd4ad8453ac17bc7ba3868371bffb46f628161ad0eefbd0a855d2c8c32dd81a", - "sha256:ee70d08fd60c9565ba8190f41a46a54096afa0eeb8f76bd66f2c25d3b1b83005", - "sha256:eec1bb8cdbba2925bedc887bc0609a80e599c75b12d87ae42ac23fd199445654", - "sha256:ef0c1fe22171dd7c7c27147f2e9c3e86f8bdf473fed75f16b0c2e84a5030ce80", - "sha256:f2901429da1e645ce548bf9171784c0f74f0718c3f6150ce166be39e4dd66c3e", - "sha256:f422a209d2455c56849442ae42f25dbaaba1c6c3f501d58761c619c7836642ec", - "sha256:f65e5120863c2b266dbcc927b306c5b78e502c71edf3295dfcb9501ec96e5fc7", - "sha256:f7d4a670107d75dfe5ad080bed6c341d18c4442f9378c9f58e5851e86eb79965", - "sha256:f914c03e6a31deb632e2daa881fe198461f4d06e57ac3d0e05bbcab8eae01945", 
- "sha256:fb66442c2546446944437df74379e9cf9e9db353e61301d1a0e26482f43f0dd8" + "sha256:016b96c58e9a4528219bb563acf1aaaa8bc5452e7651004894a973f03b84ba81", + "sha256:05123fad495a429f123307ac6d8fd6f977b71e9a0b6d9aeeb8f80c017cb17131", + "sha256:057e30d0012439bc54ca427a83d458752ccda725c1c161cc283db07bcad43cf9", + "sha256:06a20d607a86fccab2fc15a77aa445f2bdef7b49ec0520a842c5c5afd8381576", + "sha256:094b28ed8a8a072b9e9e2113a81fda668d2053f2ca9f2d202c2c8c7c2d6516b1", + "sha256:0bcfadea3cdc68e678d2b20cb16a16716887dd00a881e16f7d806c2138b8ff0c", + "sha256:0d6b2fa86becfa81f0a0271ccb9eb127ad45fb597733a77b92e8a35e53414914", + "sha256:0f2cfae0688fd01f7056a17367e3b84f37c545fb447d7282cf2c242b16262607", + "sha256:106b7b5d2977b339f1e97efe2778e2ab20e99994cbb0ec5e55771ed0795920c8", + "sha256:133f3493253a00db2c870d3740bc458ebb7d937bd0a6a4f9328373e0db305709", + "sha256:136bf638d92848a939fd8f0e06fcf92d9f2e4b57969d94faae27c55f3d85c05b", + "sha256:155e1a5693cf4b55af652f5c0f78ef36596c7f680ff3ec6eb4d7d85367259b2c", + "sha256:1637fa31ec682cd5760092adfabe86d9b718a75d43e65e211d5931809bc111e7", + "sha256:172d65f7c72a35a6879217bcdb4bb11bc88d55fb4879e7569f55616062d387c2", + "sha256:17b5d7f8acf809465086d498d62a981fa6a56d2718135bb0e4aa48c502055f5c", + "sha256:198bb4b4dd888e8390afa4f170d4fa28467a7eaf857f1952589f16cfbb67af27", + "sha256:1b6f92e35e2658a5ed51c6634ceb5ddae32053182851d8cad2a5bc102a359b33", + "sha256:1b92fe86e04f680b848fff594a908edfa72b31bfc3499ef7433790c11d4c8cd8", + "sha256:1bcc211542f7af6f2dfb705f5f8b74e865592778e6cafdfd19c792c244ccce19", + "sha256:1c93ed3c998ea8472be98fb55aed65b5198740bfceaec07b2eba551e55b7b9ae", + "sha256:203b1d3eaebd34277be06a3eb880050f18a4e4d60861efba4fb946e31071a295", + "sha256:22ec2b3c191f43ed21f9545e9df94c37c6b49a5af0a874008ddc9132d49a2d9c", + "sha256:231cf4d140b22a923b1d0a0a4e0b4f972e5893efcdec188934cc65888fd0227b", + "sha256:236610b77589faf462337b3305a1be91756c8abc5a45ff7ca8f245a71c5dab70", + 
"sha256:29bfc8d3d88e56ea0a27e7c4897b642706840247f59f4377d81be8f32aa0cfbf", + "sha256:2b8969dbc8d09d9cd2ae06362c3bad27d03f433252601ef658a49bd9f2b22d79", + "sha256:2dd0b80ac2d8f13ffc906123a6f20b459cb50a99222d0da492360512f3e50f84", + "sha256:2df7ed5edeb6bd5590914cd61df76eb6cce9d590ed04ec7c183cf5509f73530d", + "sha256:2e4a570f6a99e96c457f7bec5ad459c9c420ee80b99eb04cbfcfe3fc18ec6423", + "sha256:2f1be45d4c15f237209bbf123a0e05b5d630c8717c42f59f31ea9eae2ad89394", + "sha256:2f23cf50eccb3255b6e913188291af0150d89dab44137a69e14e4dcb7be981f1", + "sha256:3031e4c16b59424e8d78522c69b062d301d951dc55ad8685736c3335a97fc270", + "sha256:33e06717c00c788ab4e79bc4726ecc50c54b9bfb55355eae21473c145d83c2d2", + "sha256:364de8f57d6eda0c16dcfb999af902da31396949efa0e583e12675d09709881b", + "sha256:3715cdf0dd31b836433af9ee9197af10e3df41d273c19bb249230043667a5dfd", + "sha256:3bb8149840daf2c3f97cebf00e4ed4a65a0baff888bf2605a8d0135ff5cf764e", + "sha256:3c3c8b55c7fc7b7e8877b9366568cc73d68b82da7fe33d8b98527b73857a225f", + "sha256:3d68eeef7b4d08a25e51897dac29bcb62aba830e9ac6c4e3297ee7c6a0cf6439", + "sha256:3dddf0fb832486cc1ea71d189cb92eb887826e8deebe128884e15020bb6e3f61", + "sha256:3edbb9c9130bac05d8c3fe150c51c337a471cc7fdb6d2a0a7d3a88e88a829314", + "sha256:3effe081b3135237da6e4c4530ff2a868d3f80be0bda027e118a5971285d42d0", + "sha256:422c179022ecdedbe58b0e242607198580804253da220e9454ffe848daa1cfd2", + "sha256:42978a68d3825eaac55399eb37a4d52012a205c0c6262199b8b44fcc6fd686e8", + "sha256:4399b4226c4785575fb20998dc571bc48125dc92c367ce2602d0d70e0c455eb0", + "sha256:45fbb70ccbc8683f2fb58bea89498a7274af1d9ec7995e9f4af5604e028233fc", + "sha256:4867361c049761a56bd21de507cab2c2a608c55102311d142ade7dab67b34f32", + "sha256:48fd46bf7155def2e15287c6f2b133a2f78e2d22cdf55647269977b873c65499", + "sha256:4b0d5cdba1b655d5b18042ac9c9ff50bda33568eb80feaaca4fc237b9c4fbfde", + "sha256:4df0ec814b50275ad6a99bc82a38b59f90e10e47714ac9871e1b223895825468", + "sha256:4e52e1b148867b01c05e21837586ee307a01e793b94072d7c7b91d2c2da02ffe", 
+ "sha256:514fe78fc4b87e7a7601c92492210b20a1b0c6ab20e71e81307d9c2e377c64de", + "sha256:524ccfded8989a6595dbdda80d779fb977dbc9a7bc458864fc9a0c2fc15dc877", + "sha256:528f3a0498a8edc69af0559bdcf8a9f5a8bf7c00051a6ef3141fdcf27017bbf5", + "sha256:52d82b0d436edd6a1d22d94a344b9a58abd6c68c357ed44f22d4ba8179b37629", + "sha256:5412500e0dc5481b1ee9cf6b38bb3b473f6e411eb62b83dc9b62699c3b7b79f7", + "sha256:585c4dc429deebc4307187d2b71ebe914843185ae16a4d582ee030e6cfbb4d8a", + "sha256:5865b270b420eda7b68928d70bb517ccbe045e53b1a428129bb44372bf3d7dd5", + "sha256:5881aaa4bf3a2d086c5f20371d3a5856199a0d8ac72dd8d0dbd7a2ecfc26ab73", + "sha256:5885bc586f1edb48e5d68e7a4b4757b5feb2a496b64f462b4d65950f5af3364f", + "sha256:5a11b16a33656ffc43c92a5343a28dc71eefe460bcc2a4923a96f292692709f6", + "sha256:5a997b784a639e05b9d4053ef3b20c7e447ea80814a762f25b8ed5a89d261eac", + "sha256:5be8f5e4044146a69c96077c7e08f0709c13a314aa5315981185c1f00235fe65", + "sha256:63d57fc94eb0bbb4735e45517afc21ef262991d8758a8f2f05dd6e4174944519", + "sha256:673b9d8e780f455091200bba8534d5f4f465944cbdd61f31dc832d70e29064a5", + "sha256:67d2f8ad9dcc3a9e826bdc7802ed541a44e124c29b7d95a679eeb58c1c14ade8", + "sha256:67f5e80adf0aafc7b5454f2c1cb0cde920c9b1f2cbd0485f07cc1d0497c35c5d", + "sha256:68018c4c67d7e89951a91fbd371e2e34cd8cfc71f0bb43b5332db38497025d51", + "sha256:6c4dd3bfd0c82400060896717dd261137398edb7e524527438c54a8c34f736bf", + "sha256:71f31eda4e370f46af42fc9f264fafa1b09f46ba07bdbee98f25689a04b81c20", + "sha256:7512b4d0fc5339d5abbb14d1843f70499cab90d0b864f790e73f780f041615d7", + "sha256:75fa3d6946d317ffc7016a6fcc44f42db6d514b7fdb8b4b28cbe058303cb6e53", + "sha256:779e851fd0e19795ccc8a9bb4d705d6baa0ef475329fe44a13cf1e962f18ff1e", + "sha256:796520afa499732191e39fc95b56a3b07f95256f2d22b1c26e217fb69a9db5b5", + "sha256:7aae7a3d63b935babfdc6864b31196afd5145878ddd22f5200729006366bc4d5", + "sha256:7b82e67c5feb682dbb559c3e6b78355f234943053af61606af126df2183b9ef9", + 
"sha256:7c0536bd9178f754b277a3e53f90f9c9454a3bd108b1531ffff720e082d824f2", + "sha256:7eda194dd46e40ec745bf76795a7cccb02a6a41f445ad49d3cf66518b0bd9cff", + "sha256:82a4bb10b0beef1434fb23a09f001ab5ca87895596b4581fd53f1e5145a8934a", + "sha256:85c4f11be9cf08917ac2a5a8b6e1ef63b2f8e3799cec194417e76826e5f1de9c", + "sha256:88b72eb7222d918c967202024812c2bfb4048deeb69ca328363fb8e15254c549", + "sha256:89934f9f791566e54c1d92cdc8f8fd0009447a5ecdb1ec6b810d5f8c4955f6be", + "sha256:8b1942b3e4ed9ed551ed3083a2e6e0772de1e5e3aca872d955e2e86385fb7ff9", + "sha256:8ffb141361108e864ab5f1813f66e4e1164181227f9b1f105b042729b6c15125", + "sha256:8fffc08de02071c37865a155e5ea5fce0282e1546fd5bde7f6149fcaa32558ac", + "sha256:91fb6a43d72b4f8863d21f347a9163eecbf36e76e2f51068d59cd004c506f332", + "sha256:928e75a7200a4c09e6efc7482a1337919cc61fe1ba289f297827a5b76d8969c2", + "sha256:96eef5b9f336f623ffc555ab47a775495e7e8846dde88de5f941e2906453a1ce", + "sha256:a0611da6b07dd3720f492db1b463a4d1175b096b49438761cc9f35f0d9eaaef5", + "sha256:a091026c3bf7519ab1e64655a3f52a59ad4a4e019a6f830c24d6430695b1cf6a", + "sha256:a22f66270bd6d0804b02cd49dae2b33d4341015545d17f8426f2c4e22f557a23", + "sha256:a243132767150a44e6a93cd1dde41010036e1cbc63cc3e9fe1712b277d926ce3", + "sha256:a31fa7536ec1fb7155a0cd3a4e3d956c835ad0a43e3610ca32384d01f079ea1c", + "sha256:a364e8e944d92dcbf33b6b494d4e0fb3499dcc3bd9485beb701aa4b4201fa414", + "sha256:a4058f16cee694577f7e4dd410263cd0ef75644b43802a689c2b3c2a7e69453b", + "sha256:a4b382e0e636ed54cd278791d93fe2c4f370772743f02bcbe431a160089025c9", + "sha256:a83d3adea1e0ee36dac34627f78ddd7f093bb9cfc0a8e97f1572a949b695cb98", + "sha256:a8ade0363f776f87f982572c2860cc43c65ace208db49c76df0a21dde4ddd16e", + "sha256:aa59974880ab5ad8ef3afaa26f9bda148c5f39e06b11a8ada4660ecc9fb2feb3", + "sha256:aa826340a609d0c954ba52fd831f0fba2a4165659ab0ee1a15e4aac21f302406", + "sha256:aaca5a812f050ab55426c32177091130b1e49329b3f002a32934cd0245571307", + "sha256:ae82fce1d964f065c32c9517309f0c7be588772352d2f40b1574a214bd6e6098", 
+ "sha256:aed57b541b589fa05ac248f4cb1c46cbb432ab82cbd467d1c4f6a2bdc18aecf9", + "sha256:afa578b6524ff85fb365f454cf61683771d0170470c48ad9d170c48075f86725", + "sha256:b0884e3f22d87c30694e625b1e62e6f30d39782c806287450d9dc2fdf07692fd", + "sha256:b2aca14c235c7a08558fe0a4786a1a05873a01e86b474dfa8f6df49101853a4e", + "sha256:b450d7cabcd49aa7ab46a3c6aa3ac7e1593600a1a0605ba536ec0f1b99a04322", + "sha256:b725e70d15906d24615201e650d5b0388b08a5187a55f119f25874d0103f90dd", + "sha256:bfbbab9316330cf81656fed435311386610f78b6c93cc5db4bebbce8dd146675", + "sha256:c093c7088b40d8266f57ed71d93112bd64c6724d31f0794c1e52cc4857c28e0e", + "sha256:c2e49dc23a10a1296b04ca9db200c44d3eb32c8d8ec532e8c1fd24792276522a", + "sha256:c4393600915c308e546dc7003d74371744234e8444a28622d76fe19b98fa59d1", + "sha256:c5ae125276f254b01daa73e2c103363d3e99e3e10505686ac7d9d2442dd4627a", + "sha256:c6aacf00d05b38a5069826e50ae72751cb5bc27bdc4d5746203988e429b385bb", + "sha256:c76722b5ed4a31ba103e0dc77ab869222ec36efe1a614e42e9bcea88a36186fe", + "sha256:c809eef167bf4a57af4b03007004896f5c60bd38dc3852fcd97a26eae3d4c9e6", + "sha256:c92ea6d9dd84a750b2bae72ff5e8cf5fdd13e58dda79c33e057862c29a8d5b50", + "sha256:cb659702a45136c743bc130760c6f137870d4df3a9e14386478b8a0511abcfca", + "sha256:ce0930a963ff593e8bb6fda49a503911accc67dee7e5445eec972668e672a0f0", + "sha256:d0751528b97d2b19a388b302be2a0ee05817097bab46ff0ed76feeec24951f78", + "sha256:d184f85ad2bb1f261eac55cddfcf62a70dee89982c978e92b9a74a1bfef2e367", + "sha256:d2a3e412ce1849be34b45922bfef03df32d1410a06d1cdeb793a343c2f1fd666", + "sha256:d61ec60945d694df806a9aec88e8f29a27293c6e424f8ff91c80416e3c617645", + "sha256:db0c742aad702fd5d0c6611a73f9602f20aec2007c102630c06d7633d9c8f09a", + "sha256:db4743e30d6f5f92b6d2b7c86b3ad250e0bad8dee4b7ad8a0c44bfb276af89a3", + "sha256:dbf7bebc2275016cddf3c997bf8a0f7044160714c64a9b83975670a04e6d2252", + "sha256:de1fc314c3ad6bc2f6bd5b5a5b9357b8c6896333d27fdbb7049aea8bd5af2d79", + 
"sha256:df7e5edac4778127f2bf452e0721a58a1cfa4d1d9eac63bdd650535eb8543615", + "sha256:e220f7b3e8656ab063d2eb0cd536fafef396829cafe04cb314e734f87649058f", + "sha256:e3c623923967f3e5961d272718655946e5322b8d058e094764180cdee7bab1af", + "sha256:e69add9b6b7b08c60d7ff0152c7c9a6c45b4a71a919be5abde6f98f1ea16421c", + "sha256:e8e0d177b1fe251c3b1b914ab64135475c5273c8cfd2857964b2e3bb0fe196a7", + "sha256:ef45f31aec9be01379fc6c10f1d9c677f032f2bac9383c827d44f620e8a88407", + "sha256:f1208c1c67ec9e151d78aa3435aa9b08a488b53d9cfac9b699f15255a3461ef2", + "sha256:f12582b8d3b4c6be1d298c49cb7ae64a3a73efaf4c2ab4e37db182e3545815ac", + "sha256:f1de541a9893cf8a1b1db9bf0bf670a2decab42e3e82233d36a74eda7822b4c9", + "sha256:f4eac0584cdc3285ef2e74eee1513a6001681fd9753b259e8159421ed28a72e5", + "sha256:f7b64fcd670bca8800bc10ced36620c6bbb321e7bc1214b9c0c0df269c1dddc2", + "sha256:fb7c61d4be18e930f75948705e9718618862e6fc2ed0d7159b2262be73f167a2" ], "markers": "python_version >= '3.6'", - "version": "==5.3.0" + "version": "==5.3.1" }, "matplotlib-inline": { "hashes": [ @@ -3252,48 +3257,42 @@ }, "mypy": { "hashes": [ - "sha256:07ba89fdcc9451f2ebb02853deb6aaaa3d2239a236669a63ab3801bbf923ef5c", - "sha256:0c911fde686394753fff899c409fd4e16e9b294c24bfd5e1ea4675deae1ac6fd", - "sha256:183cf0a45457d28ff9d758730cd0210419ac27d4d3f285beda038c9083363b1f", - "sha256:1fb545ca340537d4b45d3eecdb3def05e913299ca72c290326be19b3804b39c0", - "sha256:27fc248022907e72abfd8e22ab1f10e903915ff69961174784a3900a8cba9ad9", - "sha256:2ae753f5c9fef278bcf12e1a564351764f2a6da579d4a81347e1d5a15819997b", - "sha256:30ff5ef8519bbc2e18b3b54521ec319513a26f1bba19a7582e7b1f58a6e69f14", - "sha256:3888a1816d69f7ab92092f785a462944b3ca16d7c470d564165fe703b0970c35", - "sha256:44bf464499f0e3a2d14d58b54674dee25c031703b2ffc35064bd0df2e0fac319", - "sha256:46c756a444117c43ee984bd055db99e498bc613a70bbbc120272bd13ca579fbc", - "sha256:499d6a72fb7e5de92218db961f1a66d5f11783f9ae549d214617edab5d4dbdbb", - 
"sha256:52686e37cf13d559f668aa398dd7ddf1f92c5d613e4f8cb262be2fb4fedb0fcb", - "sha256:553c293b1fbdebb6c3c4030589dab9fafb6dfa768995a453d8a5d3b23784af2e", - "sha256:57961db9795eb566dc1d1b4e9139ebc4c6b0cb6e7254ecde69d1552bf7613f60", - "sha256:7084fb8f1128c76cd9cf68fe5971b37072598e7c31b2f9f95586b65c741a9d31", - "sha256:7d54bd85b925e501c555a3227f3ec0cfc54ee8b6930bd6141ec872d1c572f81f", - "sha256:7ec88144fe9b510e8475ec2f5f251992690fcf89ccb4500b214b4226abcd32d6", - "sha256:8b21525cb51671219f5307be85f7e646a153e5acc656e5cebf64bfa076c50107", - "sha256:8b4e3413e0bddea671012b063e27591b953d653209e7a4fa5e48759cda77ca11", - "sha256:8c6d94b16d62eb3e947281aa7347d78236688e21081f11de976376cf010eb31a", - "sha256:8edc07eeade7ebc771ff9cf6b211b9a7d93687ff892150cb5692e4f4272b0837", - "sha256:8f845a00b4f420f693f870eaee5f3e2692fa84cc8514496114649cfa8fd5e2c6", - "sha256:8fa2220e54d2946e94ab6dbb3ba0a992795bd68b16dc852db33028df2b00191b", - "sha256:90716d8b2d1f4cd503309788e51366f07c56635a3309b0f6a32547eaaa36a64d", - "sha256:92c3ed5afb06c3a8e188cb5da4984cab9ec9a77ba956ee419c68a388b4595255", - "sha256:ad3301ebebec9e8ee7135d8e3109ca76c23752bac1e717bc84cd3836b4bf3eae", - "sha256:b66a60cc4073aeb8ae00057f9c1f64d49e90f918fbcef9a977eb121da8b8f1d1", - "sha256:ba24549de7b89b6381b91fbc068d798192b1b5201987070319889e93038967a8", - "sha256:bce23c7377b43602baa0bd22ea3265c49b9ff0b76eb315d6c34721af4cdf1d9b", - "sha256:c99f27732c0b7dc847adb21c9d47ce57eb48fa33a17bc6d7d5c5e9f9e7ae5bac", - "sha256:cb9f255c18052343c70234907e2e532bc7e55a62565d64536dbc7706a20b78b9", - "sha256:d4b19b03fdf54f3c5b2fa474c56b4c13c9dbfb9a2db4370ede7ec11a2c5927d9", - "sha256:d64169ec3b8461311f8ce2fd2eb5d33e2d0f2c7b49116259c51d0d96edee48d1", - "sha256:dbec574648b3e25f43d23577309b16534431db4ddc09fda50841f1e34e64ed34", - "sha256:e0fe0f5feaafcb04505bcf439e991c6d8f1bf8b15f12b05feeed96e9e7bf1427", - "sha256:f2a0ecc86378f45347f586e4163d1769dd81c5a223d577fe351f26b179e148b1", - "sha256:f995e511de847791c3b11ed90084a7a0aafdc074ab88c5a9711622fe4751138c", 
- "sha256:fad79bfe3b65fe6a1efaed97b445c3d37f7be9fdc348bdb2d7cac75579607c89" + "sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e", + "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22", + "sha256:1905f494bfd7d85a23a88c5d97840888a7bd516545fc5aaedff0267e0bb54e2f", + "sha256:1fbb8da62dc352133d7d7ca90ed2fb0e9d42bb1a32724c287d3c76c58cbaa9c2", + "sha256:2922d42e16d6de288022e5ca321cd0618b238cfc5570e0263e5ba0a77dbef56f", + "sha256:2e2c2e6d3593f6451b18588848e66260ff62ccca522dd231cd4dd59b0160668b", + "sha256:2ee2d57e01a7c35de00f4634ba1bbf015185b219e4dc5909e281016df43f5ee5", + "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f", + "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43", + "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e", + "sha256:5a95fb17c13e29d2d5195869262f8125dfdb5c134dc8d9a9d0aecf7525b10c2c", + "sha256:6983aae8b2f653e098edb77f893f7b6aca69f6cffb19b2cc7443f23cce5f4828", + "sha256:712e962a6357634fef20412699a3655c610110e01cdaa6180acec7fc9f8513ba", + "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee", + "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d", + "sha256:8f8722560a14cde92fdb1e31597760dc35f9f5524cce17836c0d22841830fd5b", + "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445", + "sha256:973500e0774b85d9689715feeffcc980193086551110fd678ebe1f4342fb7c5e", + "sha256:979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13", + "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5", + "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd", + "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf", + "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357", + "sha256:be68172e9fd9ad8fb876c6389f16d1c1b5f100ffa779f77b1fb2176fcc9ab95b", + 
"sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036", + "sha256:c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559", + "sha256:c7be1e46525adfa0d97681432ee9fcd61a3964c2446795714699a998d193f1a3", + "sha256:c9817fa23833ff189db061e6d2eff49b2f3b6ed9856b4a0a73046e41932d744f", + "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464", + "sha256:d10d994b41fb3497719bbf866f227b3489048ea4bbbb5015357db306249f7980", + "sha256:e601a7fa172c2131bff456bb3ee08a88360760d0d2f8cbd7a75a65497e2df078", + "sha256:f95579473af29ab73a10bada2f9722856792a36ec5af5399b653aa28360290a5" ], "index": "pypi", - "markers": "python_version >= '3.8'", - "version": "==1.14.1" + "markers": "python_version >= '3.9'", + "version": "==1.15.0" }, "mypy-extensions": { "hashes": [ @@ -3563,14 +3562,6 @@ "markers": "python_version >= '3.8'", "version": "==3.14.0" }, - "python-dateutil": { - "hashes": [ - "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", - "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", - "version": "==2.9.0.post0" - }, "pyyaml": { "hashes": [ "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff", @@ -3667,28 +3658,28 @@ }, "ruff": { "hashes": [ - "sha256:05bebf4cdbe3ef75430d26c375773978950bbf4ee3c95ccb5448940dc092408e", - "sha256:1d4c8772670aecf037d1bf7a07c39106574d143b26cfe5ed1787d2f31e800214", - "sha256:37c892540108314a6f01f105040b5106aeb829fa5fb0561d2dcaf71485021137", - "sha256:433dedf6ddfdec7f1ac7575ec1eb9844fa60c4c8c2f8887a070672b8d353d34c", - "sha256:54499fb08408e32b57360f6f9de7157a5fec24ad79cb3f42ef2c3f3f728dfe2b", - "sha256:56acd6c694da3695a7461cc55775f3a409c3815ac467279dfa126061d84b314b", - "sha256:585792f1e81509e38ac5123492f8875fbc36f3ede8185af0a26df348e5154f41", - "sha256:64e73d25b954f71ff100bb70f39f1ee09e880728efb4250c632ceed4e4cdf706", - 
"sha256:6907ee3529244bb0ed066683e075f09285b38dd5b4039370df6ff06041ca19e7", - "sha256:6ce6743ed64d9afab4fafeaea70d3631b4d4b28b592db21a5c2d1f0ef52934bf", - "sha256:87c90c32357c74f11deb7fbb065126d91771b207bf9bfaaee01277ca59b574ec", - "sha256:a6c634fc6f5a0ceae1ab3e13c58183978185d131a29c425e4eaa9f40afe1e6d6", - "sha256:bfc5f1d7afeda8d5d37660eeca6d389b142d7f2b5a1ab659d9214ebd0e025231", - "sha256:d612dbd0f3a919a8cc1d12037168bfa536862066808960e0cc901404b77968f0", - "sha256:db1192ddda2200671f9ef61d9597fcef89d934f5d1705e571a93a67fb13a4402", - "sha256:de9edf2ce4b9ddf43fd93e20ef635a900e25f622f87ed6e3047a664d0e8f810e", - "sha256:e0c93e7d47ed951b9394cf352d6695b31498e68fd5782d6cbc282425655f687a", - "sha256:faa935fc00ae854d8b638c16a5f1ce881bc3f67446957dd6f2af440a5fc8526b" + "sha256:03482d5c09d90d4ee3f40d97578423698ad895c87314c4de39ed2af945633caa", + "sha256:0e2bb706a2be7ddfea4a4af918562fdc1bcb16df255e5fa595bbd800ce322a5a", + "sha256:194d8402bceef1b31164909540a597e0d913c0e4952015a5b40e28c146121b5d", + "sha256:19f505b643228b417c1111a2a536424ddde0db4ef9023b9e04a46ed8a1cb4656", + "sha256:1de4367cca3dac99bcbd15c161404e849bb0bfd543664db39232648dc00112dc", + "sha256:2f218f356dd2d995839f1941322ff021c72a492c470f0b26a34f844c29cdf5ba", + "sha256:4a091729086dffa4bd070aa5dab7e39cc6b9d62eb2bef8f3d91172d30d599666", + "sha256:589d1d9f25b5754ff230dce914a174a7c951a85a4e9270613a2b74231fdac2f5", + "sha256:5dc1edd1775270e6aa2386119aea692039781429f0be1e0949ea5884e011aa8e", + "sha256:5e2d9126161d0357e5c8f30b0bd6168d2c3872372f14481136d13de9937f79b6", + "sha256:68660eab1a8e65babb5229a1f97b46e3120923757a68b5413d8561f8a85d4897", + "sha256:81761592f72b620ec8fa1068a6fd00e98a5ebee342a3642efd84454f3031dca9", + "sha256:ac3ee4d7c2c92ddfdaedf0bf31b2b176fa7aa8950efc454628d477394d35638b", + "sha256:b109c0ad2ececf42e75fa99dc4043ff72a357436bb171900714a9ea581ddef83", + "sha256:b908ff4df65dad7b251c9968a2e4560836d8f5487c2f0cc238321ed951ea0504", + "sha256:c4cae6c4cc7b9b4017c71114115db0445b00a16de3bcde0946273e8392856f08", 
+ "sha256:d1bbc6808bf7b15796cef0815e1dfb796fbd383e7dbd4334709642649625e7c5", + "sha256:dc61dd5131742e21103fbbdcad683a8813be0e3c204472d520d9a5021ca8b217" ], "index": "pypi", "markers": "python_version >= '3.7'", - "version": "==0.9.4" + "version": "==0.9.6" }, "setuptools": { "hashes": [ @@ -3750,12 +3741,12 @@ }, "types-pytz": { "hashes": [ - "sha256:06d7cde9613e9f7504766a0554a270c369434b50e00975b3a4a0f6eed0f2c1a9", - "sha256:8fc03195329c43637ed4f593663df721fef919b60a969066e22606edf0b53ad5" + "sha256:00f750132769f1c65a4f7240bc84f13985b4da774bd17dfbe5d9cd442746bd49", + "sha256:32ca4a35430e8b94f6603b35beb7f56c32260ddddd4f4bb305fdf8f92358b87e" ], "index": "pypi", - "markers": "python_version >= '3.8'", - "version": "==2024.2.0.20241221" + "markers": "python_version >= '3.9'", + "version": "==2025.1.0.20250204" }, "types-requests": { "hashes": [ @@ -3774,6 +3765,14 @@ "markers": "python_version >= '3.8'", "version": "==4.12.2" }, + "tzdata": { + "hashes": [ + "sha256:24894909e88cdb28bd1636c6887801df64cb485bd593f2fd83ef29075a81d694", + "sha256:7e127113816800496f027041c570f50bcd464a020098a3b6b199517772303639" + ], + "markers": "python_version >= '2'", + "version": "==2025.1" + }, "url-normalize": { "hashes": [ "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2", @@ -3806,11 +3805,11 @@ }, "virtualenv": { "hashes": [ - "sha256:4e4cb403c0b0da39e13b46b1b2476e505cb0046b25f242bee80f62bf990b2779", - "sha256:b8b8970138d32fb606192cb97f6cd4bb644fa486be9308fb9b63f81091b5dc35" + "sha256:fdaabebf6d03b5ba83ae0a02cfe96f48a716f4fae556461d180825866f75b728", + "sha256:febddfc3d1ea571bdb1dc0f98d7b45d24def7428214d4fb73cc486c9568cce6a" ], "markers": "python_version >= '3.8'", - "version": "==20.29.1" + "version": "==20.29.2" }, "wcwidth": { "hashes": [ diff --git a/README.md b/README.md index 28a6cb7063f..e0fc9cbe5ab 100644 --- a/README.md +++ b/README.md @@ -229,11 +229,8 @@ set -o allexport; source .env; set +o allexport > 🛑 **NOTE 2:** Please do not forget about 
environment variables! Now all environment variables for the Postgres Database which runs in docker are already passed to docker-compose.yaml from the .env file. -> 🛑 **NOTE 3:** If you get an error running `pipenv sync --dev` related to the dependency `greenlet`, install it by running: -```bash -pipenv install greenlet -``` -> 🛑 **NOTE 4:** If the application can't find the `RabbitMQ` service even though it's running normally, change your `RABBITMQ__URL` to your local ip address instead of `localhost` +> 🛑 **NOTE 3:** If the application can't find the `RabbitMQ` service even though it's running normally, change your +`RABBITMQ__URL` to your local ip address instead of `localhost` ## Run the migrations ```bash @@ -329,6 +326,9 @@ make run # Check the code quality make cq +# Check and fix code quality +make cqf + # Check tests passing make test @@ -479,6 +479,12 @@ delete from alembic_version; alembic -c alembic_arbitrary.ini upgrade head ``` +### Update gender_screen and age_screen activity items strings to greek for an applet + +```bash +python src/cli.py patch exec M2-8568 -a +``` + #### Database relation structure ```mermaid diff --git a/src/apps/activities/services/activity.py b/src/apps/activities/services/activity.py index 31af33ed9a6..20eb9bb68ff 100644 --- a/src/apps/activities/services/activity.py +++ b/src/apps/activities/services/activity.py @@ -26,6 +26,7 @@ from apps.schedule.crud.events import ActivityEventsCRUD, EventCRUD from apps.schedule.service.schedule import ScheduleService from apps.workspaces.domain.constants import Role +from infrastructure.logger import logger class ActivityService: @@ -157,6 +158,16 @@ async def update_create(self, applet_id: uuid.UUID, activities_create: list[Acti ) for item in activity_data.items: + if item.name in ["age_screen", "gender_screen"] and item.id is None: + # Implement logging for the age_screen and gender_screen items to trigger alerts + # in Datadog for the Greek version of the applet after translations 
were rolled back. + # TODO: Remove when full Greek support is available in Admin Panel [M2-8678](https://mindlogger.atlassian.net/browse/M2-8678) + logger.info( # type: ignore + f"Creating {item.name} item for activity {activity_id} in applet_id {applet_id}", + applet_id=str(applet_id), + operation=f"update_{item.name}", + ) + prepared_activity_items.append( PreparedActivityItemUpdate( id=item.id or uuid.uuid4(), diff --git a/src/apps/shared/commands/patch_commands.py b/src/apps/shared/commands/patch_commands.py index 4009de005f6..971adbaaff4 100644 --- a/src/apps/shared/commands/patch_commands.py +++ b/src/apps/shared/commands/patch_commands.py @@ -132,6 +132,12 @@ description="Set alert type to default value 'answer'", ) +PatchRegister.register( + file_path="m2_8568_update_subscale_items_to_greek.py", + task_id="M2-8568", + description="Update subscale items to greek", +) + app = typer.Typer() @@ -178,17 +184,18 @@ async def exec( "-o", help="Workspace owner id", ), + applet_id: Optional[uuid.UUID] = typer.Option(None, "--applet_id", "-a", help="Patch arguments"), ): patch = PatchRegister.get_by_task_id(task_id) if not patch: print(wrap_error_msg("Patch not registered")) else: - await exec_patch(patch, owner_id) + await exec_patch(patch, owner_id, applet_id) return -async def exec_patch(patch: Patch, owner_id: Optional[uuid.UUID]): +async def exec_patch(patch: Patch, owner_id: Optional[uuid.UUID], applet_id: Optional[uuid.UUID]): session_maker = session_manager.get_session() arbitrary = None async with session_maker() as session: @@ -241,16 +248,16 @@ async def exec_patch(patch: Patch, owner_id: Optional[uuid.UUID]): # if manage_session is True, pass sessions to patch_file main if patch.manage_session: - await patch_file.main(session_maker, arbitrary_session_maker) + await patch_file.main(session_maker, arbitrary_session_maker, applet_id=applet_id) else: async with session_maker() as session: async with atomic(session): if arbitrary_session_maker: async with 
arbitrary_session_maker() as arbitrary_session: # noqa: E501 async with atomic(arbitrary_session): - await patch_file.main(session, arbitrary_session) + await patch_file.main(session, arbitrary_session, applet_id=applet_id) else: - await patch_file.main(session) + await patch_file.main(session, applet_id=applet_id) print( f"[bold green]Patch {patch.task_id} executed[/bold green]" # noqa: E501 diff --git a/src/apps/shared/commands/patches/m2_8568_update_subscale_items_to_greek.py b/src/apps/shared/commands/patches/m2_8568_update_subscale_items_to_greek.py new file mode 100644 index 00000000000..ffda637392e --- /dev/null +++ b/src/apps/shared/commands/patches/m2_8568_update_subscale_items_to_greek.py @@ -0,0 +1,115 @@ +import uuid +from uuid import UUID + +from sqlalchemy import cast, func, select, update +from sqlalchemy.cimmutabledict import immutabledict +from sqlalchemy.dialects.postgresql import JSONB +from sqlalchemy.ext.asyncio import AsyncSession + +from apps.activities.db.schemas import ActivityItemSchema, ActivitySchema + + +async def update_age_screen(session: AsyncSession, applet_id: UUID): + print(f"Updating age screen for applet_id: {applet_id}") + + new_question_value = {"el": "Πόσων χρονών είστε;"} + + update_query = ( + update(ActivityItemSchema) + .where( + ActivityItemSchema.name == "age_screen", + ActivityItemSchema.activity_id.in_(select(ActivitySchema.id).where(ActivitySchema.applet_id == applet_id)), + ) + .values( + question=func.jsonb_set( + ActivityItemSchema.question, + ["en"], + cast(new_question_value["el"], JSONB), + True, + ) + ) + ) + + await session.execute(update_query, execution_options=immutabledict({"synchronize_session": "fetch"})) + print(f"Updated age screen for applet_id: {applet_id}") + + +async def update_gender_screen(session: AsyncSession, applet_id: UUID): + print(f"Updating gender screen for applet_id: {applet_id}") + new_question_value = {"el": "Ποιο φύλο σας αποδόθηκε κατά την γέννησή σας;"} + + translations = { + 
"Male": "Ανδρας", + "Female": "Γυναίκα", + } + + query = select(ActivityItemSchema.id, ActivityItemSchema.response_values).where( + ActivityItemSchema.name == "gender_screen", + ActivityItemSchema.activity_id.in_(select(ActivitySchema.id).where(ActivitySchema.applet_id == applet_id)), + ) + + res = await session.execute(query) + + for item in res.mappings().all(): + print(f"Updating item id: {item.id}") + + male_index = next( + (index for (index, option) in enumerate(item.response_values["options"]) if option["text"] == "Male"), -1 + ) + + female_index = next( + (index for (index, option) in enumerate(item.response_values["options"]) if option["text"] == "Female"), -1 + ) + + if (male_index == -1) or (female_index == -1): + continue + + update_response_values = func.jsonb_set( + func.jsonb_set( + ActivityItemSchema.response_values, + ["options", str(male_index), "text"], + cast(translations["Male"], JSONB), + True, + ), + ["options", str(female_index), "text"], + cast(translations["Female"], JSONB), + True, + ) + + update_query = ( + update(ActivityItemSchema) + .where( + ActivityItemSchema.name == "gender_screen", + ActivityItemSchema.id == item.id, + ) + .values( + question=func.jsonb_set( + ActivityItemSchema.question, + ["en"], + cast(new_question_value["el"], JSONB), + True, + ), + response_values=update_response_values, + ) + ) + + await session.execute(update_query, execution_options=immutabledict({"synchronize_session": "fetch"})) + print(f"Updated gender screen for applet_id: {applet_id}") + + +async def main( + session: AsyncSession, + arbitrary_session: AsyncSession = None, + applet_id: uuid.UUID | None = None, + *args, + **kwargs, +): + if applet_id is None: + return + + print(f"Updating subscale items to Greek for applet_id: {applet_id}") + + await update_age_screen(session, applet_id) + await update_gender_screen(session, applet_id) + + return From 734bd1f29fdf52460b5199b5cc95ae3847875104 Mon Sep 17 00:00:00 2001 From: Farmer Paul Date: Fri, 14 Feb 
2025 12:34:50 -0400 Subject: [PATCH 02/14] feat: Allow designating Single Selection items as response data identifiers (M2-8690) (#1738) --- src/apps/activities/domain/response_type_config.py | 1 + src/apps/activities/tests/fixtures/configs.py | 1 + src/apps/activities/tests/unit/test_activity_item_change.py | 2 ++ src/apps/jsonld_converter/service/document/field.py | 6 +++++- src/apps/jsonld_converter/service/export/activity_item.py | 1 + src/apps/test_data/service.py | 3 +++ 6 files changed, 13 insertions(+), 1 deletion(-) diff --git a/src/apps/activities/domain/response_type_config.py b/src/apps/activities/domain/response_type_config.py index 3348e04f4d2..7daac81757c 100644 --- a/src/apps/activities/domain/response_type_config.py +++ b/src/apps/activities/domain/response_type_config.py @@ -162,6 +162,7 @@ class _SelectionConfig(_ScreenConfig, PublicModel): class SingleSelectionConfig(_SelectionConfig, PublicModel): type: Literal[ResponseType.SINGLESELECT] | None auto_advance: bool = False + response_data_identifier: bool = False class MultiSelectionConfig(_SelectionConfig, PublicModel): diff --git a/src/apps/activities/tests/fixtures/configs.py b/src/apps/activities/tests/fixtures/configs.py index 65010229630..b12779ace88 100644 --- a/src/apps/activities/tests/fixtures/configs.py +++ b/src/apps/activities/tests/fixtures/configs.py @@ -50,6 +50,7 @@ def single_select_config(default_config: DefaultConfig) -> SingleSelectionConfig set_alerts=False, add_tooltip=False, set_palette=False, + response_data_identifier=False, **default_config.dict(), type=ResponseType.SINGLESELECT, ) diff --git a/src/apps/activities/tests/unit/test_activity_item_change.py b/src/apps/activities/tests/unit/test_activity_item_change.py index b08db0cd9f7..abc4f3b03d6 100644 --- a/src/apps/activities/tests/unit/test_activity_item_change.py +++ b/src/apps/activities/tests/unit/test_activity_item_change.py @@ -388,6 +388,7 @@ def test_initial_single_selection_config_change( "Add Text Input 
Option was disabled", "Input Required was disabled", "Auto Advance was disabled", + "Response Data Identifier was disabled", ] assert changes == exp_changes @@ -488,6 +489,7 @@ def test_initial_version_changes( "Add Text Input Option was disabled", "Input Required was disabled", "Auto Advance was disabled", + "Response Data Identifier was disabled", ] changes = item_change_service.get_changes_insert(new_item) assert changes == single_select_exp_changes diff --git a/src/apps/jsonld_converter/service/document/field.py b/src/apps/jsonld_converter/service/document/field.py index ae905b4eb64..35e43c43cf4 100644 --- a/src/apps/jsonld_converter/service/document/field.py +++ b/src/apps/jsonld_converter/service/document/field.py @@ -342,6 +342,7 @@ class ReproFieldRadio(ReproFieldBase): ld_randomize_options: bool | None = None ld_scoring: bool | None = None ld_response_alert: bool | None = None + ld_is_response_identifier: bool | None = None is_multiple: bool = False choices: list[dict] | None = None @@ -357,7 +358,9 @@ async def _process_ld_response_options(self, options_doc: dict, drop=False): self.ld_randomize_options = self.attr_processor.get_attr_value(options_doc, "reproschema:randomizeOptions") self.ld_scoring = self.attr_processor.get_attr_value(options_doc, "reproschema:scoring") self.ld_response_alert = self.attr_processor.get_attr_value(options_doc, "reproschema:responseAlert") - + self.ld_is_response_identifier = self.attr_processor.get_attr_value( + options_doc, "reproschema:isResponseIdentifier" + ) self.choices = self._get_ld_choices_formatted(options_doc) def _build_config(self, _cls: Type | None, **attrs): @@ -367,6 +370,7 @@ def _build_config(self, _cls: Type | None, **attrs): set_alerts=bool(self.ld_response_alert), add_tooltip=False, # TODO set_palette=bool(self.ld_color_palette), # TODO + response_data_identifier=bool(self.ld_is_response_identifier) if not self.is_multiple else None, ) cfg_cls = MultiSelectionConfig if self.is_multiple else 
SingleSelectionConfig diff --git a/src/apps/jsonld_converter/service/export/activity_item.py b/src/apps/jsonld_converter/service/export/activity_item.py index 7af9aab4fe5..1141ca37fe0 100644 --- a/src/apps/jsonld_converter/service/export/activity_item.py +++ b/src/apps/jsonld_converter/service/export/activity_item.py @@ -144,6 +144,7 @@ def _build_response_options_prop(self, model: ActivityItemFull) -> dict: "valueType": "xsd:anyURI", # todo tokens "randomizeOptions": config.randomize_options, "scoring": config.add_scores, + "isResponseIdentifier": config.response_data_identifier, "responseAlert": config.set_alerts, "colorPalette": config.set_palette, "multipleChoice": False, diff --git a/src/apps/test_data/service.py b/src/apps/test_data/service.py index 064fa2146d1..a474d9c990e 100644 --- a/src/apps/test_data/service.py +++ b/src/apps/test_data/service.py @@ -181,6 +181,7 @@ def generate_response_value_config(type_: ResponseType): ) result["response_values"] = None # type: ignore # noqa: E501 + elif type_ == ResponseType.SINGLESELECT: result["config"] = dict( remove_back_button=False, @@ -191,11 +192,13 @@ def generate_response_value_config(type_: ResponseType): set_alerts=False, add_tooltip=False, set_palette=False, + response_data_identifier=False, additional_response_option=dict( # type: ignore # noqa: E501 text_input_option=False, text_input_required=False, ), ) + result["response_values"] = { "options": [ # type: ignore # noqa: E501 { From 11571c7f69479c1e67c0989725bc509c6afb423e Mon Sep 17 00:00:00 2001 From: Kenroy Gobourne <14842108+sultanofcardio@users.noreply.github.com> Date: Thu, 20 Feb 2025 13:20:57 -0500 Subject: [PATCH 03/14] chore: Schedules database clean up (M2-8495) (#1723) This PR implements the following cleanup operations: - Adds four new columns to the `events` table: - `activity_id`: taken from the `activity_events` table - `activity_flow_id`: taken from the `flow_events` table - `user_id`: taken from the `user_events` table - 
`event_type`: Enum with value `activity` or `flow` - Removes the `periodicity_id` column from the `events` table - Migrates all the data from the `activity_events`, `flow_events`, and `user_events` tables into the `events` table - Drops the `activity_events`, `flow_events`, `user_events`, and `periodicity` tables - Updates the API code to account for these changes, which mostly included rewriting queries to select only from the `events` table where multiple joins (and sometimes multiple queries) were previously being done These changes are intended to be 100% backwards compatible and should introduce no new features from the client perspective --- src/apps/activities/services/activity.py | 7 +- src/apps/activity_flows/service/flow.py | 7 +- src/apps/applets/commands/applet_ema.py | 78 +- src/apps/schedule/crud/events.py | 811 +++++------------- src/apps/schedule/db/schemas.py | 70 +- src/apps/schedule/domain/schedule/internal.py | 46 +- src/apps/schedule/service/schedule.py | 210 ++--- src/apps/schedule/service/schedule_history.py | 3 + .../workspaces/crud/user_applet_access.py | 14 +- ...25_02_02_18_39-clean_up_schedule_tables.py | 174 ++++ 10 files changed, 514 insertions(+), 906 deletions(-) create mode 100644 src/infrastructure/database/migrations/versions/2025_02_02_18_39-clean_up_schedule_tables.py diff --git a/src/apps/activities/services/activity.py b/src/apps/activities/services/activity.py index 20eb9bb68ff..d8d6dc94f48 100644 --- a/src/apps/activities/services/activity.py +++ b/src/apps/activities/services/activity.py @@ -23,7 +23,8 @@ from apps.activity_assignments.service import ActivityAssignmentService from apps.activity_flows.crud import FlowsCRUD from apps.applets.crud import AppletsCRUD, UserAppletAccessCRUD -from apps.schedule.crud.events import ActivityEventsCRUD, EventCRUD +from apps.schedule.crud.events import EventCRUD +from apps.schedule.domain.constants import EventType from apps.schedule.service.schedule import ScheduleService from 
apps.workspaces.domain.constants import Role from infrastructure.logger import logger @@ -115,9 +116,9 @@ async def update_create(self, applet_id: uuid.UUID, activities_create: list[Acti activity_id_key_map: dict[uuid.UUID, uuid.UUID] = dict() prepared_activity_items = list() - all_activities = await ActivityEventsCRUD(self.session).get_by_applet_id(applet_id) + activity_events = await EventCRUD(self.session).get_by_type_and_applet_id(applet_id, EventType.ACTIVITY) - all_activity_ids = [activity.activity_id for activity in all_activities] + all_activity_ids = [activity.activity_id for activity in activity_events if activity.activity_id is not None] # Save new activity ids new_activities = [] diff --git a/src/apps/activity_flows/service/flow.py b/src/apps/activity_flows/service/flow.py index 391d67d7c68..09622746247 100644 --- a/src/apps/activity_flows/service/flow.py +++ b/src/apps/activity_flows/service/flow.py @@ -15,7 +15,8 @@ from apps.activity_flows.service.flow_item import FlowItemService from apps.applets.crud import UserAppletAccessCRUD from apps.applets.domain.applet_history import Version -from apps.schedule.crud.events import EventCRUD, FlowEventsCRUD +from apps.schedule.crud.events import EventCRUD +from apps.schedule.domain.constants import EventType from apps.schedule.service.schedule import ScheduleService from apps.workspaces.domain.constants import Role @@ -89,7 +90,9 @@ async def update_create( schemas = list() prepared_flow_items = list() - all_flows = [flow.flow_id for flow in await FlowEventsCRUD(self.session).get_by_applet_id(applet_id)] + flow_events = await EventCRUD(self.session).get_by_type_and_applet_id(applet_id, EventType.FLOW) + + all_flows = [flow_event.activity_flow_id for flow_event in flow_events if flow_event.activity_flow_id] # Save new flow ids new_flows = [] diff --git a/src/apps/applets/commands/applet_ema.py b/src/apps/applets/commands/applet_ema.py index f14072bf799..ae08cb5ac72 100644 --- 
a/src/apps/applets/commands/applet_ema.py +++ b/src/apps/applets/commands/applet_ema.py @@ -24,13 +24,7 @@ from apps.job.constants import JobStatus from apps.job.errors import JobStatusError from apps.job.service import JobService -from apps.schedule.db.schemas import ( - ActivityEventsSchema, - EventSchema, - FlowEventsSchema, - PeriodicitySchema, - UserEventsSchema, -) +from apps.schedule.db.schemas import EventSchema from apps.schedule.domain.constants import PeriodicityType from apps.shared.domain.base import PublicModel from apps.subjects.db.schemas import SubjectSchema @@ -239,27 +233,24 @@ async def get_user_flow_events( select( EventSchema.applet_id, EventSchema.id.label("event_id"), - UserEventsSchema.user_id, - FlowEventsSchema.flow_id, - PeriodicitySchema.type.label("event_type"), + EventSchema.user_id, + EventSchema.activity_flow_id.label("flow_id"), + EventSchema.periodicity.label("event_type"), case( ( - PeriodicitySchema.type.in_(("WEEKDAYS", "DAILY")), + EventSchema.periodicity.in_(("WEEKDAYS", "DAILY")), scheduled_date, ), - (PeriodicitySchema.type.in_(("WEEKLY", "MONTHLY")), PeriodicitySchema.start_date), - else_=PeriodicitySchema.selected_date, + (EventSchema.periodicity.in_(("WEEKLY", "MONTHLY")), EventSchema.start_date), + else_=EventSchema.selected_date, ).label("selected_date"), - PeriodicitySchema.start_date, - PeriodicitySchema.end_date, + EventSchema.start_date, + EventSchema.end_date, EventSchema.start_time, EventSchema.end_time, ) .select_from(EventSchema) - .join(UserEventsSchema, UserEventsSchema.event_id == EventSchema.id) - .join(PeriodicitySchema, PeriodicitySchema.id == EventSchema.periodicity_id) - .join(FlowEventsSchema, FlowEventsSchema.event_id == EventSchema.id) - .where(EventSchema.is_deleted == false(), PeriodicitySchema.type != PeriodicityType.ALWAYS) + .where(EventSchema.is_deleted == false(), EventSchema.periodicity != PeriodicityType.ALWAYS) ).cte("user_flow_events") query = ( @@ -327,14 +318,14 @@ def 
filter_events(raw_events_rows: list[TRawRow], schedule_date: datetime.date) case PeriodicityType.DAILY: if row.is_crossday_event: row.end_date += datetime.timedelta(days=1) - if schedule_date >= row.start_date and schedule_date <= row.end_date: + if row.start_date <= schedule_date <= row.end_date: filtered.append(row) case PeriodicityType.ONCE: schedule_start_date = row.selected_date row.end_date = row.selected_date if row.is_crossday_event: row.end_date += datetime.timedelta(days=1) - if schedule_date >= schedule_start_date and schedule_date <= row.end_date: + if schedule_start_date <= schedule_date <= row.end_date: filtered.append(row) case PeriodicityType.WEEKDAYS: last_weekday = FRIDAY_WEEKDAY @@ -342,11 +333,7 @@ def filter_events(raw_events_rows: list[TRawRow], schedule_date: datetime.date) last_weekday = SATURDAY_WEEKDAY if row.end_date.weekday() == FRIDAY_WEEKDAY: row.end_date += datetime.timedelta(days=1) - if ( - schedule_date.weekday() <= last_weekday - and schedule_date >= row.start_date - and schedule_date <= row.end_date - ): + if schedule_date.weekday() <= last_weekday and row.start_date <= schedule_date <= row.end_date: filtered.append(row) case PeriodicityType.WEEKLY: scheduled_weekday = row.start_date.weekday() @@ -362,10 +349,8 @@ def filter_events(raw_events_rows: list[TRawRow], schedule_date: datetime.date) if row.start_date.weekday() == row.end_date.weekday(): row.end_date += datetime.timedelta(days=1) if ( - (schedule_date.weekday() == scheduled_weekday or schedule_date.weekday() == following_weekday) - and schedule_date >= row.start_date - and schedule_date <= row.end_date - ): + schedule_date.weekday() == scheduled_weekday or schedule_date.weekday() == following_weekday + ) and row.start_date <= schedule_date <= row.end_date: filtered.append(row) case PeriodicityType.MONTHLY: scheduled_monthday = row.start_date.day @@ -382,14 +367,10 @@ def filter_events(raw_events_rows: list[TRawRow], schedule_date: datetime.date) ): row.end_date += 
datetime.timedelta(days=1) if ( - ( - schedule_date.day == scheduled_monthday - or schedule_date.day == following_monthday - or (is_last_day_of_month(schedule_date) and row.start_date) - ) - and schedule_date >= row.start_date - and schedule_date <= row.end_date - ): + schedule_date.day == scheduled_monthday + or schedule_date.day == following_monthday + or (is_last_day_of_month(schedule_date) and row.start_date) + ) and row.start_date <= schedule_date <= row.end_date: filtered.append(row) return filtered @@ -505,27 +486,24 @@ async def get_user_activity_events( select( EventSchema.applet_id, EventSchema.id.label("event_id"), - UserEventsSchema.user_id, - ActivityEventsSchema.activity_id, - PeriodicitySchema.type.label("event_type"), + EventSchema.user_id, + EventSchema.activity_id, + EventSchema.periodicity.label("event_type"), case( ( - PeriodicitySchema.type.in_(("WEEKDAYS", "DAILY")), + EventSchema.periodicity.in_(("WEEKDAYS", "DAILY")), scheduled_date, ), - (PeriodicitySchema.type.in_(("WEEKLY", "MONTHLY")), PeriodicitySchema.start_date), - else_=PeriodicitySchema.selected_date, + (EventSchema.periodicity.in_(("WEEKLY", "MONTHLY")), EventSchema.start_date), + else_=EventSchema.selected_date, ).label("selected_date"), - PeriodicitySchema.start_date, - PeriodicitySchema.end_date, + EventSchema.start_date, + EventSchema.end_date, EventSchema.start_time, EventSchema.end_time, ) .select_from(EventSchema) - .join(UserEventsSchema, UserEventsSchema.event_id == EventSchema.id) - .join(PeriodicitySchema, PeriodicitySchema.id == EventSchema.periodicity_id) - .join(ActivityEventsSchema, ActivityEventsSchema.event_id == EventSchema.id) - .where(EventSchema.is_deleted == false(), PeriodicitySchema.type != PeriodicityType.ALWAYS) + .where(EventSchema.is_deleted == false(), EventSchema.periodicity != PeriodicityType.ALWAYS) ).cte("user_activity_events") query = ( diff --git a/src/apps/schedule/crud/events.py b/src/apps/schedule/crud/events.py index b95e0cfd5b6..5ebc4870db7 
100644 --- a/src/apps/schedule/crud/events.py +++ b/src/apps/schedule/crud/events.py @@ -4,48 +4,31 @@ from sqlalchemy import Integer, update from sqlalchemy.exc import IntegrityError, MultipleResultsFound, NoResultFound from sqlalchemy.orm import Query -from sqlalchemy.sql import and_, delete, distinct, func, or_, select +from sqlalchemy.sql import and_, delete, func, or_, select from sqlalchemy.sql.expression import case, cast from apps.activities.db.schemas import ActivitySchema from apps.activity_flows.db.schemas import ActivityFlowSchema from apps.schedule.db.schemas import ( - ActivityEventsSchema, EventSchema, - FlowEventsSchema, - UserEventsSchema, ) -from apps.schedule.domain.constants import PeriodicityType +from apps.schedule.domain.constants import EventType, PeriodicityType from apps.schedule.domain.schedule.internal import ( - ActivityEvent, - ActivityEventCreate, Event, EventCreate, EventFull, EventUpdate, - FlowEvent, - FlowEventCreate, - UserEvent, - UserEventCreate, ) from apps.schedule.domain.schedule.public import ActivityEventCount, FlowEventCount from apps.schedule.errors import ( - ActivityEventAlreadyExists, EventError, EventNotFoundError, - FlowEventAlreadyExists, - UserEventAlreadyExists, ) from apps.workspaces.db.schemas import UserAppletAccessSchema from apps.workspaces.domain.constants import Role from infrastructure.database import BaseCRUD -__all__ = [ - "EventCRUD", - "UserEventsCRUD", - "ActivityEventsCRUD", - "FlowEventsCRUD", -] +__all__ = ["EventCRUD"] class EventCRUD(BaseCRUD[EventSchema]): @@ -83,17 +66,12 @@ async def get_all_by_applet_id_with_filter( ) -> list[EventSchema]: """Return event instance.""" query: Query = select(EventSchema) - query = query.join( - UserEventsSchema, - UserEventsSchema.event_id == EventSchema.id, - isouter=True, - ) query = query.where(EventSchema.applet_id == applet_id) query = query.where(EventSchema.is_deleted.is_(False)) if respondent_id: - query = query.where(UserEventsSchema.user_id == 
respondent_id) + query = query.where(EventSchema.user_id == respondent_id) else: - query = query.where(UserEventsSchema.user_id == None) # noqa: E711 + query = query.where(EventSchema.user_id.is_(None)) result = await self._execute(query) return result.scalars().all() @@ -101,15 +79,10 @@ async def get_all_by_applet_id_with_filter( async def get_public_by_applet_id(self, applet_id: uuid.UUID) -> list[EventSchema]: """Return event instance.""" query: Query = select(EventSchema) - query = query.join( - UserEventsSchema, - UserEventsSchema.event_id == EventSchema.id, - isouter=True, - ) query = query.where(EventSchema.applet_id == applet_id) query = query.distinct(EventSchema.id) - query = query.where(UserEventsSchema.user_id == None) # noqa: E711 - query = query.where(EventSchema.is_deleted == False) # noqa: E712 + query = query.where(EventSchema.user_id.is_(None)) + query = query.where(EventSchema.is_deleted.is_(False)) result = await self._execute(query) return result.scalars().all() @@ -152,34 +125,12 @@ async def update(self, pk: uuid.UUID, schema: EventUpdate) -> Event: async def get_all_by_applet_and_user(self, applet_id: uuid.UUID, user_id: uuid.UUID) -> list[EventFull]: """Get events by applet_id and user_id""" + query: Query = select(EventSchema) - query: Query = select( - EventSchema, - ActivityEventsSchema.activity_id, - FlowEventsSchema.flow_id, - ) - query = query.join( - UserEventsSchema, - and_( - EventSchema.id == UserEventsSchema.event_id, - UserEventsSchema.user_id == user_id, - ), - ) - - query = query.join( - FlowEventsSchema, - FlowEventsSchema.event_id == EventSchema.id, - isouter=True, - ) - query = query.join( - ActivityEventsSchema, - ActivityEventsSchema.event_id == EventSchema.id, - isouter=True, + query = query.where( + EventSchema.applet_id == applet_id, EventSchema.user_id == user_id, EventSchema.is_deleted.is_(False) ) - query = query.where(EventSchema.applet_id == applet_id) - query = query.where(EventSchema.is_deleted == False) # 
noqa: E712 - db_result = await self._execute(query) events = [] @@ -199,8 +150,9 @@ async def get_all_by_applet_and_user(self, applet_id: uuid.UUID, user_id: uuid.U start_date=row.EventSchema.start_date, end_date=row.EventSchema.end_date, selected_date=row.EventSchema.selected_date, - activity_id=row.activity_id, - flow_id=row.flow_id, + activity_id=row.EventSchema.activity_id, + flow_id=row.EventSchema.activity_flow_id, + event_type=row.EventSchema.event_type, ) ) return events @@ -215,32 +167,12 @@ async def get_all_by_applets_and_user( """Get events by applet_ids and user_id Return {applet_id: [EventFull]}""" - query: Query = select( - EventSchema, - ActivityEventsSchema.activity_id, - FlowEventsSchema.flow_id, - ) - query = query.join( - UserEventsSchema, - and_( - EventSchema.id == UserEventsSchema.event_id, - UserEventsSchema.user_id == user_id, - ), - ) - - query = query.join( - FlowEventsSchema, - FlowEventsSchema.event_id == EventSchema.id, - isouter=True, - ) - query = query.join( - ActivityEventsSchema, - ActivityEventsSchema.event_id == EventSchema.id, - isouter=True, + query: Query = select(EventSchema) + query = query.where( + EventSchema.applet_id.in_(applet_ids), + EventSchema.is_deleted.is_(False), + EventSchema.user_id == user_id, ) - - query = query.where(EventSchema.applet_id.in_(applet_ids)) - query = query.where(EventSchema.is_deleted == False) # noqa: E712 if min_end_date and max_start_date: query = query.where( or_( @@ -276,7 +208,7 @@ async def get_all_by_applets_and_user( id=row.EventSchema.id, start_time=row.EventSchema.start_time, end_time=row.EventSchema.end_time, - access_before_schedule=row.EventSchema.access_before_schedule, # noqa: E501 + access_before_schedule=row.EventSchema.access_before_schedule, one_time_completion=row.EventSchema.one_time_completion, timer=row.EventSchema.timer, timer_type=row.EventSchema.timer_type, @@ -286,8 +218,9 @@ async def get_all_by_applets_and_user( start_date=row.EventSchema.start_date, 
end_date=row.EventSchema.end_date, selected_date=row.EventSchema.selected_date, - activity_id=row.activity_id, - flow_id=row.flow_id, + activity_id=row.EventSchema.activity_id, + flow_id=row.EventSchema.activity_flow_id, + event_type=row.EventSchema.event_type, ) ) @@ -308,26 +241,15 @@ async def get_all_by_applet_and_activity( ) -> list[EventSchema]: """Get events by applet_id and activity_id""" query: Query = select(EventSchema) - query = query.join( - ActivityEventsSchema, - and_( - EventSchema.id == ActivityEventsSchema.event_id, - ActivityEventsSchema.activity_id == activity_id, - ), - ) - # differentiate general and individual events - query = query.join( - UserEventsSchema, - EventSchema.id == UserEventsSchema.event_id, - isouter=True, - ) # select only always available if requested if only_always_available: - query.where(EventSchema.periodicity == PeriodicityType.ALWAYS) - query = query.where(EventSchema.applet_id == applet_id) - query = query.where(EventSchema.is_deleted.is_(False)) - - query = query.where(UserEventsSchema.user_id == respondent_id) + query = query.where(EventSchema.periodicity == PeriodicityType.ALWAYS) + query = query.where( + EventSchema.applet_id == applet_id, + EventSchema.is_deleted.is_(False), + EventSchema.activity_id == activity_id, + EventSchema.user_id == respondent_id, + ) result = await self._execute(query) return result.scalars().all() @@ -344,34 +266,18 @@ async def validate_existing_always_available( query = query.select_from(EventSchema) if activity_id: - query = query.join( - ActivityEventsSchema, - and_( - EventSchema.id == ActivityEventsSchema.event_id, - ActivityEventsSchema.activity_id == activity_id, - ), - ) + query = query.where(EventSchema.activity_id == activity_id) if flow_id: - query = query.join( - FlowEventsSchema, - and_( - EventSchema.id == FlowEventsSchema.event_id, - FlowEventsSchema.flow_id == flow_id, - ), - ) + query = query.where(EventSchema.activity_flow_id == flow_id) - # differentiate general and 
individual events - query = query.join( - UserEventsSchema, - EventSchema.id == UserEventsSchema.event_id, - isouter=True, + query = query.where( + EventSchema.periodicity == PeriodicityType.ALWAYS, + EventSchema.applet_id == applet_id, + EventSchema.is_deleted.is_(False), + EventSchema.user_id == respondent_id, ) - query.where(EventSchema.periodicity == PeriodicityType.ALWAYS) - query = query.where(EventSchema.applet_id == applet_id) - query = query.where(EventSchema.is_deleted.is_(False)) - query = query.where(UserEventsSchema.user_id == respondent_id) query = query.limit(1) result = await self._execute(query) @@ -386,102 +292,48 @@ async def get_all_by_applet_and_flow( ) -> list[EventSchema]: """Get events by applet_id and flow_id""" query: Query = select(EventSchema) - query = query.join( - FlowEventsSchema, - and_( - EventSchema.id == FlowEventsSchema.event_id, - FlowEventsSchema.flow_id == flow_id, - ), - ) - - # differentiate general and individual events - query = query.join( - UserEventsSchema, - EventSchema.id == UserEventsSchema.event_id, - isouter=True, - ) # select only always available if requested if only_always_available: - query.where(EventSchema.periodicity == PeriodicityType.ALWAYS) + query = query.where(EventSchema.periodicity == PeriodicityType.ALWAYS) - query = query.where(EventSchema.applet_id == applet_id) - query = query.where(EventSchema.is_deleted.is_(False)) - - query = query.where(UserEventsSchema.user_id == respondent_id) + query = query.where( + EventSchema.applet_id == applet_id, + EventSchema.is_deleted.is_(False), + EventSchema.user_id == respondent_id, + EventSchema.activity_flow_id == flow_id, + ) result = await self._execute(query) return result.scalars().all() async def get_general_events_by_user(self, applet_id: uuid.UUID, user_id: uuid.UUID) -> list[EventFull]: """Get general events by applet_id and user_id""" - # select flow_ids to exclude - flow_ids = ( - select(distinct(FlowEventsSchema.flow_id)) - 
.select_from(FlowEventsSchema) - .join( - UserEventsSchema, - UserEventsSchema.event_id == FlowEventsSchema.event_id, - ) - .join( - EventSchema, - EventSchema.id == FlowEventsSchema.event_id, - ) - .where(UserEventsSchema.user_id == user_id) - .where(EventSchema.applet_id == applet_id) - ) - activity_ids = ( - select(distinct(ActivityEventsSchema.activity_id)) - .select_from(ActivityEventsSchema) - .join( - UserEventsSchema, - UserEventsSchema.event_id == ActivityEventsSchema.event_id, - ) - .join( - EventSchema, - EventSchema.id == ActivityEventsSchema.event_id, - ) - .where(UserEventsSchema.user_id == user_id) - .where(EventSchema.applet_id == applet_id) - ) - - query: Query = select( - EventSchema, - ActivityEventsSchema.activity_id, - FlowEventsSchema.flow_id, - ) - query = query.join( - FlowEventsSchema, - FlowEventsSchema.event_id == EventSchema.id, - isouter=True, - ) - query = query.join( - ActivityEventsSchema, - ActivityEventsSchema.event_id == EventSchema.id, - isouter=True, - ) - query = query.join( - UserEventsSchema, - UserEventsSchema.event_id == EventSchema.id, - isouter=True, + # select flow and activity ids to exclude + ids = ( + select( + func.coalesce(EventSchema.activity_flow_id, EventSchema.activity_id).label("entity_id"), + ) + .select_from(EventSchema) + .where(EventSchema.user_id == user_id, EventSchema.applet_id == applet_id) + .group_by("entity_id") ) - query = query.where(EventSchema.applet_id == applet_id) - query = query.where(EventSchema.is_deleted == False) # noqa: E712 + query: Query = select(EventSchema) query = query.where( + EventSchema.applet_id == applet_id, + EventSchema.is_deleted.is_(False), or_( - FlowEventsSchema.flow_id.is_(None), - FlowEventsSchema.flow_id.not_in(flow_ids), - ) - ) - query = query.where( + EventSchema.activity_flow_id.is_(None), + EventSchema.activity_flow_id.not_in(ids), + ), or_( - ActivityEventsSchema.activity_id.is_(None), - ActivityEventsSchema.activity_id.not_in(activity_ids), - ) + 
EventSchema.activity_id.is_(None), + EventSchema.activity_id.not_in(ids), + ), + EventSchema.user_id.is_(None), ) - query = query.where(UserEventsSchema.user_id == None) # noqa: E711 db_result = await self._execute(query) @@ -502,8 +354,9 @@ async def get_general_events_by_user(self, applet_id: uuid.UUID, user_id: uuid.U start_date=row.EventSchema.start_date, end_date=row.EventSchema.end_date, selected_date=row.EventSchema.selected_date, - activity_id=row.activity_id, - flow_id=row.flow_id, + activity_id=row.EventSchema.activity_id, + flow_id=row.EventSchema.activity_flow_id, + event_type=row.EventSchema.event_type, ) ) return events @@ -516,73 +369,32 @@ async def get_general_events_by_applets_and_user( max_start_date: date | None = None, ) -> tuple[dict[uuid.UUID, list[EventFull]], set[uuid.UUID]]: """Get general events by applet_id and user_id""" - # select flow_ids to exclude - flow_ids = ( - select(distinct(FlowEventsSchema.flow_id)) - .select_from(FlowEventsSchema) - .join( - UserEventsSchema, - UserEventsSchema.event_id == FlowEventsSchema.event_id, - ) - .join( - EventSchema, - EventSchema.id == FlowEventsSchema.event_id, - ) - .where(UserEventsSchema.user_id == user_id) - .where(EventSchema.applet_id.in_(applet_ids)) - ) - activity_ids = ( - select(distinct(ActivityEventsSchema.activity_id)) - .select_from(ActivityEventsSchema) - .join( - UserEventsSchema, - UserEventsSchema.event_id == ActivityEventsSchema.event_id, - ) - .join( - EventSchema, - EventSchema.id == ActivityEventsSchema.event_id, - ) - .where(UserEventsSchema.user_id == user_id) - .where(EventSchema.applet_id.in_(applet_ids)) - ) - query: Query = select( - EventSchema, - ActivityEventsSchema.activity_id, - FlowEventsSchema.flow_id, + # select flow and activity ids to exclude + ids = ( + select( + func.coalesce(EventSchema.activity_flow_id, EventSchema.activity_id).label("entity_id"), + ) + .select_from(EventSchema) + .where(EventSchema.user_id == user_id, 
EventSchema.applet_id.in_(applet_ids)) + .group_by("entity_id") ) - query = query.join( - FlowEventsSchema, - FlowEventsSchema.event_id == EventSchema.id, - isouter=True, - ) - query = query.join( - ActivityEventsSchema, - ActivityEventsSchema.event_id == EventSchema.id, - isouter=True, - ) - query = query.join( - UserEventsSchema, - UserEventsSchema.event_id == EventSchema.id, - isouter=True, - ) + query: Query = select(EventSchema) - query = query.where(EventSchema.applet_id.in_(applet_ids)) - query = query.where(EventSchema.is_deleted == False) # noqa: E712 query = query.where( + EventSchema.applet_id.in_(applet_ids), + EventSchema.is_deleted.is_(False), or_( - FlowEventsSchema.flow_id.is_(None), - FlowEventsSchema.flow_id.not_in(flow_ids), - ) - ) - query = query.where( + EventSchema.activity_flow_id.is_(None), + EventSchema.activity_flow_id.not_in(ids), + ), or_( - ActivityEventsSchema.activity_id.is_(None), - ActivityEventsSchema.activity_id.not_in(activity_ids), - ) + EventSchema.activity_id.is_(None), + EventSchema.activity_id.not_in(ids), + ), + EventSchema.user_id.is_(None), ) - query = query.where(UserEventsSchema.user_id == None) # noqa: E711 if min_end_date and max_start_date: query = query.where( or_( @@ -628,8 +440,9 @@ async def get_general_events_by_applets_and_user( start_date=row.EventSchema.start_date, end_date=row.EventSchema.end_date, selected_date=row.EventSchema.selected_date, - activity_id=row.activity_id, - flow_id=row.flow_id, + activity_id=row.EventSchema.activity_id, + flow_id=row.EventSchema.activity_flow_id, + event_type=row.EventSchema.event_type, ) ) @@ -637,70 +450,34 @@ async def get_general_events_by_applets_and_user( async def count_general_events_by_user(self, applet_id: uuid.UUID, user_id: uuid.UUID) -> int: """Count general events by applet_id and user_id""" - flow_ids = ( - select(distinct(FlowEventsSchema.flow_id)) - .select_from(FlowEventsSchema) - .join( - UserEventsSchema, - UserEventsSchema.event_id == 
FlowEventsSchema.event_id, - ) - .join( - EventSchema, - EventSchema.id == FlowEventsSchema.event_id, - ) - .where(UserEventsSchema.user_id == user_id) - .where(EventSchema.applet_id == applet_id) - ) - activity_ids = ( - select(distinct(ActivityEventsSchema.activity_id)) - .select_from(ActivityEventsSchema) - .join( - UserEventsSchema, - UserEventsSchema.event_id == ActivityEventsSchema.event_id, - ) - .join( - EventSchema, - EventSchema.id == ActivityEventsSchema.event_id, + + # select flow and activity ids to exclude + ids = ( + select( + func.coalesce(EventSchema.activity_flow_id, EventSchema.activity_id).label("entity_id"), ) - .where(UserEventsSchema.user_id == user_id) - .where(EventSchema.applet_id == applet_id) + .select_from(EventSchema) + .where(EventSchema.user_id == user_id, EventSchema.applet_id == applet_id) + .group_by("entity_id") ) query: Query = select( func.count(EventSchema.id).label("count"), ) - query = query.join( - FlowEventsSchema, - FlowEventsSchema.event_id == EventSchema.id, - isouter=True, - ) - query = query.join( - ActivityEventsSchema, - ActivityEventsSchema.event_id == EventSchema.id, - isouter=True, - ) - query = query.join( - UserEventsSchema, - UserEventsSchema.event_id == EventSchema.id, - isouter=True, - ) - - query = query.where(EventSchema.applet_id == applet_id) - query = query.where(EventSchema.is_deleted == False) # noqa: E712 query = query.where( + EventSchema.applet_id == applet_id, + EventSchema.is_deleted.is_(False), or_( - FlowEventsSchema.flow_id.is_(None), - FlowEventsSchema.flow_id.not_in(flow_ids), - ) - ) - query = query.where( + EventSchema.activity_flow_id.is_(None), + EventSchema.activity_flow_id.not_in(ids), + ), or_( - ActivityEventsSchema.activity_id.is_(None), - ActivityEventsSchema.activity_id.not_in(activity_ids), - ) + EventSchema.activity_id.is_(None), + EventSchema.activity_id.not_in(ids), + ), + EventSchema.user_id.is_(None), ) - query = query.where(UserEventsSchema.user_id == None) # noqa: E711 
db_result = await self._execute(query) return db_result.scalar() @@ -709,16 +486,11 @@ async def count_individual_events_by_user(self, applet_id: uuid.UUID, user_id: u """Count individual events by applet_id and user_id""" query: Query = select(func.count(EventSchema.id)) - query = query.join( - UserEventsSchema, - and_( - EventSchema.id == UserEventsSchema.event_id, - UserEventsSchema.user_id == user_id, - ), + query = query.where( + EventSchema.applet_id == applet_id, + EventSchema.is_deleted.is_(False), + EventSchema.user_id == user_id, ) - - query = query.where(EventSchema.applet_id == applet_id) - query = query.where(EventSchema.is_deleted == False) # noqa: E712 db_result = await self._execute(query) return db_result.scalar() @@ -729,21 +501,13 @@ async def get_all_by_activity_flow_ids( is_activity: bool, ) -> list[EventSchema]: """Return events for given activity ids.""" - query: Query = select(self.schema_class) - query = query.where(self.schema_class.applet_id == applet_id) + query: Query = select(EventSchema) + query = query.where(EventSchema.applet_id == applet_id) if is_activity: - query = query.join( - ActivityEventsSchema, - ActivityEventsSchema.event_id == self.schema_class.id, - ) - query = query.where(ActivityEventsSchema.activity_id.in_(activity_ids)) + query = query.where(EventSchema.activity_id.in_(activity_ids)) else: - query = query.join( - FlowEventsSchema, - FlowEventsSchema.event_id == self.schema_class.id, - ) - query = query.where(FlowEventsSchema.flow_id.in_(activity_ids)) + query = query.where(EventSchema.activity_flow_id.in_(activity_ids)) result = await self._execute(query) events = result.scalars().all() @@ -751,323 +515,140 @@ async def get_all_by_activity_flow_ids( async def get_default_schedule_user_ids_by_applet_id(self, applet_id: uuid.UUID) -> list[uuid.UUID]: """Return user ids for default schedule.""" - individual_schedule_users = ( - select(UserEventsSchema.user_id) - .join(EventSchema, UserEventsSchema.event_id == 
EventSchema.id) - .where(EventSchema.applet_id == applet_id) - .where(EventSchema.is_deleted == False) # noqa: E712 + individual_schedule_users = select(EventSchema.user_id).where( + EventSchema.applet_id == applet_id, + EventSchema.is_deleted.is_(False), + EventSchema.user_id.isnot(None), ) + query: Query = select(UserAppletAccessSchema.user_id.label("user_id")) query = query.where(UserAppletAccessSchema.applet_id == applet_id) query = query.where(UserAppletAccessSchema.role == Role.RESPONDENT) - query = query.where(UserAppletAccessSchema.is_deleted == False) # noqa: E712 + query = query.where(UserAppletAccessSchema.is_deleted.is_(False)) query = query.where(UserAppletAccessSchema.user_id.not_in(individual_schedule_users)) result = await self._execute(query) result = result.scalars().all() return result - -class UserEventsCRUD(BaseCRUD[UserEventsSchema]): - schema_class = UserEventsSchema - - async def save(self, schema: UserEventCreate) -> UserEvent: - """Return user event instance and the created information.""" - try: - instance: UserEventsSchema = await self._create(UserEventsSchema(**schema.dict())) - except IntegrityError: - raise UserEventAlreadyExists(user_id=schema.user_id, event_id=schema.event_id) - - user_event: UserEvent = UserEvent.from_orm(instance) - return user_event - - async def get_by_event_id(self, event_id: uuid.UUID) -> uuid.UUID | None: - """Return user event instances.""" - query: Query = select(distinct(UserEventsSchema.user_id)) - query = query.where(UserEventsSchema.event_id == event_id) - query = query.where(UserEventsSchema.is_deleted == False) # noqa: E712 - db_result = await self._execute(query) - - try: - result: uuid.UUID = db_result.scalars().one_or_none() - except MultipleResultsFound: - raise EventError() - - return result - - async def delete_all_by_event_ids(self, event_ids: list[uuid.UUID]): - """Delete all user events by event ids.""" - query: Query = delete(UserEventsSchema) - query = 
query.where(UserEventsSchema.event_id.in_(event_ids)) - await self._execute(query) - - async def delete_all_by_events_and_user(self, event_ids: list[uuid.UUID], user_id: uuid.UUID): - """Delete all user events by event ids.""" - query: Query = delete(UserEventsSchema) - query = query.where(UserEventsSchema.event_id.in_(event_ids)) - query = query.where(UserEventsSchema.user_id == user_id) - await self._execute(query) - - -class ActivityEventsCRUD(BaseCRUD[ActivityEventsSchema]): - schema_class = ActivityEventsSchema - - async def save(self, schema: ActivityEventCreate) -> ActivityEvent: - """Return activity event instance and the created information.""" - - try: - instance: ActivityEventsSchema = await self._create(ActivityEventsSchema(**schema.dict())) - except IntegrityError: - raise ActivityEventAlreadyExists(activity_id=schema.activity_id, event_id=schema.event_id) - - activity_event: ActivityEvent = ActivityEvent.from_orm(instance) - return activity_event - - async def get_by_event_id(self, event_id: uuid.UUID) -> uuid.UUID | None: - """Return activity event instances.""" - query: Query = select(ActivityEventsSchema.activity_id) - query = query.where(ActivityEventsSchema.event_id == event_id) - query = query.where( - ActivityEventsSchema.is_deleted == False # noqa: E712 - ) - result = await self._execute(query) - - try: - activity_id = result.scalars().one_or_none() - except MultipleResultsFound: - raise EventError() - return activity_id - - async def delete_all_by_event_ids(self, event_ids: list[uuid.UUID]): - """Delete all activity events by event ids.""" - query: Query = delete(ActivityEventsSchema) - query = query.where(ActivityEventsSchema.event_id.in_(event_ids)) - await self._execute(query) - - async def count_by_applet(self, applet_id: uuid.UUID) -> list[ActivityEventCount]: - """Return activity ids with event count.""" + async def count_by_activity(self, activity_id: uuid.UUID, respondent_id: uuid.UUID | None) -> int: + """Return event count.""" 
query: Query = select( - ActivitySchema.id, - func.count(ActivityEventsSchema.event_id).label("count"), - ActivitySchema.name, + func.count(EventSchema.id).label("count"), ) - query = query.select_from(ActivitySchema) - query = query.join( - ActivityEventsSchema, - and_( - ActivitySchema.id == ActivityEventsSchema.activity_id, - ActivityEventsSchema.is_deleted == False, # noqa: E712 - ), - isouter=True, + query = query.where( + EventSchema.activity_id == activity_id, + EventSchema.is_deleted.is_(False), + EventSchema.user_id == respondent_id, ) - query = query.join(EventSchema, ActivityEventsSchema.event_id == EventSchema.id) - - query = query.filter(ActivitySchema.is_deleted == False) # noqa: E712 - query = query.filter(ActivitySchema.applet_id == applet_id) - query = query.filter(EventSchema.periodicity != PeriodicityType.ALWAYS) - query = query.group_by(ActivitySchema.applet_id, ActivitySchema.id) result = await self._execute(query) - activity_event_counts: list[ActivityEventCount] = [ - ActivityEventCount( - activity_id=activity_id, - count=count, - activity_name=name, - ) - for activity_id, count, name in result - ] - - return activity_event_counts + count: int = result.scalar() + return count - async def count_by_activity(self, activity_id: uuid.UUID, respondent_id: uuid.UUID | None) -> int: + async def count_by_flow(self, flow_id: uuid.UUID, respondent_id: uuid.UUID | None) -> int: """Return event count.""" query: Query = select( - func.count(ActivityEventsSchema.event_id).label("count"), - ) - query = query.join( - UserEventsSchema, - UserEventsSchema.event_id == ActivityEventsSchema.event_id, - isouter=True, + func.count(EventSchema.id).label("count"), ) - query = query.filter(ActivityEventsSchema.activity_id == activity_id) query = query.filter( - ActivityEventsSchema.is_deleted == False # noqa: E712 + EventSchema.activity_flow_id == flow_id, + EventSchema.is_deleted.is_(False), + EventSchema.user_id == respondent_id, ) - query = 
query.filter(UserEventsSchema.user_id == respondent_id) result = await self._execute(query) count: int = result.scalar() return count - async def get_by_event_ids(self, event_ids: list[uuid.UUID]) -> list[uuid.UUID]: - """Return activity event instances.""" - query: Query = select(distinct(ActivityEventsSchema.activity_id)) - query = query.where(ActivityEventsSchema.event_id.in_(event_ids)) - result = await self._execute(query) - activity_ids = result.scalars().all() - return activity_ids - - async def get_by_applet_id(self, applet_id: uuid.UUID) -> list[ActivityEvent]: - """Return activity event instances.""" - query: Query = select(ActivityEventsSchema) - query = query.join(EventSchema, ActivityEventsSchema.event_id == EventSchema.id) - query = query.where(EventSchema.applet_id == applet_id) - result = await self._execute(query) - activity_events = result.scalars().all() + async def count_by_applet(self, applet_id: uuid.UUID) -> tuple[list[ActivityEventCount], list[FlowEventCount]]: + """Return activity ids and flow ids with event count.""" - return [ActivityEvent.from_orm(activity_event) for activity_event in activity_events] - - async def get_by_applet_and_user_id(self, applet_id: uuid.UUID, user_id: uuid.UUID) -> list[ActivityEvent]: - """Return activity event instances.""" - query: Query = select(ActivityEventsSchema) - query = query.join(EventSchema, ActivityEventsSchema.event_id == EventSchema.id) - query = query.join(UserEventsSchema, EventSchema.id == UserEventsSchema.event_id) - query = query.join( - ActivitySchema, - ActivityEventsSchema.activity_id == ActivitySchema.id, + query: Query = select( + ActivitySchema.id.label("activity_id"), + ActivitySchema.name.label("activity_name"), + ActivityFlowSchema.id.label("flow_id"), + ActivityFlowSchema.name.label("flow_name"), + func.count(EventSchema.id).label("count"), ) - query = query.where(EventSchema.applet_id == applet_id) - query = query.where(UserEventsSchema.user_id == user_id) - result = await 
self._execute(query) - activity_events = result.scalars().all() - - return [ActivityEvent.from_orm(activity_event) for activity_event in activity_events] - - async def get_missing_events(self, activity_ids: list[uuid.UUID]) -> list[uuid.UUID]: - query: Query = select(ActivityEventsSchema.activity_id) - query.join( + query = query.select_from(EventSchema) + query = query.join( ActivitySchema, and_( - ActivitySchema.id == ActivityEventsSchema.activity_id, - ActivitySchema.is_reviewable.is_(False), + ActivitySchema.id == EventSchema.activity_id, + ActivitySchema.is_deleted.is_(False), ), + isouter=True, ) - query.where(ActivityEventsSchema.activity_id.in_(activity_ids)) - res = await self._execute(query) - db_result = res.scalars().all() - return list(set(activity_ids) - set(db_result)) - - -class FlowEventsCRUD(BaseCRUD[FlowEventsSchema]): - schema_class = FlowEventsSchema - - async def save(self, schema: FlowEventCreate) -> FlowEvent: - """Return flow event instance and the created information.""" - try: - instance: FlowEventsSchema = await self._create(FlowEventsSchema(**schema.dict())) - except IntegrityError: - raise FlowEventAlreadyExists(flow_id=schema.flow_id, event_id=schema.event_id) - - flow_event: FlowEvent = FlowEvent.from_orm(instance) - return flow_event - - async def get_by_event_id(self, event_id: uuid.UUID) -> uuid.UUID | None: - """Return flow event instances.""" - query: Query = select(FlowEventsSchema.flow_id) - query = query.where(FlowEventsSchema.event_id == event_id) - query = query.where(FlowEventsSchema.is_deleted == False) # noqa: E712 - result = await self._execute(query) - - try: - flow_id: uuid.UUID = result.scalars().one_or_none() - except MultipleResultsFound: - raise EventError(message=f"Event{event_id} is used in multiple flows".format(event_id=event_id)) - - return flow_id - - async def delete_all_by_event_ids(self, event_ids: list[uuid.UUID]): - """Delete all flow events by event ids.""" - query: Query = delete(FlowEventsSchema) - 
query = query.where(FlowEventsSchema.event_id.in_(event_ids)) - await self._execute(query) - - async def count_by_applet(self, applet_id: uuid.UUID) -> list[FlowEventCount]: - """Return flow ids with event count.""" - - query: Query = select( - ActivityFlowSchema.id, - func.count(FlowEventsSchema.id).label("count"), - ActivityFlowSchema.name, - ) - query = query.select_from(ActivityFlowSchema) - query = query.join( - FlowEventsSchema, + ActivityFlowSchema, and_( - FlowEventsSchema.flow_id == ActivityFlowSchema.id, - FlowEventsSchema.is_deleted == False, # noqa: E712 + ActivityFlowSchema.id == EventSchema.activity_flow_id, + ActivityFlowSchema.is_deleted.is_(False), ), isouter=True, ) - query = query.join(EventSchema, FlowEventsSchema.event_id == EventSchema.id) - query = query.filter(ActivityFlowSchema.applet_id == applet_id) - query = query.filter( - ActivityFlowSchema.is_deleted == False # noqa: E712 + query = query.where( + EventSchema.is_deleted.is_(False), + EventSchema.applet_id == applet_id, + EventSchema.periodicity != PeriodicityType.ALWAYS, ) - query = query.filter(EventSchema.periodicity != PeriodicityType.ALWAYS) - query = query.group_by(ActivityFlowSchema.applet_id, ActivityFlowSchema.id) + query = query.group_by(EventSchema.applet_id, ActivitySchema.id, ActivityFlowSchema.id) result = await self._execute(query) - flow_event_counts: list[FlowEventCount] = [ - FlowEventCount( - flow_id=flow_id, - count=count, - flow_name=name, - ) - for flow_id, count, name in result - ] - - return flow_event_counts - - async def get_by_event_ids(self, event_ids: list[uuid.UUID]) -> list[uuid.UUID]: - """Return flow event instances.""" - query: Query = select(distinct(FlowEventsSchema.flow_id)) - query = query.where(FlowEventsSchema.event_id.in_(event_ids)) - result = await self._execute(query) - flow_ids = result.scalars().all() - return flow_ids + activity_event_counts: list[ActivityEventCount] = [] + flow_event_counts: list[FlowEventCount] = [] + + for activity_id, 
activity_name, flow_id, flow_name, count in result: + if activity_id: + activity_event_counts.append( + ActivityEventCount( + activity_id=activity_id, + count=count, + activity_name=activity_name, + ) + ) + if flow_id: + flow_event_counts.append( + FlowEventCount( + flow_id=flow_id, + count=count, + flow_name=flow_name, + ) + ) - async def count_by_flow(self, flow_id: uuid.UUID, respondent_id: uuid.UUID | None) -> int: - """Return event count.""" + return activity_event_counts, flow_event_counts - query: Query = select( - func.count(FlowEventsSchema.event_id).label("count"), - ) - query = query.join( - UserEventsSchema, - FlowEventsSchema.event_id == UserEventsSchema.event_id, - isouter=True, - ) - query = query.filter(FlowEventsSchema.flow_id == flow_id) - query = query.filter( - FlowEventsSchema.is_deleted == False # noqa: E712 + async def get_activities_without_events(self, activity_ids: list[uuid.UUID]) -> list[uuid.UUID]: + query: Query = select(EventSchema.activity_id) + query.join( + ActivitySchema, + and_( + ActivitySchema.id == EventSchema.activity_id, + ActivitySchema.is_reviewable.is_(False), + ), ) - query = query.filter(UserEventsSchema.user_id == respondent_id) - result = await self._execute(query) - - count: int = result.scalar() - return count + query.where(EventSchema.activity_id.in_(activity_ids)) + res = await self._execute(query) + db_result = res.scalars().all() + return list(set(activity_ids) - set(db_result)) - async def get_by_applet_id(self, applet_id: uuid.UUID) -> list[FlowEvent]: - """Return flow event instances.""" - query: Query = select(FlowEventsSchema) - query = query.join(EventSchema, FlowEventsSchema.event_id == EventSchema.id) - query = query.where(EventSchema.applet_id == applet_id) - result = await self._execute(query) - flow_events = result.scalars().all() + async def get_by_type_and_applet_id(self, applet_id: uuid.UUID, event_type: EventType) -> list[Event]: + """Return event instances of type flow.""" + query: Query = 
select(EventSchema) + query = query.where( + EventSchema.applet_id == applet_id, + EventSchema.event_type == event_type, + ) - return [FlowEvent.from_orm(flow_event) for flow_event in flow_events] + if event_type == EventType.FLOW: + query = query.where(EventSchema.activity_flow_id.isnot(None)) + else: + query = query.where(EventSchema.activity_id.isnot(None)) - async def get_by_applet_and_user_id(self, applet_id: uuid.UUID, user_id: uuid.UUID) -> list[FlowEvent]: - """Return flow event instances.""" - query: Query = select(FlowEventsSchema) - query = query.join(EventSchema, FlowEventsSchema.event_id == EventSchema.id) - query = query.join(UserEventsSchema, EventSchema.id == UserEventsSchema.event_id) - query = query.where(EventSchema.applet_id == applet_id) - query = query.where(UserEventsSchema.user_id == user_id) result = await self._execute(query) flow_events = result.scalars().all() - return [FlowEvent.from_orm(flow_event) for flow_event in flow_events] + return [Event.from_orm(flow_event) for flow_event in flow_events] diff --git a/src/apps/schedule/db/schemas.py b/src/apps/schedule/db/schemas.py index bdfade12234..dcc6f654876 100644 --- a/src/apps/schedule/db/schemas.py +++ b/src/apps/schedule/db/schemas.py @@ -1,5 +1,4 @@ import datetime -import uuid from sqlalchemy import Boolean, Column, Date, ForeignKey, Integer, Interval, String, Time, UniqueConstraint, text from sqlalchemy.dialects.postgresql import ENUM, UUID @@ -8,15 +7,6 @@ from infrastructure.database.mixins import HistoryAware -class PeriodicitySchema(Base): - __tablename__ = "periodicity" - - type = Column(String(10), nullable=False) # Options: ONCE, DAILY, WEEKLY, WEEKDAYS, MONTHLY, ALWAYS - start_date = Column(Date, nullable=True) - end_date = Column(Date, nullable=True) - selected_date = Column(Date, nullable=True) - - class _BaseEventSchema: start_time = Column(Time, nullable=True) end_time = Column(Time, nullable=True) @@ -36,17 +26,16 @@ class _BaseEventSchema: start_date = Column(Date, 
nullable=True) end_date = Column(Date, nullable=True) selected_date = Column(Date, nullable=True) + event_type = Column(ENUM("activity", "flow", name="event_type_enum", create_type=False), nullable=False) + activity_id = Column(UUID(as_uuid=True), nullable=True) + activity_flow_id = Column(UUID(as_uuid=True), nullable=True) class EventSchema(_BaseEventSchema, Base): __tablename__ = "events" - periodicity_id = Column( - UUID(as_uuid=True), - default=lambda: uuid.uuid4(), - server_default=text("gen_random_uuid()"), - ) applet_id = Column(ForeignKey("applets.id", ondelete="CASCADE"), nullable=False) + user_id = Column(ForeignKey("users.id", ondelete="RESTRICT"), nullable=True) class EventHistorySchema(_BaseEventSchema, HistoryAware, Base): @@ -54,9 +43,6 @@ class EventHistorySchema(_BaseEventSchema, HistoryAware, Base): id_version = Column(String(), primary_key=True) id = Column(UUID(as_uuid=True)) - event_type = Column(ENUM("activity", "flow", name="event_type_enum", create_type=False), nullable=False) - activity_id = Column(UUID(as_uuid=True), nullable=True) - activity_flow_id = Column(UUID(as_uuid=True), nullable=True) user_id = Column(ForeignKey("users.id", ondelete="RESTRICT"), nullable=True) @@ -76,54 +62,6 @@ class AppletEventsSchema(Base): ) -class UserEventsSchema(Base): - __tablename__ = "user_events" - - user_id = Column(ForeignKey("users.id", ondelete="RESTRICT"), nullable=False) - event_id = Column(ForeignKey("events.id", ondelete="CASCADE"), nullable=False) - - __table_args__ = ( - UniqueConstraint( - "user_id", - "event_id", - "is_deleted", - name="_unique_user_events", - ), - ) - - -class ActivityEventsSchema(Base): - __tablename__ = "activity_events" - - activity_id = Column(UUID(as_uuid=True), nullable=False) - event_id = Column(ForeignKey("events.id", ondelete="CASCADE"), nullable=False) - - __table_args__ = ( - UniqueConstraint( - "activity_id", - "event_id", - "is_deleted", - name="_unique_activity_events", - ), - ) - - -class 
FlowEventsSchema(Base): - __tablename__ = "flow_events" - - flow_id = Column(UUID(as_uuid=True), nullable=False) - event_id = Column(ForeignKey("events.id", ondelete="CASCADE"), nullable=False) - - __table_args__ = ( - UniqueConstraint( - "flow_id", - "event_id", - "is_deleted", - name="_unique_flow_events", - ), - ) - - class _BaseNotificationSchema: from_time = Column(Time, nullable=True) to_time = Column(Time, nullable=True) diff --git a/src/apps/schedule/domain/schedule/internal.py b/src/apps/schedule/domain/schedule/internal.py index dc47f06c707..f1761eefb6a 100644 --- a/src/apps/schedule/domain/schedule/internal.py +++ b/src/apps/schedule/domain/schedule/internal.py @@ -3,7 +3,7 @@ from pydantic import Field, NonNegativeInt, root_validator -from apps.schedule.domain.constants import AvailabilityType, PeriodicityType, TimerType +from apps.schedule.domain.constants import AvailabilityType, EventType, PeriodicityType, TimerType from apps.schedule.domain.schedule.base import BaseEvent, BaseNotificationSetting, BaseReminderSetting from apps.schedule.domain.schedule.public import ( EventAvailabilityDto, @@ -20,14 +20,8 @@ __all__ = [ "Event", "ScheduleEvent", - "UserEvent", - "ActivityEvent", - "FlowEvent", "EventCreate", "EventUpdate", - "UserEventCreate", - "ActivityEventCreate", - "FlowEventCreate", "EventFull", "NotificationSettingCreate", "NotificationSetting", @@ -45,6 +39,10 @@ class EventCreate(BaseEvent, InternalModel): None, description="If type is WEEKLY, MONTHLY or ONCE, selectedDate must be set.", ) + user_id: uuid.UUID | None = None + activity_id: uuid.UUID | None = None + activity_flow_id: uuid.UUID | None = None + event_type: EventType @root_validator def validate_periodicity(cls, values): @@ -66,39 +64,6 @@ class Event(EventCreate, InternalModel): version: str -class UserEventCreate(InternalModel): - user_id: uuid.UUID - event_id: uuid.UUID - - -class UserEvent(UserEventCreate, InternalModel): - """UserEvent of a schedule""" - - id: uuid.UUID - - 
-class ActivityEventCreate(InternalModel): - activity_id: uuid.UUID - event_id: uuid.UUID - - -class ActivityEvent(ActivityEventCreate, InternalModel): - """ActivityEvent of a schedule""" - - id: uuid.UUID - - -class FlowEventCreate(InternalModel): - flow_id: uuid.UUID - event_id: uuid.UUID - - -class FlowEvent(FlowEventCreate, InternalModel): - """FlowEvent of a schedule""" - - id: uuid.UUID - - class NotificationSettingCreate(BaseNotificationSetting, InternalModel): event_id: uuid.UUID @@ -128,6 +93,7 @@ class EventFull(InternalModel, BaseEvent): activity_id: uuid.UUID | None = None flow_id: uuid.UUID | None = None version: str + event_type: EventType class ScheduleEvent(EventFull): diff --git a/src/apps/schedule/service/schedule.py b/src/apps/schedule/service/schedule.py index 417a605ac73..2356de0780e 100644 --- a/src/apps/schedule/service/schedule.py +++ b/src/apps/schedule/service/schedule.py @@ -6,23 +6,21 @@ from apps.activity_flows.crud import FlowsCRUD from apps.applets.crud import AppletsCRUD, UserAppletAccessCRUD from apps.applets.errors import AppletNotFoundError -from apps.schedule.crud.events import ActivityEventsCRUD, EventCRUD, FlowEventsCRUD, UserEventsCRUD +from apps.schedule.crud.events import EventCRUD from apps.schedule.crud.notification import NotificationCRUD, ReminderCRUD +from apps.schedule.crud.schedule_history import NotificationHistoryCRUD, ReminderHistoryCRUD from apps.schedule.db.schemas import EventSchema, NotificationSchema -from apps.schedule.domain.constants import DefaultEvent, PeriodicityType +from apps.schedule.domain.constants import DefaultEvent, EventType, PeriodicityType from apps.schedule.domain.schedule import BaseEvent from apps.schedule.domain.schedule.internal import ( - ActivityEventCreate, Event, EventCreate, EventFull, EventUpdate, - FlowEventCreate, NotificationSetting, ReminderSetting, ReminderSettingCreate, ScheduleEvent, - UserEventCreate, ) from apps.schedule.domain.schedule.public import ( PublicEvent, @@ 
-95,25 +93,16 @@ async def create_schedule(self, schedule: EventRequest, applet_id: uuid.UUID) -> start_date=schedule.periodicity.start_date, end_date=schedule.periodicity.end_date, selected_date=schedule.periodicity.selected_date, + user_id=schedule.respondent_id, + activity_id=schedule.activity_id, + activity_flow_id=schedule.flow_id, + event_type=EventType.ACTIVITY if schedule.activity_id else EventType.FLOW, ) ) - # Create user event - if schedule.respondent_id: - await UserEventsCRUD(self.session).save(UserEventCreate(event_id=event.id, user_id=schedule.respondent_id)) - # Create event-activity or event-flow - if schedule.activity_id: - await ActivityEventsCRUD(self.session).save( - ActivityEventCreate(event_id=event.id, activity_id=schedule.activity_id) - ) - else: - await FlowEventsCRUD(self.session).save(FlowEventCreate(event_id=event.id, flow_id=schedule.flow_id)) - schedule_event = ScheduleEvent( - **event.dict(exclude={"applet_id"}), - activity_id=schedule.activity_id, - flow_id=schedule.flow_id, - user_id=schedule.respondent_id, + **event.dict(exclude={"applet_id", "activity_flow_id"}), + flow_id=event.activity_flow_id, ) # Create notification and reminder @@ -177,7 +166,6 @@ async def create_schedule(self, schedule: EventRequest, applet_id: uuid.UUID) -> selected_date=event.selected_date, ), respondent_id=schedule.respondent_id, - activity_id=schedule.activity_id, flow_id=schedule.flow_id, notification=notification_public if schedule.notification else None, ) @@ -187,22 +175,18 @@ async def get_schedule_by_id(self, schedule_id: uuid.UUID, applet_id: uuid.UUID) await self._validate_applet(applet_id=applet_id) event: Event = await EventCRUD(self.session).get_by_id(pk=schedule_id) - user_id = await UserEventsCRUD(self.session).get_by_event_id(event_id=event.id) - activity_id = await ActivityEventsCRUD(self.session).get_by_event_id(event_id=event.id) - flow_id = await FlowEventsCRUD(self.session).get_by_event_id(event_id=event.id) notification = await 
self._get_notifications_and_reminder(event.id) return PublicEvent( - **event.dict(exclude={"periodicity"}), + **event.dict(exclude={"periodicity", "user_id", "activity_flow_id"}), periodicity=PublicPeriodicity( type=event.periodicity, start_date=event.start_date, end_date=event.end_date, selected_date=event.selected_date, ), - respondent_id=user_id, - activity_id=activity_id, - flow_id=flow_id, + respondent_id=event.user_id, + flow_id=event.activity_flow_id, notification=notification, ) @@ -224,24 +208,19 @@ async def get_all_schedules(self, applet_id: uuid.UUID, query: QueryParams | Non for event_schema in event_schemas: event: Event = Event.from_orm(event_schema) - - user_id = await UserEventsCRUD(self.session).get_by_event_id(event_id=event.id) - activity_id = await ActivityEventsCRUD(self.session).get_by_event_id(event_id=event.id) - flow_id = await FlowEventsCRUD(self.session).get_by_event_id(event_id=event.id) notification = await self._get_notifications_and_reminder(event.id) events.append( PublicEvent( - **event.dict(exclude={"periodicity"}), + **event.dict(exclude={"periodicity", "user_id", "activity_flow_id"}), periodicity=PublicPeriodicity( type=event.periodicity, start_date=event.start_date, end_date=event.end_date, selected_date=event.selected_date, ), - respondent_id=user_id, - activity_id=activity_id, - flow_id=flow_id, + respondent_id=event.user_id, + flow_id=event.activity_flow_id, notification=notification, ) ) @@ -257,8 +236,6 @@ async def get_public_all_schedules(self, key: uuid.UUID) -> PublicEventByUser: full_events: list[EventFull] = [] for event_schema in event_schemas: event: Event = Event.from_orm(event_schema) - activity_id = await ActivityEventsCRUD(self.session).get_by_event_id(event_id=event.id) - flow_id = await FlowEventsCRUD(self.session).get_by_event_id(event_id=event.id) base_event = BaseEvent(**event.dict()) full_events.append( @@ -269,9 +246,11 @@ async def get_public_all_schedules(self, key: uuid.UUID) -> PublicEventByUser: 
start_date=event.start_date, end_date=event.end_date, selected_date=event.selected_date, - activity_id=activity_id, - flow_id=flow_id, + activity_id=event.activity_id, + flow_id=event.activity_flow_id, + user_id=event.user_id, version=event.version, + event_type=event.event_type, ) ) @@ -298,10 +277,6 @@ async def delete_all_schedules(self, applet_id: uuid.UUID): event_schemas: list[EventSchema] = await EventCRUD(self.session).get_all_by_applet_id_with_filter(applet_id) event_ids = [event_schema.id for event_schema in event_schemas] - # Get all activity_ids and flow_ids - activity_ids = await ActivityEventsCRUD(self.session).get_by_event_ids(event_ids) - flow_ids = await FlowEventsCRUD(self.session).get_by_event_ids(event_ids) - await self._delete_by_ids(event_ids) await ScheduleHistoryService(self.session).mark_as_deleted( @@ -309,50 +284,54 @@ async def delete_all_schedules(self, applet_id: uuid.UUID): ) # Create default events for activities and flows - for activity_id in activity_ids: - await self._create_default_event(applet_id=applet_id, activity_id=activity_id, is_activity=True) - - for flow_id in flow_ids: - await self._create_default_event(applet_id=applet_id, activity_id=flow_id, is_activity=False) + processed_activities_and_flows: dict[uuid.UUID, bool] = {} + for event in event_schemas: + if event.activity_id and event.activity_id not in processed_activities_and_flows: + await self._create_default_event( + applet_id=applet_id, + activity_id=event.activity_id, + is_activity=True, + respondent_id=event.user_id, + ) + processed_activities_and_flows[event.activity_id] = True + if event.activity_flow_id and event.activity_flow_id not in processed_activities_and_flows: + await self._create_default_event( + applet_id=applet_id, + activity_id=event.activity_flow_id, + is_activity=False, + respondent_id=event.user_id, + ) + processed_activities_and_flows[event.activity_flow_id] = True async def delete_schedule_by_id(self, schedule_id: uuid.UUID) -> uuid.UUID | 
None: - event: Event = await EventCRUD(self.session).get_by_id(pk=schedule_id) - respondent_id = await UserEventsCRUD(self.session).get_by_event_id(event_id=schedule_id) + crud = EventCRUD(self.session) + event: Event = await crud.get_by_id(pk=schedule_id) - # Get activity_id or flow_id if exists - activity_id = await ActivityEventsCRUD(self.session).get_by_event_id(event_id=schedule_id) - flow_id = await FlowEventsCRUD(self.session).get_by_event_id(event_id=schedule_id) - - # Delete event-user, event-activity, event-flow await self._delete_by_ids(event_ids=[schedule_id]) await ScheduleHistoryService(self.session).mark_as_deleted([(event.id, event.version)]) - # Create default event for activity or flow if another event doesn't exist # noqa: E501 - if activity_id: - count_events = await ActivityEventsCRUD(self.session).count_by_activity( - activity_id=activity_id, respondent_id=respondent_id - ) + # Create default event for activity or flow if another event doesn't exist + if event.activity_id: + count_events = await crud.count_by_activity(activity_id=event.activity_id, respondent_id=event.user_id) if count_events == 0: await self._create_default_event( applet_id=event.applet_id, - activity_id=activity_id, + activity_id=event.activity_id, is_activity=True, - respondent_id=respondent_id, + respondent_id=event.user_id, ) - elif flow_id: - count_events = await FlowEventsCRUD(self.session).count_by_flow( - flow_id=flow_id, respondent_id=respondent_id - ) + elif event.activity_flow_id: + count_events = await crud.count_by_flow(flow_id=event.activity_flow_id, respondent_id=event.user_id) if count_events == 0: await self._create_default_event( applet_id=event.applet_id, - activity_id=flow_id, + activity_id=event.activity_flow_id, is_activity=False, - respondent_id=respondent_id, + respondent_id=event.user_id, ) - return respondent_id + return event.user_id async def update_schedule( self, @@ -364,22 +343,21 @@ async def update_schedule( await 
self._validate_applet(applet_id=applet_id) event: Event = await EventCRUD(self.session).get_by_id(pk=schedule_id) - activity_id = await ActivityEventsCRUD(self.session).get_by_event_id(event_id=schedule_id) - flow_id = await FlowEventsCRUD(self.session).get_by_event_id(event_id=schedule_id) - respondent_id = await UserEventsCRUD(self.session).get_by_event_id(event_id=schedule_id) # Delete all events of this activity or flow - # if new periodicity type is "always" and old periodicity type is not "always" # noqa: E501 - if schedule.periodicity.type == PeriodicityType.ALWAYS and event.periodicity != PeriodicityType.ALWAYS: # noqa: E501 + # if new periodicity type is "always" and old periodicity type is not "always" + if schedule.periodicity.type == PeriodicityType.ALWAYS and event.periodicity != PeriodicityType.ALWAYS: await self._delete_by_activity_or_flow( applet_id=applet_id, - activity_id=activity_id, - flow_id=flow_id, - respondent_id=respondent_id, + activity_id=event.activity_id, + flow_id=event.activity_flow_id, + respondent_id=event.user_id, only_always_available=False, except_event_id=schedule_id, ) + old_event_version = event.version + # Update event event = await EventCRUD(self.session).update( pk=schedule_id, @@ -395,20 +373,27 @@ async def update_schedule( start_date=schedule.periodicity.start_date, end_date=schedule.periodicity.end_date, selected_date=schedule.periodicity.selected_date, + event_type=event.event_type, + activity_id=event.activity_id, + activity_flow_id=event.activity_flow_id, + user_id=event.user_id, ), ) schedule_event = ScheduleEvent( - **event.dict(exclude={"applet_id"}), - activity_id=activity_id, - flow_id=flow_id, - user_id=respondent_id, + **event.dict(exclude={"applet_id", "activity_flow_id"}), + flow_id=event.activity_flow_id, ) # Update notification await NotificationCRUD(self.session).delete_by_event_ids([schedule_id]) await ReminderCRUD(self.session).delete_by_event_ids([schedule_id]) + await asyncio.gather( + 
NotificationHistoryCRUD(self.session).mark_as_deleted([(event.id, old_event_version)]), + ReminderHistoryCRUD(self.session).mark_as_deleted([(event.id, old_event_version)]), + ) + notification_public = None if schedule.notification: notifications = None @@ -461,16 +446,15 @@ async def update_schedule( ) return PublicEvent( - **event.dict(exclude={"periodicity"}), + **event.dict(exclude={"periodicity", "user_id", "activity_flow_id"}), periodicity=PublicPeriodicity( type=event.periodicity, start_date=event.start_date, end_date=event.end_date, selected_date=event.selected_date, ), - respondent_id=respondent_id, - activity_id=activity_id, - flow_id=flow_id, + respondent_id=event.user_id, + flow_id=event.activity_flow_id, notification=notification_public, ) @@ -507,46 +491,43 @@ async def count_schedules(self, applet_id: uuid.UUID) -> PublicEventCount: event_count = PublicEventCount(activity_events=[], flow_events=[]) # Get list of activity-event ids - activity_counts = await ActivityEventsCRUD(self.session).count_by_applet(applet_id=applet_id) - - # Get list of flow-event ids - flow_counts = await FlowEventsCRUD(self.session).count_by_applet(applet_id=applet_id) + activity_counts, flow_counts = await EventCRUD(self.session).count_by_applet(applet_id=applet_id) event_count.activity_events = activity_counts if activity_counts else [] event_count.flow_events = flow_counts if flow_counts else [] return event_count - async def delete_by_user_id(self, applet_id, user_id): + async def delete_by_user_id(self, applet_id: uuid.UUID, user_id: uuid.UUID) -> None: # Check if applet exists await self._validate_applet(applet_id=applet_id) # Check if user exists await self._validate_user(user_id=user_id) - # Get list of activity-event ids and flow-event ids for user to create default events # noqa: E501 - activities = await ActivityEventsCRUD(self.session).get_by_applet_and_user_id(applet_id, user_id) + event_schemas = await 
EventCRUD(self.session).get_all_by_applet_and_user(applet_id, user_id) - activity_ids = {activity.activity_id for activity in activities} + # List of event_ids for user for deletion + event_ids: list[uuid.UUID] = [] + activity_ids: set[uuid.UUID] = set() + flow_ids: set[uuid.UUID] = set() - flows = await FlowEventsCRUD(self.session).get_by_applet_and_user_id(applet_id, user_id) - flow_ids = {flow.flow_id for flow in flows} + for event in event_schemas: + event_ids.append(event.id) + if event.activity_id: + activity_ids.add(event.activity_id) + if event.flow_id: + flow_ids.add(event.flow_id) - # Get list of event_ids for user and delete them all - event_schemas = await EventCRUD(self.session).get_all_by_applet_and_user(applet_id, user_id) - event_ids = [event_schema.id for event_schema in event_schemas] if not event_ids: raise ScheduleNotFoundError() - await self._delete_by_ids( - event_ids=event_ids, - user_id=user_id, - ) + await self._delete_by_ids(event_ids=event_ids) await ScheduleHistoryService(self.session).mark_as_deleted( [(event.id, event.version) for event in event_schemas] ) - # Create AA events for all activities and flows + # Create always available events for all activities and flows await self.create_default_schedules( applet_id=applet_id, activity_ids=list(activity_ids), @@ -608,8 +589,7 @@ async def _delete_by_activity_or_flow( only_always_available, ) - clean_events = [event for event in event_schemas if event.id != except_event_id] - event_ids = [event.id for event in clean_events] + event_ids = [event.id for event in event_schemas if event.id != except_event_id] if event_ids: await self._delete_by_ids(event_ids=event_ids) @@ -620,18 +600,7 @@ async def _delete_by_activity_or_flow( async def _delete_by_ids( self, event_ids: list[uuid.UUID], - user_id: uuid.UUID | None = None, ): - if user_id: - await UserEventsCRUD(self.session).delete_all_by_events_and_user( - event_ids, - user_id, - ) - else: - await 
UserEventsCRUD(self.session).delete_all_by_event_ids(event_ids) - - await ActivityEventsCRUD(self.session).delete_all_by_event_ids(event_ids) - await FlowEventsCRUD(self.session).delete_all_by_event_ids(event_ids) await NotificationCRUD(self.session).delete_by_event_ids(event_ids) await ReminderCRUD(self.session).delete_by_event_ids(event_ids) await EventCRUD(self.session).delete_by_ids(event_ids) @@ -882,10 +851,7 @@ async def remove_individual_calendar(self, user_id: uuid.UUID, applet_id: uuid.U if not event_ids: raise ScheduleNotFoundError() - await self._delete_by_ids( - event_ids=event_ids, - user_id=user_id, - ) + await self._delete_by_ids(event_ids=event_ids) await ScheduleHistoryService(self.session).mark_as_deleted( [(event.id, event.version) for event in event_schemas] @@ -947,7 +913,7 @@ async def create_default_schedules_if_not_exist( activity_ids: list[uuid.UUID], ) -> None: """Create default schedules for applet.""" - activities_without_events = await ActivityEventsCRUD(self.session).get_missing_events(activity_ids) + activities_without_events = await EventCRUD(self.session).get_activities_without_events(activity_ids) await self.create_default_schedules( applet_id=applet_id, activity_ids=activities_without_events, diff --git a/src/apps/schedule/service/schedule_history.py b/src/apps/schedule/service/schedule_history.py index 2ec2fb65429..06b3e9a17cf 100644 --- a/src/apps/schedule/service/schedule_history.py +++ b/src/apps/schedule/service/schedule_history.py @@ -27,6 +27,9 @@ def __init__(self, session): async def add_history(self, applet_id: uuid.UUID, event: ScheduleEvent): applet = await AppletsCRUD(self.session).get_by_id(applet_id) + # Refresh the applet so we don't get the old version number, in case the version has changed + await self.session.refresh(applet) + event_history = await ScheduleHistoryCRUD(self.session).add( EventHistorySchema( start_time=event.start_time, diff --git a/src/apps/workspaces/crud/user_applet_access.py 
b/src/apps/workspaces/crud/user_applet_access.py index b2f46c1c970..05579baee1b 100644 --- a/src/apps/workspaces/crud/user_applet_access.py +++ b/src/apps/workspaces/crud/user_applet_access.py @@ -29,7 +29,7 @@ from apps.applets.db.schemas import AppletSchema from apps.invitations.constants import InvitationStatus from apps.invitations.db import InvitationSchema -from apps.schedule.db.schemas import EventSchema, UserEventsSchema +from apps.schedule.db.schemas import EventSchema from apps.shared.encryption import get_key from apps.shared.filtering import FilterField, Filtering from apps.shared.ordering import Ordering @@ -402,10 +402,9 @@ async def get_workspace_respondents( workspace_applets_sq = self.workspace_applets_subquery(owner_id, applet_id) schedule_exists = ( - select(UserEventsSchema) - .join(EventSchema, EventSchema.id == UserEventsSchema.event_id) + select(EventSchema) .where( - UserEventsSchema.user_id == UserAppletAccessSchema.user_id, + EventSchema.user_id == UserAppletAccessSchema.user_id, EventSchema.applet_id == UserAppletAccessSchema.applet_id, ) .exists() @@ -1047,12 +1046,11 @@ async def get_respondent_accesses_by_owner_id( page: int, limit: int, ) -> list[RespondentAppletAccess]: - individual_event_query: Query = select(UserEventsSchema.id) - individual_event_query = individual_event_query.join(EventSchema, EventSchema.id == UserEventsSchema.event_id) + individual_event_query: Query = select(EventSchema.id) individual_event_query = individual_event_query.where( - UserEventsSchema.user_id == UserAppletAccessSchema.user_id + EventSchema.user_id == UserAppletAccessSchema.user_id, + EventSchema.applet_id == UserAppletAccessSchema.applet_id, ) - individual_event_query = individual_event_query.where(EventSchema.applet_id == UserAppletAccessSchema.applet_id) query: Query = select( SubjectSchema.secret_user_id, diff --git a/src/infrastructure/database/migrations/versions/2025_02_02_18_39-clean_up_schedule_tables.py 
b/src/infrastructure/database/migrations/versions/2025_02_02_18_39-clean_up_schedule_tables.py new file mode 100644 index 00000000000..69c43bf080d --- /dev/null +++ b/src/infrastructure/database/migrations/versions/2025_02_02_18_39-clean_up_schedule_tables.py @@ -0,0 +1,174 @@ +"""Clean up schedule tables + +Revision ID: 3059a8ad6ec5 +Revises: 7c7e30fa96a4 +Create Date: 2025-02-02 18:39:01.011295 + +""" + +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = "3059a8ad6ec5" +down_revision = "7c7e30fa96a4" +branch_labels = None +depends_on = None + +EVENT_TYPE_ENUM = 'event_type_enum' +EVENT_TYPE_ENUM_VALUES = ['activity', 'flow'] + + +def upgrade() -> None: + # Add columns `event_type`, `activity_id`, `activity_flow_id`, and `user_id` to `events` + op.add_column("events", sa.Column("activity_id", postgresql.UUID(as_uuid=True), nullable=True)) + op.add_column("events", sa.Column("activity_flow_id", postgresql.UUID(as_uuid=True), nullable=True)) + op.add_column("events", sa.Column("user_id", postgresql.UUID(as_uuid=True), sa.ForeignKey("users.id", ondelete="RESTRICT"), nullable=True)) + op.add_column("events", sa.Column("event_type", postgresql.ENUM(*EVENT_TYPE_ENUM_VALUES, name=EVENT_TYPE_ENUM, create_type=False), nullable=True)) + + # Migrate data from `activity_events`, `flow_events`, and `user_events` to `events` + op.execute(""" + UPDATE events dst + SET activity_id=ae.activity_id, + activity_flow_id = fe.flow_id, + user_id=ue.user_id, + event_type=(CASE WHEN ae.activity_id IS NOT NULL THEN 'activity' ELSE 'flow' END)::event_type_enum + FROM events e + LEFT JOIN activity_events ae ON e.id = ae.event_id + LEFT JOIN flow_events fe ON e.id = fe.event_id + LEFT JOIN user_events ue ON e.id = ue.event_id + WHERE dst.id = e.id + """) + + # Make sure that the `event_type` column is not null + op.alter_column("events", "event_type", nullable=False) + + # Drop the 
`periodicity_id` column from the `events` table + op.drop_column("events", "periodicity_id") + + # Drop tables + op.drop_table("activity_events") + op.drop_table("flow_events") + op.drop_table("user_events") + op.drop_table("periodicity") + + +def downgrade() -> None: + # Recreate the dropped tables + op.create_table( + "activity_events", + sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("is_deleted", sa.Boolean(), nullable=True), + sa.Column("created_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), + sa.Column("updated_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), + sa.Column("migrated_date", sa.DateTime(), nullable=True), + sa.Column("migrated_updated", sa.DateTime(), nullable=True), + sa.Column("event_id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("activity_id", postgresql.UUID(as_uuid=True), nullable=False), + sa.PrimaryKeyConstraint("id", name=op.f("pk_activity_events")), + sa.UniqueConstraint("activity_id", "event_id", "is_deleted", name="_unique_activity_events"), + sa.ForeignKeyConstraint(["event_id"], ["events.id"], name=op.f("fk_activity_events_event_id_events"), ondelete="CASCADE"), + ) + + op.create_table( + "flow_events", + sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("is_deleted", sa.Boolean(), nullable=True), + sa.Column("created_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), + sa.Column("updated_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), + sa.Column("migrated_date", sa.DateTime(), nullable=True), + sa.Column("migrated_updated", sa.DateTime(), nullable=True), + sa.Column("event_id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("flow_id", postgresql.UUID(as_uuid=True), nullable=False), + sa.PrimaryKeyConstraint("id", name=op.f("pk_flow_events")), + sa.UniqueConstraint("flow_id", "event_id", "is_deleted", 
name="_unique_flow_events"), + sa.ForeignKeyConstraint(["event_id"], ["events.id"], name=op.f("fk_flow_events_event_id_events"), ondelete="CASCADE"), + ) + + op.create_table( + "user_events", + sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("is_deleted", sa.Boolean(), nullable=True), + sa.Column("created_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), + sa.Column("updated_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), + sa.Column("migrated_date", sa.DateTime(), nullable=True), + sa.Column("migrated_updated", sa.DateTime(), nullable=True), + sa.Column("event_id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("user_id", postgresql.UUID(as_uuid=True), nullable=False), + sa.PrimaryKeyConstraint("id", name=op.f("pk_user_events")), + sa.UniqueConstraint("user_id", "event_id", "is_deleted", name="_unique_user_events"), + sa.ForeignKeyConstraint(["event_id"], ["events.id"], name=op.f("fk_user_events_event_id_events"), ondelete="CASCADE"), + sa.ForeignKeyConstraint(["user_id"], ["users.id"], name=op.f("fk_user_events_user_id_users"), ondelete="RESTRICT"), + ) + + op.create_table( + "periodicity", + sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("is_deleted", sa.Boolean(), nullable=True), + sa.Column("created_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), + sa.Column("updated_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), + sa.Column("migrated_date", sa.DateTime(), nullable=True), + sa.Column("migrated_updated", sa.DateTime(), nullable=True), + sa.Column("type", sa.String(10), nullable=False), + sa.Column("start_date", sa.Date(), nullable=True), + sa.Column("end_date", sa.Date(), nullable=True), + sa.Column("selected_date", sa.Date(), nullable=True), + sa.PrimaryKeyConstraint("id", name=op.f("pk_periodicity")), + ) + + # Add the `periodicity_id` column 
back to the `events` table + op.add_column( + "events", + sa.Column( + "periodicity_id", + postgresql.UUID(as_uuid=True), + server_default=sa.text("gen_random_uuid()"), + nullable=False + ) + ) + + # Generate periodicity IDs for existing events + op.execute(""" + UPDATE events + SET periodicity_id = gen_random_uuid() + WHERE periodicity_id IS NULL + """) + + # Repopulate the `activity_events`, `flow_events`, `user_events`, and `periodicity` tables + # We do lose some data here (e.g. the original `id`, `created_at`, `updated_at`, `migrated_date`, `migrated_updated`), + # because we can't recover that data from the `events` table + op.execute(""" + INSERT INTO activity_events (id, is_deleted, activity_id, event_id) + SELECT gen_random_uuid(), e.is_deleted, e.activity_id, e.id + FROM events e + WHERE e.activity_id IS NOT NULL + AND e.event_type = 'activity' + """) + + op.execute(""" + INSERT INTO flow_events (id, is_deleted, flow_id, event_id) + SELECT gen_random_uuid(), e.is_deleted, e.activity_flow_id, e.id + FROM events e + WHERE e.activity_flow_id IS NOT NULL + AND e.event_type = 'flow' + """) + + op.execute(""" + INSERT INTO user_events (id, is_deleted, user_id, event_id) + SELECT gen_random_uuid(), e.is_deleted, e.user_id, e.id + FROM events e + WHERE e.user_id IS NOT NULL + """) + + op.execute(""" + INSERT INTO periodicity (id, is_deleted, type, start_date, end_date, selected_date) + SELECT e.periodicity_id, e.is_deleted, e.periodicity, e.start_date, e.end_date, e.selected_date + FROM events e + """) + + # Drop the new columns from the `events` table + op.drop_column("events", "activity_id") + op.drop_column("events", "activity_flow_id") + op.drop_column("events", "user_id") + op.drop_column("events", "event_type") From 0458132ff28bba26dc42d2e879998bcd0daf23c9 Mon Sep 17 00:00:00 2001 From: Kenroy Gobourne <14842108+sultanofcardio@users.noreply.github.com> Date: Wed, 19 Feb 2025 20:29:11 -0500 Subject: [PATCH 04/14] fix: Scheduling History Logic Fixes (M2-8494) 
(M2-8717) (M2-8725) (#1746) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR fixes the following issues: - After adding a new activity to an applet, its entry to `applet_events` uses the old applet version number - Adding a new scheduled event for an activity/flow removes all its previous scheduled event(s). There can be only one "always available" event, whereas many scheduled events per activity/flow should be allowed - In the *notification_histories* table, the *is_deleted* column doesn't become `true` after removing the notification from the event - In the *reminder_histories* table, the *is_deleted* column doesn’t become `true` after removing the reminder from the event From e385aadce3f9ac2ef6ccc1e180c81098eb2909f5 Mon Sep 17 00:00:00 2001 From: Kenroy Gobourne <14842108+sultanofcardio@users.noreply.github.com> Date: Fri, 21 Feb 2025 09:46:53 -0500 Subject: [PATCH 05/14] fix: Fix linking of individual events to new applet versions in applet_events (M2-8743) (#1749) This PR updates the query being used to find all the events linked to an applet when that applet is updated. This list of events is used as the base for which rows get inserted into `applet_events` whenever the applet version changes without changing any event versions. 
Previously, the query being used was excluding individual events by adding the condition `WHERE user_id = NULL` --- src/apps/schedule/crud/events.py | 8 ++++++++ src/apps/schedule/service/schedule_history.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/src/apps/schedule/crud/events.py b/src/apps/schedule/crud/events.py index 5ebc4870db7..583fc0bd8ba 100644 --- a/src/apps/schedule/crud/events.py +++ b/src/apps/schedule/crud/events.py @@ -76,6 +76,14 @@ async def get_all_by_applet_id_with_filter( result = await self._execute(query) return result.scalars().all() + async def get_all_by_applet_id(self, applet_id: uuid.UUID) -> list[EventSchema]: + """Return all events linked to a specific applet""" + query: Query = select(EventSchema) + query = query.where(EventSchema.applet_id == applet_id, EventSchema.is_deleted.is_(False)) + + result = await self._execute(query) + return result.scalars().all() + async def get_public_by_applet_id(self, applet_id: uuid.UUID) -> list[EventSchema]: """Return event instance.""" query: Query = select(EventSchema) diff --git a/src/apps/schedule/service/schedule_history.py b/src/apps/schedule/service/schedule_history.py index 06b3e9a17cf..62df4620d7a 100644 --- a/src/apps/schedule/service/schedule_history.py +++ b/src/apps/schedule/service/schedule_history.py @@ -100,7 +100,7 @@ async def update_applet_event_links( This method is useful when an applet has its version bumped and the events are not updated. The previous entries in `applet_events` are not removed to maintain the history of the applet. 
""" - events = await EventCRUD(self.session).get_all_by_applet_id_with_filter(applet_id) + events = await EventCRUD(self.session).get_all_by_applet_id(applet_id) if len(events) > 0: await AppletEventsCRUD(self.session).add_many( From 21156afd5081ca493dec4c336603fb848dacde2f Mon Sep 17 00:00:00 2001 From: Carlos Chacon Date: Mon, 24 Feb 2025 09:28:49 -0600 Subject: [PATCH 06/14] Revert "chore: Schedules database clean up (M2-8495) (#1723)" This reverts commit 11571c7f69479c1e67c0989725bc509c6afb423e. --- src/apps/activities/services/activity.py | 7 +- src/apps/activity_flows/service/flow.py | 7 +- src/apps/applets/commands/applet_ema.py | 78 +- src/apps/schedule/crud/events.py | 811 +++++++++++++----- src/apps/schedule/db/schemas.py | 70 +- src/apps/schedule/domain/schedule/internal.py | 46 +- src/apps/schedule/service/schedule.py | 210 +++-- src/apps/schedule/service/schedule_history.py | 3 - .../workspaces/crud/user_applet_access.py | 14 +- ...25_02_02_18_39-clean_up_schedule_tables.py | 174 ---- 10 files changed, 906 insertions(+), 514 deletions(-) delete mode 100644 src/infrastructure/database/migrations/versions/2025_02_02_18_39-clean_up_schedule_tables.py diff --git a/src/apps/activities/services/activity.py b/src/apps/activities/services/activity.py index d8d6dc94f48..20eb9bb68ff 100644 --- a/src/apps/activities/services/activity.py +++ b/src/apps/activities/services/activity.py @@ -23,8 +23,7 @@ from apps.activity_assignments.service import ActivityAssignmentService from apps.activity_flows.crud import FlowsCRUD from apps.applets.crud import AppletsCRUD, UserAppletAccessCRUD -from apps.schedule.crud.events import EventCRUD -from apps.schedule.domain.constants import EventType +from apps.schedule.crud.events import ActivityEventsCRUD, EventCRUD from apps.schedule.service.schedule import ScheduleService from apps.workspaces.domain.constants import Role from infrastructure.logger import logger @@ -116,9 +115,9 @@ async def update_create(self, applet_id: 
uuid.UUID, activities_create: list[Acti activity_id_key_map: dict[uuid.UUID, uuid.UUID] = dict() prepared_activity_items = list() - activity_events = await EventCRUD(self.session).get_by_type_and_applet_id(applet_id, EventType.ACTIVITY) + all_activities = await ActivityEventsCRUD(self.session).get_by_applet_id(applet_id) - all_activity_ids = [activity.activity_id for activity in activity_events if activity.activity_id is not None] + all_activity_ids = [activity.activity_id for activity in all_activities] # Save new activity ids new_activities = [] diff --git a/src/apps/activity_flows/service/flow.py b/src/apps/activity_flows/service/flow.py index 09622746247..391d67d7c68 100644 --- a/src/apps/activity_flows/service/flow.py +++ b/src/apps/activity_flows/service/flow.py @@ -15,8 +15,7 @@ from apps.activity_flows.service.flow_item import FlowItemService from apps.applets.crud import UserAppletAccessCRUD from apps.applets.domain.applet_history import Version -from apps.schedule.crud.events import EventCRUD -from apps.schedule.domain.constants import EventType +from apps.schedule.crud.events import EventCRUD, FlowEventsCRUD from apps.schedule.service.schedule import ScheduleService from apps.workspaces.domain.constants import Role @@ -90,9 +89,7 @@ async def update_create( schemas = list() prepared_flow_items = list() - flow_events = await EventCRUD(self.session).get_by_type_and_applet_id(applet_id, EventType.FLOW) - - all_flows = [flow_event.activity_flow_id for flow_event in flow_events if flow_event.activity_flow_id] + all_flows = [flow.flow_id for flow in await FlowEventsCRUD(self.session).get_by_applet_id(applet_id)] # Save new flow ids new_flows = [] diff --git a/src/apps/applets/commands/applet_ema.py b/src/apps/applets/commands/applet_ema.py index ae08cb5ac72..f14072bf799 100644 --- a/src/apps/applets/commands/applet_ema.py +++ b/src/apps/applets/commands/applet_ema.py @@ -24,7 +24,13 @@ from apps.job.constants import JobStatus from apps.job.errors import 
JobStatusError from apps.job.service import JobService -from apps.schedule.db.schemas import EventSchema +from apps.schedule.db.schemas import ( + ActivityEventsSchema, + EventSchema, + FlowEventsSchema, + PeriodicitySchema, + UserEventsSchema, +) from apps.schedule.domain.constants import PeriodicityType from apps.shared.domain.base import PublicModel from apps.subjects.db.schemas import SubjectSchema @@ -233,24 +239,27 @@ async def get_user_flow_events( select( EventSchema.applet_id, EventSchema.id.label("event_id"), - EventSchema.user_id, - EventSchema.activity_flow_id.label("flow_id"), - EventSchema.periodicity.label("event_type"), + UserEventsSchema.user_id, + FlowEventsSchema.flow_id, + PeriodicitySchema.type.label("event_type"), case( ( - EventSchema.periodicity.in_(("WEEKDAYS", "DAILY")), + PeriodicitySchema.type.in_(("WEEKDAYS", "DAILY")), scheduled_date, ), - (EventSchema.periodicity.in_(("WEEKLY", "MONTHLY")), EventSchema.start_date), - else_=EventSchema.selected_date, + (PeriodicitySchema.type.in_(("WEEKLY", "MONTHLY")), PeriodicitySchema.start_date), + else_=PeriodicitySchema.selected_date, ).label("selected_date"), - EventSchema.start_date, - EventSchema.end_date, + PeriodicitySchema.start_date, + PeriodicitySchema.end_date, EventSchema.start_time, EventSchema.end_time, ) .select_from(EventSchema) - .where(EventSchema.is_deleted == false(), EventSchema.periodicity != PeriodicityType.ALWAYS) + .join(UserEventsSchema, UserEventsSchema.event_id == EventSchema.id) + .join(PeriodicitySchema, PeriodicitySchema.id == EventSchema.periodicity_id) + .join(FlowEventsSchema, FlowEventsSchema.event_id == EventSchema.id) + .where(EventSchema.is_deleted == false(), PeriodicitySchema.type != PeriodicityType.ALWAYS) ).cte("user_flow_events") query = ( @@ -318,14 +327,14 @@ def filter_events(raw_events_rows: list[TRawRow], schedule_date: datetime.date) case PeriodicityType.DAILY: if row.is_crossday_event: row.end_date += datetime.timedelta(days=1) - if row.start_date 
<= schedule_date <= row.end_date: + if schedule_date >= row.start_date and schedule_date <= row.end_date: filtered.append(row) case PeriodicityType.ONCE: schedule_start_date = row.selected_date row.end_date = row.selected_date if row.is_crossday_event: row.end_date += datetime.timedelta(days=1) - if schedule_start_date <= schedule_date <= row.end_date: + if schedule_date >= schedule_start_date and schedule_date <= row.end_date: filtered.append(row) case PeriodicityType.WEEKDAYS: last_weekday = FRIDAY_WEEKDAY @@ -333,7 +342,11 @@ def filter_events(raw_events_rows: list[TRawRow], schedule_date: datetime.date) last_weekday = SATURDAY_WEEKDAY if row.end_date.weekday() == FRIDAY_WEEKDAY: row.end_date += datetime.timedelta(days=1) - if schedule_date.weekday() <= last_weekday and row.start_date <= schedule_date <= row.end_date: + if ( + schedule_date.weekday() <= last_weekday + and schedule_date >= row.start_date + and schedule_date <= row.end_date + ): filtered.append(row) case PeriodicityType.WEEKLY: scheduled_weekday = row.start_date.weekday() @@ -349,8 +362,10 @@ def filter_events(raw_events_rows: list[TRawRow], schedule_date: datetime.date) if row.start_date.weekday() == row.end_date.weekday(): row.end_date += datetime.timedelta(days=1) if ( - schedule_date.weekday() == scheduled_weekday or schedule_date.weekday() == following_weekday - ) and row.start_date <= schedule_date <= row.end_date: + (schedule_date.weekday() == scheduled_weekday or schedule_date.weekday() == following_weekday) + and schedule_date >= row.start_date + and schedule_date <= row.end_date + ): filtered.append(row) case PeriodicityType.MONTHLY: scheduled_monthday = row.start_date.day @@ -367,10 +382,14 @@ def filter_events(raw_events_rows: list[TRawRow], schedule_date: datetime.date) ): row.end_date += datetime.timedelta(days=1) if ( - schedule_date.day == scheduled_monthday - or schedule_date.day == following_monthday - or (is_last_day_of_month(schedule_date) and row.start_date) - ) and 
row.start_date <= schedule_date <= row.end_date: + ( + schedule_date.day == scheduled_monthday + or schedule_date.day == following_monthday + or (is_last_day_of_month(schedule_date) and row.start_date) + ) + and schedule_date >= row.start_date + and schedule_date <= row.end_date + ): filtered.append(row) return filtered @@ -486,24 +505,27 @@ async def get_user_activity_events( select( EventSchema.applet_id, EventSchema.id.label("event_id"), - EventSchema.user_id, - EventSchema.activity_id, - EventSchema.periodicity.label("event_type"), + UserEventsSchema.user_id, + ActivityEventsSchema.activity_id, + PeriodicitySchema.type.label("event_type"), case( ( - EventSchema.periodicity.in_(("WEEKDAYS", "DAILY")), + PeriodicitySchema.type.in_(("WEEKDAYS", "DAILY")), scheduled_date, ), - (EventSchema.periodicity.in_(("WEEKLY", "MONTHLY")), EventSchema.start_date), - else_=EventSchema.selected_date, + (PeriodicitySchema.type.in_(("WEEKLY", "MONTHLY")), PeriodicitySchema.start_date), + else_=PeriodicitySchema.selected_date, ).label("selected_date"), - EventSchema.start_date, - EventSchema.end_date, + PeriodicitySchema.start_date, + PeriodicitySchema.end_date, EventSchema.start_time, EventSchema.end_time, ) .select_from(EventSchema) - .where(EventSchema.is_deleted == false(), EventSchema.periodicity != PeriodicityType.ALWAYS) + .join(UserEventsSchema, UserEventsSchema.event_id == EventSchema.id) + .join(PeriodicitySchema, PeriodicitySchema.id == EventSchema.periodicity_id) + .join(ActivityEventsSchema, ActivityEventsSchema.event_id == EventSchema.id) + .where(EventSchema.is_deleted == false(), PeriodicitySchema.type != PeriodicityType.ALWAYS) ).cte("user_activity_events") query = ( diff --git a/src/apps/schedule/crud/events.py b/src/apps/schedule/crud/events.py index 583fc0bd8ba..4be6ec97d44 100644 --- a/src/apps/schedule/crud/events.py +++ b/src/apps/schedule/crud/events.py @@ -4,31 +4,48 @@ from sqlalchemy import Integer, update from sqlalchemy.exc import IntegrityError, 
MultipleResultsFound, NoResultFound from sqlalchemy.orm import Query -from sqlalchemy.sql import and_, delete, func, or_, select +from sqlalchemy.sql import and_, delete, distinct, func, or_, select from sqlalchemy.sql.expression import case, cast from apps.activities.db.schemas import ActivitySchema from apps.activity_flows.db.schemas import ActivityFlowSchema from apps.schedule.db.schemas import ( + ActivityEventsSchema, EventSchema, + FlowEventsSchema, + UserEventsSchema, ) -from apps.schedule.domain.constants import EventType, PeriodicityType +from apps.schedule.domain.constants import PeriodicityType from apps.schedule.domain.schedule.internal import ( + ActivityEvent, + ActivityEventCreate, Event, EventCreate, EventFull, EventUpdate, + FlowEvent, + FlowEventCreate, + UserEvent, + UserEventCreate, ) from apps.schedule.domain.schedule.public import ActivityEventCount, FlowEventCount from apps.schedule.errors import ( + ActivityEventAlreadyExists, EventError, EventNotFoundError, + FlowEventAlreadyExists, + UserEventAlreadyExists, ) from apps.workspaces.db.schemas import UserAppletAccessSchema from apps.workspaces.domain.constants import Role from infrastructure.database import BaseCRUD -__all__ = ["EventCRUD"] +__all__ = [ + "EventCRUD", + "UserEventsCRUD", + "ActivityEventsCRUD", + "FlowEventsCRUD", +] class EventCRUD(BaseCRUD[EventSchema]): @@ -66,12 +83,17 @@ async def get_all_by_applet_id_with_filter( ) -> list[EventSchema]: """Return event instance.""" query: Query = select(EventSchema) + query = query.join( + UserEventsSchema, + UserEventsSchema.event_id == EventSchema.id, + isouter=True, + ) query = query.where(EventSchema.applet_id == applet_id) query = query.where(EventSchema.is_deleted.is_(False)) if respondent_id: - query = query.where(EventSchema.user_id == respondent_id) + query = query.where(UserEventsSchema.user_id == respondent_id) else: - query = query.where(EventSchema.user_id.is_(None)) + query = query.where(UserEventsSchema.user_id == None) # 
noqa: E711 result = await self._execute(query) return result.scalars().all() @@ -87,10 +109,15 @@ async def get_all_by_applet_id(self, applet_id: uuid.UUID) -> list[EventSchema]: async def get_public_by_applet_id(self, applet_id: uuid.UUID) -> list[EventSchema]: """Return event instance.""" query: Query = select(EventSchema) + query = query.join( + UserEventsSchema, + UserEventsSchema.event_id == EventSchema.id, + isouter=True, + ) query = query.where(EventSchema.applet_id == applet_id) query = query.distinct(EventSchema.id) - query = query.where(EventSchema.user_id.is_(None)) - query = query.where(EventSchema.is_deleted.is_(False)) + query = query.where(UserEventsSchema.user_id == None) # noqa: E711 + query = query.where(EventSchema.is_deleted == False) # noqa: E712 result = await self._execute(query) return result.scalars().all() @@ -133,12 +160,34 @@ async def update(self, pk: uuid.UUID, schema: EventUpdate) -> Event: async def get_all_by_applet_and_user(self, applet_id: uuid.UUID, user_id: uuid.UUID) -> list[EventFull]: """Get events by applet_id and user_id""" - query: Query = select(EventSchema) - query = query.where( - EventSchema.applet_id == applet_id, EventSchema.user_id == user_id, EventSchema.is_deleted.is_(False) + query: Query = select( + EventSchema, + ActivityEventsSchema.activity_id, + FlowEventsSchema.flow_id, + ) + query = query.join( + UserEventsSchema, + and_( + EventSchema.id == UserEventsSchema.event_id, + UserEventsSchema.user_id == user_id, + ), ) + query = query.join( + FlowEventsSchema, + FlowEventsSchema.event_id == EventSchema.id, + isouter=True, + ) + query = query.join( + ActivityEventsSchema, + ActivityEventsSchema.event_id == EventSchema.id, + isouter=True, + ) + + query = query.where(EventSchema.applet_id == applet_id) + query = query.where(EventSchema.is_deleted == False) # noqa: E712 + db_result = await self._execute(query) events = [] @@ -158,9 +207,8 @@ async def get_all_by_applet_and_user(self, applet_id: uuid.UUID, user_id: 
uuid.U start_date=row.EventSchema.start_date, end_date=row.EventSchema.end_date, selected_date=row.EventSchema.selected_date, - activity_id=row.EventSchema.activity_id, - flow_id=row.EventSchema.activity_flow_id, - event_type=row.EventSchema.event_type, + activity_id=row.activity_id, + flow_id=row.flow_id, ) ) return events @@ -175,12 +223,32 @@ async def get_all_by_applets_and_user( """Get events by applet_ids and user_id Return {applet_id: [EventFull]}""" - query: Query = select(EventSchema) - query = query.where( - EventSchema.applet_id.in_(applet_ids), - EventSchema.is_deleted.is_(False), - EventSchema.user_id == user_id, + query: Query = select( + EventSchema, + ActivityEventsSchema.activity_id, + FlowEventsSchema.flow_id, ) + query = query.join( + UserEventsSchema, + and_( + EventSchema.id == UserEventsSchema.event_id, + UserEventsSchema.user_id == user_id, + ), + ) + + query = query.join( + FlowEventsSchema, + FlowEventsSchema.event_id == EventSchema.id, + isouter=True, + ) + query = query.join( + ActivityEventsSchema, + ActivityEventsSchema.event_id == EventSchema.id, + isouter=True, + ) + + query = query.where(EventSchema.applet_id.in_(applet_ids)) + query = query.where(EventSchema.is_deleted == False) # noqa: E712 if min_end_date and max_start_date: query = query.where( or_( @@ -216,7 +284,7 @@ async def get_all_by_applets_and_user( id=row.EventSchema.id, start_time=row.EventSchema.start_time, end_time=row.EventSchema.end_time, - access_before_schedule=row.EventSchema.access_before_schedule, + access_before_schedule=row.EventSchema.access_before_schedule, # noqa: E501 one_time_completion=row.EventSchema.one_time_completion, timer=row.EventSchema.timer, timer_type=row.EventSchema.timer_type, @@ -226,9 +294,8 @@ async def get_all_by_applets_and_user( start_date=row.EventSchema.start_date, end_date=row.EventSchema.end_date, selected_date=row.EventSchema.selected_date, - activity_id=row.EventSchema.activity_id, - flow_id=row.EventSchema.activity_flow_id, - 
event_type=row.EventSchema.event_type, + activity_id=row.activity_id, + flow_id=row.flow_id, ) ) @@ -249,15 +316,26 @@ async def get_all_by_applet_and_activity( ) -> list[EventSchema]: """Get events by applet_id and activity_id""" query: Query = select(EventSchema) + query = query.join( + ActivityEventsSchema, + and_( + EventSchema.id == ActivityEventsSchema.event_id, + ActivityEventsSchema.activity_id == activity_id, + ), + ) + # differentiate general and individual events + query = query.join( + UserEventsSchema, + EventSchema.id == UserEventsSchema.event_id, + isouter=True, + ) # select only always available if requested if only_always_available: - query = query.where(EventSchema.periodicity == PeriodicityType.ALWAYS) - query = query.where( - EventSchema.applet_id == applet_id, - EventSchema.is_deleted.is_(False), - EventSchema.activity_id == activity_id, - EventSchema.user_id == respondent_id, - ) + query.where(EventSchema.periodicity == PeriodicityType.ALWAYS) + query = query.where(EventSchema.applet_id == applet_id) + query = query.where(EventSchema.is_deleted.is_(False)) + + query = query.where(UserEventsSchema.user_id == respondent_id) result = await self._execute(query) return result.scalars().all() @@ -274,18 +352,34 @@ async def validate_existing_always_available( query = query.select_from(EventSchema) if activity_id: - query = query.where(EventSchema.activity_id == activity_id) + query = query.join( + ActivityEventsSchema, + and_( + EventSchema.id == ActivityEventsSchema.event_id, + ActivityEventsSchema.activity_id == activity_id, + ), + ) if flow_id: - query = query.where(EventSchema.activity_flow_id == flow_id) + query = query.join( + FlowEventsSchema, + and_( + EventSchema.id == FlowEventsSchema.event_id, + FlowEventsSchema.flow_id == flow_id, + ), + ) - query = query.where( - EventSchema.periodicity == PeriodicityType.ALWAYS, - EventSchema.applet_id == applet_id, - EventSchema.is_deleted.is_(False), - EventSchema.user_id == respondent_id, + # 
differentiate general and individual events + query = query.join( + UserEventsSchema, + EventSchema.id == UserEventsSchema.event_id, + isouter=True, ) + query.where(EventSchema.periodicity == PeriodicityType.ALWAYS) + query = query.where(EventSchema.applet_id == applet_id) + query = query.where(EventSchema.is_deleted.is_(False)) + query = query.where(UserEventsSchema.user_id == respondent_id) query = query.limit(1) result = await self._execute(query) @@ -300,48 +394,102 @@ async def get_all_by_applet_and_flow( ) -> list[EventSchema]: """Get events by applet_id and flow_id""" query: Query = select(EventSchema) + query = query.join( + FlowEventsSchema, + and_( + EventSchema.id == FlowEventsSchema.event_id, + FlowEventsSchema.flow_id == flow_id, + ), + ) + + # differentiate general and individual events + query = query.join( + UserEventsSchema, + EventSchema.id == UserEventsSchema.event_id, + isouter=True, + ) # select only always available if requested if only_always_available: - query = query.where(EventSchema.periodicity == PeriodicityType.ALWAYS) + query.where(EventSchema.periodicity == PeriodicityType.ALWAYS) - query = query.where( - EventSchema.applet_id == applet_id, - EventSchema.is_deleted.is_(False), - EventSchema.user_id == respondent_id, - EventSchema.activity_flow_id == flow_id, - ) + query = query.where(EventSchema.applet_id == applet_id) + query = query.where(EventSchema.is_deleted.is_(False)) + + query = query.where(UserEventsSchema.user_id == respondent_id) result = await self._execute(query) return result.scalars().all() async def get_general_events_by_user(self, applet_id: uuid.UUID, user_id: uuid.UUID) -> list[EventFull]: """Get general events by applet_id and user_id""" - - # select flow and activity ids to exclude - ids = ( - select( - func.coalesce(EventSchema.activity_flow_id, EventSchema.activity_id).label("entity_id"), + # select flow_ids to exclude + flow_ids = ( + select(distinct(FlowEventsSchema.flow_id)) + .select_from(FlowEventsSchema) + 
.join( + UserEventsSchema, + UserEventsSchema.event_id == FlowEventsSchema.event_id, + ) + .join( + EventSchema, + EventSchema.id == FlowEventsSchema.event_id, + ) + .where(UserEventsSchema.user_id == user_id) + .where(EventSchema.applet_id == applet_id) + ) + activity_ids = ( + select(distinct(ActivityEventsSchema.activity_id)) + .select_from(ActivityEventsSchema) + .join( + UserEventsSchema, + UserEventsSchema.event_id == ActivityEventsSchema.event_id, + ) + .join( + EventSchema, + EventSchema.id == ActivityEventsSchema.event_id, ) - .select_from(EventSchema) - .where(EventSchema.user_id == user_id, EventSchema.applet_id == applet_id) - .group_by("entity_id") + .where(UserEventsSchema.user_id == user_id) + .where(EventSchema.applet_id == applet_id) ) - query: Query = select(EventSchema) + query: Query = select( + EventSchema, + ActivityEventsSchema.activity_id, + FlowEventsSchema.flow_id, + ) + + query = query.join( + FlowEventsSchema, + FlowEventsSchema.event_id == EventSchema.id, + isouter=True, + ) + query = query.join( + ActivityEventsSchema, + ActivityEventsSchema.event_id == EventSchema.id, + isouter=True, + ) + query = query.join( + UserEventsSchema, + UserEventsSchema.event_id == EventSchema.id, + isouter=True, + ) + + query = query.where(EventSchema.applet_id == applet_id) + query = query.where(EventSchema.is_deleted == False) # noqa: E712 query = query.where( - EventSchema.applet_id == applet_id, - EventSchema.is_deleted.is_(False), or_( - EventSchema.activity_flow_id.is_(None), - EventSchema.activity_flow_id.not_in(ids), - ), + FlowEventsSchema.flow_id.is_(None), + FlowEventsSchema.flow_id.not_in(flow_ids), + ) + ) + query = query.where( or_( - EventSchema.activity_id.is_(None), - EventSchema.activity_id.not_in(ids), - ), - EventSchema.user_id.is_(None), + ActivityEventsSchema.activity_id.is_(None), + ActivityEventsSchema.activity_id.not_in(activity_ids), + ) ) + query = query.where(UserEventsSchema.user_id == None) # noqa: E711 db_result = await 
self._execute(query) @@ -362,9 +510,8 @@ async def get_general_events_by_user(self, applet_id: uuid.UUID, user_id: uuid.U start_date=row.EventSchema.start_date, end_date=row.EventSchema.end_date, selected_date=row.EventSchema.selected_date, - activity_id=row.EventSchema.activity_id, - flow_id=row.EventSchema.activity_flow_id, - event_type=row.EventSchema.event_type, + activity_id=row.activity_id, + flow_id=row.flow_id, ) ) return events @@ -377,32 +524,73 @@ async def get_general_events_by_applets_and_user( max_start_date: date | None = None, ) -> tuple[dict[uuid.UUID, list[EventFull]], set[uuid.UUID]]: """Get general events by applet_id and user_id""" - - # select flow and activity ids to exclude - ids = ( - select( - func.coalesce(EventSchema.activity_flow_id, EventSchema.activity_id).label("entity_id"), + # select flow_ids to exclude + flow_ids = ( + select(distinct(FlowEventsSchema.flow_id)) + .select_from(FlowEventsSchema) + .join( + UserEventsSchema, + UserEventsSchema.event_id == FlowEventsSchema.event_id, + ) + .join( + EventSchema, + EventSchema.id == FlowEventsSchema.event_id, + ) + .where(UserEventsSchema.user_id == user_id) + .where(EventSchema.applet_id.in_(applet_ids)) + ) + activity_ids = ( + select(distinct(ActivityEventsSchema.activity_id)) + .select_from(ActivityEventsSchema) + .join( + UserEventsSchema, + UserEventsSchema.event_id == ActivityEventsSchema.event_id, ) - .select_from(EventSchema) - .where(EventSchema.user_id == user_id, EventSchema.applet_id.in_(applet_ids)) - .group_by("entity_id") + .join( + EventSchema, + EventSchema.id == ActivityEventsSchema.event_id, + ) + .where(UserEventsSchema.user_id == user_id) + .where(EventSchema.applet_id.in_(applet_ids)) ) - query: Query = select(EventSchema) + query: Query = select( + EventSchema, + ActivityEventsSchema.activity_id, + FlowEventsSchema.flow_id, + ) + query = query.join( + FlowEventsSchema, + FlowEventsSchema.event_id == EventSchema.id, + isouter=True, + ) + query = query.join( + 
ActivityEventsSchema, + ActivityEventsSchema.event_id == EventSchema.id, + isouter=True, + ) + query = query.join( + UserEventsSchema, + UserEventsSchema.event_id == EventSchema.id, + isouter=True, + ) + + query = query.where(EventSchema.applet_id.in_(applet_ids)) + query = query.where(EventSchema.is_deleted == False) # noqa: E712 query = query.where( - EventSchema.applet_id.in_(applet_ids), - EventSchema.is_deleted.is_(False), or_( - EventSchema.activity_flow_id.is_(None), - EventSchema.activity_flow_id.not_in(ids), - ), + FlowEventsSchema.flow_id.is_(None), + FlowEventsSchema.flow_id.not_in(flow_ids), + ) + ) + query = query.where( or_( - EventSchema.activity_id.is_(None), - EventSchema.activity_id.not_in(ids), - ), - EventSchema.user_id.is_(None), + ActivityEventsSchema.activity_id.is_(None), + ActivityEventsSchema.activity_id.not_in(activity_ids), + ) ) + query = query.where(UserEventsSchema.user_id == None) # noqa: E711 if min_end_date and max_start_date: query = query.where( or_( @@ -448,9 +636,8 @@ async def get_general_events_by_applets_and_user( start_date=row.EventSchema.start_date, end_date=row.EventSchema.end_date, selected_date=row.EventSchema.selected_date, - activity_id=row.EventSchema.activity_id, - flow_id=row.EventSchema.activity_flow_id, - event_type=row.EventSchema.event_type, + activity_id=row.activity_id, + flow_id=row.flow_id, ) ) @@ -458,34 +645,70 @@ async def get_general_events_by_applets_and_user( async def count_general_events_by_user(self, applet_id: uuid.UUID, user_id: uuid.UUID) -> int: """Count general events by applet_id and user_id""" - - # select flow and activity ids to exclude - ids = ( - select( - func.coalesce(EventSchema.activity_flow_id, EventSchema.activity_id).label("entity_id"), + flow_ids = ( + select(distinct(FlowEventsSchema.flow_id)) + .select_from(FlowEventsSchema) + .join( + UserEventsSchema, + UserEventsSchema.event_id == FlowEventsSchema.event_id, + ) + .join( + EventSchema, + EventSchema.id == 
FlowEventsSchema.event_id, ) - .select_from(EventSchema) - .where(EventSchema.user_id == user_id, EventSchema.applet_id == applet_id) - .group_by("entity_id") + .where(UserEventsSchema.user_id == user_id) + .where(EventSchema.applet_id == applet_id) + ) + activity_ids = ( + select(distinct(ActivityEventsSchema.activity_id)) + .select_from(ActivityEventsSchema) + .join( + UserEventsSchema, + UserEventsSchema.event_id == ActivityEventsSchema.event_id, + ) + .join( + EventSchema, + EventSchema.id == ActivityEventsSchema.event_id, + ) + .where(UserEventsSchema.user_id == user_id) + .where(EventSchema.applet_id == applet_id) ) query: Query = select( func.count(EventSchema.id).label("count"), ) + query = query.join( + FlowEventsSchema, + FlowEventsSchema.event_id == EventSchema.id, + isouter=True, + ) + query = query.join( + ActivityEventsSchema, + ActivityEventsSchema.event_id == EventSchema.id, + isouter=True, + ) + query = query.join( + UserEventsSchema, + UserEventsSchema.event_id == EventSchema.id, + isouter=True, + ) + + query = query.where(EventSchema.applet_id == applet_id) + query = query.where(EventSchema.is_deleted == False) # noqa: E712 query = query.where( - EventSchema.applet_id == applet_id, - EventSchema.is_deleted.is_(False), or_( - EventSchema.activity_flow_id.is_(None), - EventSchema.activity_flow_id.not_in(ids), - ), + FlowEventsSchema.flow_id.is_(None), + FlowEventsSchema.flow_id.not_in(flow_ids), + ) + ) + query = query.where( or_( - EventSchema.activity_id.is_(None), - EventSchema.activity_id.not_in(ids), - ), - EventSchema.user_id.is_(None), + ActivityEventsSchema.activity_id.is_(None), + ActivityEventsSchema.activity_id.not_in(activity_ids), + ) ) + query = query.where(UserEventsSchema.user_id == None) # noqa: E711 db_result = await self._execute(query) return db_result.scalar() @@ -494,11 +717,16 @@ async def count_individual_events_by_user(self, applet_id: uuid.UUID, user_id: u """Count individual events by applet_id and user_id""" query: Query 
= select(func.count(EventSchema.id)) - query = query.where( - EventSchema.applet_id == applet_id, - EventSchema.is_deleted.is_(False), - EventSchema.user_id == user_id, + query = query.join( + UserEventsSchema, + and_( + EventSchema.id == UserEventsSchema.event_id, + UserEventsSchema.user_id == user_id, + ), ) + + query = query.where(EventSchema.applet_id == applet_id) + query = query.where(EventSchema.is_deleted == False) # noqa: E712 db_result = await self._execute(query) return db_result.scalar() @@ -509,13 +737,21 @@ async def get_all_by_activity_flow_ids( is_activity: bool, ) -> list[EventSchema]: """Return events for given activity ids.""" - query: Query = select(EventSchema) - query = query.where(EventSchema.applet_id == applet_id) + query: Query = select(self.schema_class) + query = query.where(self.schema_class.applet_id == applet_id) if is_activity: - query = query.where(EventSchema.activity_id.in_(activity_ids)) + query = query.join( + ActivityEventsSchema, + ActivityEventsSchema.event_id == self.schema_class.id, + ) + query = query.where(ActivityEventsSchema.activity_id.in_(activity_ids)) else: - query = query.where(EventSchema.activity_flow_id.in_(activity_ids)) + query = query.join( + FlowEventsSchema, + FlowEventsSchema.event_id == self.schema_class.id, + ) + query = query.where(FlowEventsSchema.flow_id.in_(activity_ids)) result = await self._execute(query) events = result.scalars().all() @@ -523,140 +759,323 @@ async def get_all_by_activity_flow_ids( async def get_default_schedule_user_ids_by_applet_id(self, applet_id: uuid.UUID) -> list[uuid.UUID]: """Return user ids for default schedule.""" - individual_schedule_users = select(EventSchema.user_id).where( - EventSchema.applet_id == applet_id, - EventSchema.is_deleted.is_(False), - EventSchema.user_id.isnot(None), + individual_schedule_users = ( + select(UserEventsSchema.user_id) + .join(EventSchema, UserEventsSchema.event_id == EventSchema.id) + .where(EventSchema.applet_id == applet_id) + 
.where(EventSchema.is_deleted == False) # noqa: E712 ) - query: Query = select(UserAppletAccessSchema.user_id.label("user_id")) query = query.where(UserAppletAccessSchema.applet_id == applet_id) query = query.where(UserAppletAccessSchema.role == Role.RESPONDENT) - query = query.where(UserAppletAccessSchema.is_deleted.is_(False)) + query = query.where(UserAppletAccessSchema.is_deleted == False) # noqa: E712 query = query.where(UserAppletAccessSchema.user_id.not_in(individual_schedule_users)) result = await self._execute(query) result = result.scalars().all() return result - async def count_by_activity(self, activity_id: uuid.UUID, respondent_id: uuid.UUID | None) -> int: - """Return event count.""" + +class UserEventsCRUD(BaseCRUD[UserEventsSchema]): + schema_class = UserEventsSchema + + async def save(self, schema: UserEventCreate) -> UserEvent: + """Return user event instance and the created information.""" + try: + instance: UserEventsSchema = await self._create(UserEventsSchema(**schema.dict())) + except IntegrityError: + raise UserEventAlreadyExists(user_id=schema.user_id, event_id=schema.event_id) + + user_event: UserEvent = UserEvent.from_orm(instance) + return user_event + + async def get_by_event_id(self, event_id: uuid.UUID) -> uuid.UUID | None: + """Return user event instances.""" + query: Query = select(distinct(UserEventsSchema.user_id)) + query = query.where(UserEventsSchema.event_id == event_id) + query = query.where(UserEventsSchema.is_deleted == False) # noqa: E712 + db_result = await self._execute(query) + + try: + result: uuid.UUID = db_result.scalars().one_or_none() + except MultipleResultsFound: + raise EventError() + + return result + + async def delete_all_by_event_ids(self, event_ids: list[uuid.UUID]): + """Delete all user events by event ids.""" + query: Query = delete(UserEventsSchema) + query = query.where(UserEventsSchema.event_id.in_(event_ids)) + await self._execute(query) + + async def delete_all_by_events_and_user(self, event_ids: 
list[uuid.UUID], user_id: uuid.UUID): + """Delete all user events by event ids.""" + query: Query = delete(UserEventsSchema) + query = query.where(UserEventsSchema.event_id.in_(event_ids)) + query = query.where(UserEventsSchema.user_id == user_id) + await self._execute(query) + + +class ActivityEventsCRUD(BaseCRUD[ActivityEventsSchema]): + schema_class = ActivityEventsSchema + + async def save(self, schema: ActivityEventCreate) -> ActivityEvent: + """Return activity event instance and the created information.""" + + try: + instance: ActivityEventsSchema = await self._create(ActivityEventsSchema(**schema.dict())) + except IntegrityError: + raise ActivityEventAlreadyExists(activity_id=schema.activity_id, event_id=schema.event_id) + + activity_event: ActivityEvent = ActivityEvent.from_orm(instance) + return activity_event + + async def get_by_event_id(self, event_id: uuid.UUID) -> uuid.UUID | None: + """Return activity event instances.""" + query: Query = select(ActivityEventsSchema.activity_id) + query = query.where(ActivityEventsSchema.event_id == event_id) + query = query.where( + ActivityEventsSchema.is_deleted == False # noqa: E712 + ) + result = await self._execute(query) + + try: + activity_id = result.scalars().one_or_none() + except MultipleResultsFound: + raise EventError() + return activity_id + + async def delete_all_by_event_ids(self, event_ids: list[uuid.UUID]): + """Delete all activity events by event ids.""" + query: Query = delete(ActivityEventsSchema) + query = query.where(ActivityEventsSchema.event_id.in_(event_ids)) + await self._execute(query) + + async def count_by_applet(self, applet_id: uuid.UUID) -> list[ActivityEventCount]: + """Return activity ids with event count.""" query: Query = select( - func.count(EventSchema.id).label("count"), + ActivitySchema.id, + func.count(ActivityEventsSchema.event_id).label("count"), + ActivitySchema.name, ) - query = query.where( - EventSchema.activity_id == activity_id, - EventSchema.is_deleted.is_(False), - 
EventSchema.user_id == respondent_id, + query = query.select_from(ActivitySchema) + query = query.join( + ActivityEventsSchema, + and_( + ActivitySchema.id == ActivityEventsSchema.activity_id, + ActivityEventsSchema.is_deleted == False, # noqa: E712 + ), + isouter=True, ) + query = query.join(EventSchema, ActivityEventsSchema.event_id == EventSchema.id) + + query = query.filter(ActivitySchema.is_deleted == False) # noqa: E712 + query = query.filter(ActivitySchema.applet_id == applet_id) + query = query.filter(EventSchema.periodicity != PeriodicityType.ALWAYS) + query = query.group_by(ActivitySchema.applet_id, ActivitySchema.id) result = await self._execute(query) - count: int = result.scalar() - return count + activity_event_counts: list[ActivityEventCount] = [ + ActivityEventCount( + activity_id=activity_id, + count=count, + activity_name=name, + ) + for activity_id, count, name in result + ] - async def count_by_flow(self, flow_id: uuid.UUID, respondent_id: uuid.UUID | None) -> int: + return activity_event_counts + + async def count_by_activity(self, activity_id: uuid.UUID, respondent_id: uuid.UUID | None) -> int: """Return event count.""" query: Query = select( - func.count(EventSchema.id).label("count"), + func.count(ActivityEventsSchema.event_id).label("count"), + ) + query = query.join( + UserEventsSchema, + UserEventsSchema.event_id == ActivityEventsSchema.event_id, + isouter=True, ) + query = query.filter(ActivityEventsSchema.activity_id == activity_id) query = query.filter( - EventSchema.activity_flow_id == flow_id, - EventSchema.is_deleted.is_(False), - EventSchema.user_id == respondent_id, + ActivityEventsSchema.is_deleted == False # noqa: E712 ) + query = query.filter(UserEventsSchema.user_id == respondent_id) result = await self._execute(query) count: int = result.scalar() return count - async def count_by_applet(self, applet_id: uuid.UUID) -> tuple[list[ActivityEventCount], list[FlowEventCount]]: - """Return activity ids and flow ids with event 
count.""" + async def get_by_event_ids(self, event_ids: list[uuid.UUID]) -> list[uuid.UUID]: + """Return activity event instances.""" + query: Query = select(distinct(ActivityEventsSchema.activity_id)) + query = query.where(ActivityEventsSchema.event_id.in_(event_ids)) + result = await self._execute(query) + activity_ids = result.scalars().all() + return activity_ids - query: Query = select( - ActivitySchema.id.label("activity_id"), - ActivitySchema.name.label("activity_name"), - ActivityFlowSchema.id.label("flow_id"), - ActivityFlowSchema.name.label("flow_name"), - func.count(EventSchema.id).label("count"), - ) - query = query.select_from(EventSchema) + async def get_by_applet_id(self, applet_id: uuid.UUID) -> list[ActivityEvent]: + """Return activity event instances.""" + query: Query = select(ActivityEventsSchema) + query = query.join(EventSchema, ActivityEventsSchema.event_id == EventSchema.id) + query = query.where(EventSchema.applet_id == applet_id) + result = await self._execute(query) + activity_events = result.scalars().all() + + return [ActivityEvent.from_orm(activity_event) for activity_event in activity_events] + + async def get_by_applet_and_user_id(self, applet_id: uuid.UUID, user_id: uuid.UUID) -> list[ActivityEvent]: + """Return activity event instances.""" + query: Query = select(ActivityEventsSchema) + query = query.join(EventSchema, ActivityEventsSchema.event_id == EventSchema.id) + query = query.join(UserEventsSchema, EventSchema.id == UserEventsSchema.event_id) query = query.join( + ActivitySchema, + ActivityEventsSchema.activity_id == ActivitySchema.id, + ) + query = query.where(EventSchema.applet_id == applet_id) + query = query.where(UserEventsSchema.user_id == user_id) + result = await self._execute(query) + activity_events = result.scalars().all() + + return [ActivityEvent.from_orm(activity_event) for activity_event in activity_events] + + async def get_missing_events(self, activity_ids: list[uuid.UUID]) -> list[uuid.UUID]: + query: Query 
= select(ActivityEventsSchema.activity_id) + query.join( ActivitySchema, and_( - ActivitySchema.id == EventSchema.activity_id, - ActivitySchema.is_deleted.is_(False), + ActivitySchema.id == ActivityEventsSchema.activity_id, + ActivitySchema.is_reviewable.is_(False), ), - isouter=True, ) + query.where(ActivityEventsSchema.activity_id.in_(activity_ids)) + res = await self._execute(query) + db_result = res.scalars().all() + return list(set(activity_ids) - set(db_result)) + + +class FlowEventsCRUD(BaseCRUD[FlowEventsSchema]): + schema_class = FlowEventsSchema + + async def save(self, schema: FlowEventCreate) -> FlowEvent: + """Return flow event instance and the created information.""" + try: + instance: FlowEventsSchema = await self._create(FlowEventsSchema(**schema.dict())) + except IntegrityError: + raise FlowEventAlreadyExists(flow_id=schema.flow_id, event_id=schema.event_id) + + flow_event: FlowEvent = FlowEvent.from_orm(instance) + return flow_event + + async def get_by_event_id(self, event_id: uuid.UUID) -> uuid.UUID | None: + """Return flow event instances.""" + query: Query = select(FlowEventsSchema.flow_id) + query = query.where(FlowEventsSchema.event_id == event_id) + query = query.where(FlowEventsSchema.is_deleted == False) # noqa: E712 + result = await self._execute(query) + + try: + flow_id: uuid.UUID = result.scalars().one_or_none() + except MultipleResultsFound: + raise EventError(message=f"Event{event_id} is used in multiple flows".format(event_id=event_id)) + + return flow_id + + async def delete_all_by_event_ids(self, event_ids: list[uuid.UUID]): + """Delete all flow events by event ids.""" + query: Query = delete(FlowEventsSchema) + query = query.where(FlowEventsSchema.event_id.in_(event_ids)) + await self._execute(query) + + async def count_by_applet(self, applet_id: uuid.UUID) -> list[FlowEventCount]: + """Return flow ids with event count.""" + + query: Query = select( + ActivityFlowSchema.id, + func.count(FlowEventsSchema.id).label("count"), + 
ActivityFlowSchema.name, + ) + query = query.select_from(ActivityFlowSchema) + query = query.join( - ActivityFlowSchema, + FlowEventsSchema, and_( - ActivityFlowSchema.id == EventSchema.activity_flow_id, - ActivityFlowSchema.is_deleted.is_(False), + FlowEventsSchema.flow_id == ActivityFlowSchema.id, + FlowEventsSchema.is_deleted == False, # noqa: E712 ), isouter=True, ) + query = query.join(EventSchema, FlowEventsSchema.event_id == EventSchema.id) - query = query.where( - EventSchema.is_deleted.is_(False), - EventSchema.applet_id == applet_id, - EventSchema.periodicity != PeriodicityType.ALWAYS, + query = query.filter(ActivityFlowSchema.applet_id == applet_id) + query = query.filter( + ActivityFlowSchema.is_deleted == False # noqa: E712 ) - query = query.group_by(EventSchema.applet_id, ActivitySchema.id, ActivityFlowSchema.id) + query = query.filter(EventSchema.periodicity != PeriodicityType.ALWAYS) + query = query.group_by(ActivityFlowSchema.applet_id, ActivityFlowSchema.id) result = await self._execute(query) - activity_event_counts: list[ActivityEventCount] = [] - flow_event_counts: list[FlowEventCount] = [] - - for activity_id, activity_name, flow_id, flow_name, count in result: - if activity_id: - activity_event_counts.append( - ActivityEventCount( - activity_id=activity_id, - count=count, - activity_name=activity_name, - ) - ) - if flow_id: - flow_event_counts.append( - FlowEventCount( - flow_id=flow_id, - count=count, - flow_name=flow_name, - ) - ) + flow_event_counts: list[FlowEventCount] = [ + FlowEventCount( + flow_id=flow_id, + count=count, + flow_name=name, + ) + for flow_id, count, name in result + ] - return activity_event_counts, flow_event_counts + return flow_event_counts - async def get_activities_without_events(self, activity_ids: list[uuid.UUID]) -> list[uuid.UUID]: - query: Query = select(EventSchema.activity_id) - query.join( - ActivitySchema, - and_( - ActivitySchema.id == EventSchema.activity_id, - ActivitySchema.is_reviewable.is_(False), - 
), - ) - query.where(EventSchema.activity_id.in_(activity_ids)) - res = await self._execute(query) - db_result = res.scalars().all() - return list(set(activity_ids) - set(db_result)) + async def get_by_event_ids(self, event_ids: list[uuid.UUID]) -> list[uuid.UUID]: + """Return flow event instances.""" + query: Query = select(distinct(FlowEventsSchema.flow_id)) + query = query.where(FlowEventsSchema.event_id.in_(event_ids)) + result = await self._execute(query) + flow_ids = result.scalars().all() + return flow_ids - async def get_by_type_and_applet_id(self, applet_id: uuid.UUID, event_type: EventType) -> list[Event]: - """Return event instances of type flow.""" - query: Query = select(EventSchema) - query = query.where( - EventSchema.applet_id == applet_id, - EventSchema.event_type == event_type, + async def count_by_flow(self, flow_id: uuid.UUID, respondent_id: uuid.UUID | None) -> int: + """Return event count.""" + + query: Query = select( + func.count(FlowEventsSchema.event_id).label("count"), + ) + query = query.join( + UserEventsSchema, + FlowEventsSchema.event_id == UserEventsSchema.event_id, + isouter=True, ) + query = query.filter(FlowEventsSchema.flow_id == flow_id) + query = query.filter( + FlowEventsSchema.is_deleted == False # noqa: E712 + ) + query = query.filter(UserEventsSchema.user_id == respondent_id) + result = await self._execute(query) - if event_type == EventType.FLOW: - query = query.where(EventSchema.activity_flow_id.isnot(None)) - else: - query = query.where(EventSchema.activity_id.isnot(None)) + count: int = result.scalar() + return count + + async def get_by_applet_id(self, applet_id: uuid.UUID) -> list[FlowEvent]: + """Return flow event instances.""" + query: Query = select(FlowEventsSchema) + query = query.join(EventSchema, FlowEventsSchema.event_id == EventSchema.id) + query = query.where(EventSchema.applet_id == applet_id) + result = await self._execute(query) + flow_events = result.scalars().all() + return 
[FlowEvent.from_orm(flow_event) for flow_event in flow_events] + + async def get_by_applet_and_user_id(self, applet_id: uuid.UUID, user_id: uuid.UUID) -> list[FlowEvent]: + """Return flow event instances.""" + query: Query = select(FlowEventsSchema) + query = query.join(EventSchema, FlowEventsSchema.event_id == EventSchema.id) + query = query.join(UserEventsSchema, EventSchema.id == UserEventsSchema.event_id) + query = query.where(EventSchema.applet_id == applet_id) + query = query.where(UserEventsSchema.user_id == user_id) result = await self._execute(query) flow_events = result.scalars().all() - return [Event.from_orm(flow_event) for flow_event in flow_events] + return [FlowEvent.from_orm(flow_event) for flow_event in flow_events] diff --git a/src/apps/schedule/db/schemas.py b/src/apps/schedule/db/schemas.py index dcc6f654876..bdfade12234 100644 --- a/src/apps/schedule/db/schemas.py +++ b/src/apps/schedule/db/schemas.py @@ -1,4 +1,5 @@ import datetime +import uuid from sqlalchemy import Boolean, Column, Date, ForeignKey, Integer, Interval, String, Time, UniqueConstraint, text from sqlalchemy.dialects.postgresql import ENUM, UUID @@ -7,6 +8,15 @@ from infrastructure.database.mixins import HistoryAware +class PeriodicitySchema(Base): + __tablename__ = "periodicity" + + type = Column(String(10), nullable=False) # Options: ONCE, DAILY, WEEKLY, WEEKDAYS, MONTHLY, ALWAYS + start_date = Column(Date, nullable=True) + end_date = Column(Date, nullable=True) + selected_date = Column(Date, nullable=True) + + class _BaseEventSchema: start_time = Column(Time, nullable=True) end_time = Column(Time, nullable=True) @@ -26,16 +36,17 @@ class _BaseEventSchema: start_date = Column(Date, nullable=True) end_date = Column(Date, nullable=True) selected_date = Column(Date, nullable=True) - event_type = Column(ENUM("activity", "flow", name="event_type_enum", create_type=False), nullable=False) - activity_id = Column(UUID(as_uuid=True), nullable=True) - activity_flow_id = 
Column(UUID(as_uuid=True), nullable=True) class EventSchema(_BaseEventSchema, Base): __tablename__ = "events" + periodicity_id = Column( + UUID(as_uuid=True), + default=lambda: uuid.uuid4(), + server_default=text("gen_random_uuid()"), + ) applet_id = Column(ForeignKey("applets.id", ondelete="CASCADE"), nullable=False) - user_id = Column(ForeignKey("users.id", ondelete="RESTRICT"), nullable=True) class EventHistorySchema(_BaseEventSchema, HistoryAware, Base): @@ -43,6 +54,9 @@ class EventHistorySchema(_BaseEventSchema, HistoryAware, Base): id_version = Column(String(), primary_key=True) id = Column(UUID(as_uuid=True)) + event_type = Column(ENUM("activity", "flow", name="event_type_enum", create_type=False), nullable=False) + activity_id = Column(UUID(as_uuid=True), nullable=True) + activity_flow_id = Column(UUID(as_uuid=True), nullable=True) user_id = Column(ForeignKey("users.id", ondelete="RESTRICT"), nullable=True) @@ -62,6 +76,54 @@ class AppletEventsSchema(Base): ) +class UserEventsSchema(Base): + __tablename__ = "user_events" + + user_id = Column(ForeignKey("users.id", ondelete="RESTRICT"), nullable=False) + event_id = Column(ForeignKey("events.id", ondelete="CASCADE"), nullable=False) + + __table_args__ = ( + UniqueConstraint( + "user_id", + "event_id", + "is_deleted", + name="_unique_user_events", + ), + ) + + +class ActivityEventsSchema(Base): + __tablename__ = "activity_events" + + activity_id = Column(UUID(as_uuid=True), nullable=False) + event_id = Column(ForeignKey("events.id", ondelete="CASCADE"), nullable=False) + + __table_args__ = ( + UniqueConstraint( + "activity_id", + "event_id", + "is_deleted", + name="_unique_activity_events", + ), + ) + + +class FlowEventsSchema(Base): + __tablename__ = "flow_events" + + flow_id = Column(UUID(as_uuid=True), nullable=False) + event_id = Column(ForeignKey("events.id", ondelete="CASCADE"), nullable=False) + + __table_args__ = ( + UniqueConstraint( + "flow_id", + "event_id", + "is_deleted", + 
name="_unique_flow_events", + ), + ) + + class _BaseNotificationSchema: from_time = Column(Time, nullable=True) to_time = Column(Time, nullable=True) diff --git a/src/apps/schedule/domain/schedule/internal.py b/src/apps/schedule/domain/schedule/internal.py index f1761eefb6a..dc47f06c707 100644 --- a/src/apps/schedule/domain/schedule/internal.py +++ b/src/apps/schedule/domain/schedule/internal.py @@ -3,7 +3,7 @@ from pydantic import Field, NonNegativeInt, root_validator -from apps.schedule.domain.constants import AvailabilityType, EventType, PeriodicityType, TimerType +from apps.schedule.domain.constants import AvailabilityType, PeriodicityType, TimerType from apps.schedule.domain.schedule.base import BaseEvent, BaseNotificationSetting, BaseReminderSetting from apps.schedule.domain.schedule.public import ( EventAvailabilityDto, @@ -20,8 +20,14 @@ __all__ = [ "Event", "ScheduleEvent", + "UserEvent", + "ActivityEvent", + "FlowEvent", "EventCreate", "EventUpdate", + "UserEventCreate", + "ActivityEventCreate", + "FlowEventCreate", "EventFull", "NotificationSettingCreate", "NotificationSetting", @@ -39,10 +45,6 @@ class EventCreate(BaseEvent, InternalModel): None, description="If type is WEEKLY, MONTHLY or ONCE, selectedDate must be set.", ) - user_id: uuid.UUID | None = None - activity_id: uuid.UUID | None = None - activity_flow_id: uuid.UUID | None = None - event_type: EventType @root_validator def validate_periodicity(cls, values): @@ -64,6 +66,39 @@ class Event(EventCreate, InternalModel): version: str +class UserEventCreate(InternalModel): + user_id: uuid.UUID + event_id: uuid.UUID + + +class UserEvent(UserEventCreate, InternalModel): + """UserEvent of a schedule""" + + id: uuid.UUID + + +class ActivityEventCreate(InternalModel): + activity_id: uuid.UUID + event_id: uuid.UUID + + +class ActivityEvent(ActivityEventCreate, InternalModel): + """ActivityEvent of a schedule""" + + id: uuid.UUID + + +class FlowEventCreate(InternalModel): + flow_id: uuid.UUID + event_id: 
uuid.UUID + + +class FlowEvent(FlowEventCreate, InternalModel): + """FlowEvent of a schedule""" + + id: uuid.UUID + + class NotificationSettingCreate(BaseNotificationSetting, InternalModel): event_id: uuid.UUID @@ -93,7 +128,6 @@ class EventFull(InternalModel, BaseEvent): activity_id: uuid.UUID | None = None flow_id: uuid.UUID | None = None version: str - event_type: EventType class ScheduleEvent(EventFull): diff --git a/src/apps/schedule/service/schedule.py b/src/apps/schedule/service/schedule.py index 2356de0780e..417a605ac73 100644 --- a/src/apps/schedule/service/schedule.py +++ b/src/apps/schedule/service/schedule.py @@ -6,21 +6,23 @@ from apps.activity_flows.crud import FlowsCRUD from apps.applets.crud import AppletsCRUD, UserAppletAccessCRUD from apps.applets.errors import AppletNotFoundError -from apps.schedule.crud.events import EventCRUD +from apps.schedule.crud.events import ActivityEventsCRUD, EventCRUD, FlowEventsCRUD, UserEventsCRUD from apps.schedule.crud.notification import NotificationCRUD, ReminderCRUD -from apps.schedule.crud.schedule_history import NotificationHistoryCRUD, ReminderHistoryCRUD from apps.schedule.db.schemas import EventSchema, NotificationSchema -from apps.schedule.domain.constants import DefaultEvent, EventType, PeriodicityType +from apps.schedule.domain.constants import DefaultEvent, PeriodicityType from apps.schedule.domain.schedule import BaseEvent from apps.schedule.domain.schedule.internal import ( + ActivityEventCreate, Event, EventCreate, EventFull, EventUpdate, + FlowEventCreate, NotificationSetting, ReminderSetting, ReminderSettingCreate, ScheduleEvent, + UserEventCreate, ) from apps.schedule.domain.schedule.public import ( PublicEvent, @@ -93,16 +95,25 @@ async def create_schedule(self, schedule: EventRequest, applet_id: uuid.UUID) -> start_date=schedule.periodicity.start_date, end_date=schedule.periodicity.end_date, selected_date=schedule.periodicity.selected_date, - user_id=schedule.respondent_id, - 
activity_id=schedule.activity_id, - activity_flow_id=schedule.flow_id, - event_type=EventType.ACTIVITY if schedule.activity_id else EventType.FLOW, ) ) + # Create user event + if schedule.respondent_id: + await UserEventsCRUD(self.session).save(UserEventCreate(event_id=event.id, user_id=schedule.respondent_id)) + # Create event-activity or event-flow + if schedule.activity_id: + await ActivityEventsCRUD(self.session).save( + ActivityEventCreate(event_id=event.id, activity_id=schedule.activity_id) + ) + else: + await FlowEventsCRUD(self.session).save(FlowEventCreate(event_id=event.id, flow_id=schedule.flow_id)) + schedule_event = ScheduleEvent( - **event.dict(exclude={"applet_id", "activity_flow_id"}), - flow_id=event.activity_flow_id, + **event.dict(exclude={"applet_id"}), + activity_id=schedule.activity_id, + flow_id=schedule.flow_id, + user_id=schedule.respondent_id, ) # Create notification and reminder @@ -166,6 +177,7 @@ async def create_schedule(self, schedule: EventRequest, applet_id: uuid.UUID) -> selected_date=event.selected_date, ), respondent_id=schedule.respondent_id, + activity_id=schedule.activity_id, flow_id=schedule.flow_id, notification=notification_public if schedule.notification else None, ) @@ -175,18 +187,22 @@ async def get_schedule_by_id(self, schedule_id: uuid.UUID, applet_id: uuid.UUID) await self._validate_applet(applet_id=applet_id) event: Event = await EventCRUD(self.session).get_by_id(pk=schedule_id) + user_id = await UserEventsCRUD(self.session).get_by_event_id(event_id=event.id) + activity_id = await ActivityEventsCRUD(self.session).get_by_event_id(event_id=event.id) + flow_id = await FlowEventsCRUD(self.session).get_by_event_id(event_id=event.id) notification = await self._get_notifications_and_reminder(event.id) return PublicEvent( - **event.dict(exclude={"periodicity", "user_id", "activity_flow_id"}), + **event.dict(exclude={"periodicity"}), periodicity=PublicPeriodicity( type=event.periodicity, start_date=event.start_date, 
end_date=event.end_date, selected_date=event.selected_date, ), - respondent_id=event.user_id, - flow_id=event.activity_flow_id, + respondent_id=user_id, + activity_id=activity_id, + flow_id=flow_id, notification=notification, ) @@ -208,19 +224,24 @@ async def get_all_schedules(self, applet_id: uuid.UUID, query: QueryParams | Non for event_schema in event_schemas: event: Event = Event.from_orm(event_schema) + + user_id = await UserEventsCRUD(self.session).get_by_event_id(event_id=event.id) + activity_id = await ActivityEventsCRUD(self.session).get_by_event_id(event_id=event.id) + flow_id = await FlowEventsCRUD(self.session).get_by_event_id(event_id=event.id) notification = await self._get_notifications_and_reminder(event.id) events.append( PublicEvent( - **event.dict(exclude={"periodicity", "user_id", "activity_flow_id"}), + **event.dict(exclude={"periodicity"}), periodicity=PublicPeriodicity( type=event.periodicity, start_date=event.start_date, end_date=event.end_date, selected_date=event.selected_date, ), - respondent_id=event.user_id, - flow_id=event.activity_flow_id, + respondent_id=user_id, + activity_id=activity_id, + flow_id=flow_id, notification=notification, ) ) @@ -236,6 +257,8 @@ async def get_public_all_schedules(self, key: uuid.UUID) -> PublicEventByUser: full_events: list[EventFull] = [] for event_schema in event_schemas: event: Event = Event.from_orm(event_schema) + activity_id = await ActivityEventsCRUD(self.session).get_by_event_id(event_id=event.id) + flow_id = await FlowEventsCRUD(self.session).get_by_event_id(event_id=event.id) base_event = BaseEvent(**event.dict()) full_events.append( @@ -246,11 +269,9 @@ async def get_public_all_schedules(self, key: uuid.UUID) -> PublicEventByUser: start_date=event.start_date, end_date=event.end_date, selected_date=event.selected_date, - activity_id=event.activity_id, - flow_id=event.activity_flow_id, - user_id=event.user_id, + activity_id=activity_id, + flow_id=flow_id, version=event.version, - 
event_type=event.event_type, ) ) @@ -277,6 +298,10 @@ async def delete_all_schedules(self, applet_id: uuid.UUID): event_schemas: list[EventSchema] = await EventCRUD(self.session).get_all_by_applet_id_with_filter(applet_id) event_ids = [event_schema.id for event_schema in event_schemas] + # Get all activity_ids and flow_ids + activity_ids = await ActivityEventsCRUD(self.session).get_by_event_ids(event_ids) + flow_ids = await FlowEventsCRUD(self.session).get_by_event_ids(event_ids) + await self._delete_by_ids(event_ids) await ScheduleHistoryService(self.session).mark_as_deleted( @@ -284,54 +309,50 @@ async def delete_all_schedules(self, applet_id: uuid.UUID): ) # Create default events for activities and flows - processed_activities_and_flows: dict[uuid.UUID, bool] = {} - for event in event_schemas: - if event.activity_id and event.activity_id not in processed_activities_and_flows: - await self._create_default_event( - applet_id=applet_id, - activity_id=event.activity_id, - is_activity=True, - respondent_id=event.user_id, - ) - processed_activities_and_flows[event.activity_id] = True - if event.activity_flow_id and event.activity_flow_id not in processed_activities_and_flows: - await self._create_default_event( - applet_id=applet_id, - activity_id=event.activity_flow_id, - is_activity=False, - respondent_id=event.user_id, - ) - processed_activities_and_flows[event.activity_flow_id] = True + for activity_id in activity_ids: + await self._create_default_event(applet_id=applet_id, activity_id=activity_id, is_activity=True) + + for flow_id in flow_ids: + await self._create_default_event(applet_id=applet_id, activity_id=flow_id, is_activity=False) async def delete_schedule_by_id(self, schedule_id: uuid.UUID) -> uuid.UUID | None: - crud = EventCRUD(self.session) - event: Event = await crud.get_by_id(pk=schedule_id) + event: Event = await EventCRUD(self.session).get_by_id(pk=schedule_id) + respondent_id = await 
UserEventsCRUD(self.session).get_by_event_id(event_id=schedule_id) + # Get activity_id or flow_id if exists + activity_id = await ActivityEventsCRUD(self.session).get_by_event_id(event_id=schedule_id) + flow_id = await FlowEventsCRUD(self.session).get_by_event_id(event_id=schedule_id) + + # Delete event-user, event-activity, event-flow await self._delete_by_ids(event_ids=[schedule_id]) await ScheduleHistoryService(self.session).mark_as_deleted([(event.id, event.version)]) - # Create default event for activity or flow if another event doesn't exist - if event.activity_id: - count_events = await crud.count_by_activity(activity_id=event.activity_id, respondent_id=event.user_id) + # Create default event for activity or flow if another event doesn't exist # noqa: E501 + if activity_id: + count_events = await ActivityEventsCRUD(self.session).count_by_activity( + activity_id=activity_id, respondent_id=respondent_id + ) if count_events == 0: await self._create_default_event( applet_id=event.applet_id, - activity_id=event.activity_id, + activity_id=activity_id, is_activity=True, - respondent_id=event.user_id, + respondent_id=respondent_id, ) - elif event.activity_flow_id: - count_events = await crud.count_by_flow(flow_id=event.activity_flow_id, respondent_id=event.user_id) + elif flow_id: + count_events = await FlowEventsCRUD(self.session).count_by_flow( + flow_id=flow_id, respondent_id=respondent_id + ) if count_events == 0: await self._create_default_event( applet_id=event.applet_id, - activity_id=event.activity_flow_id, + activity_id=flow_id, is_activity=False, - respondent_id=event.user_id, + respondent_id=respondent_id, ) - return event.user_id + return respondent_id async def update_schedule( self, @@ -343,21 +364,22 @@ async def update_schedule( await self._validate_applet(applet_id=applet_id) event: Event = await EventCRUD(self.session).get_by_id(pk=schedule_id) + activity_id = await ActivityEventsCRUD(self.session).get_by_event_id(event_id=schedule_id) + flow_id = 
await FlowEventsCRUD(self.session).get_by_event_id(event_id=schedule_id) + respondent_id = await UserEventsCRUD(self.session).get_by_event_id(event_id=schedule_id) # Delete all events of this activity or flow - # if new periodicity type is "always" and old periodicity type is not "always" - if schedule.periodicity.type == PeriodicityType.ALWAYS and event.periodicity != PeriodicityType.ALWAYS: + # if new periodicity type is "always" and old periodicity type is not "always" # noqa: E501 + if schedule.periodicity.type == PeriodicityType.ALWAYS and event.periodicity != PeriodicityType.ALWAYS: # noqa: E501 await self._delete_by_activity_or_flow( applet_id=applet_id, - activity_id=event.activity_id, - flow_id=event.activity_flow_id, - respondent_id=event.user_id, + activity_id=activity_id, + flow_id=flow_id, + respondent_id=respondent_id, only_always_available=False, except_event_id=schedule_id, ) - old_event_version = event.version - # Update event event = await EventCRUD(self.session).update( pk=schedule_id, @@ -373,27 +395,20 @@ async def update_schedule( start_date=schedule.periodicity.start_date, end_date=schedule.periodicity.end_date, selected_date=schedule.periodicity.selected_date, - event_type=event.event_type, - activity_id=event.activity_id, - activity_flow_id=event.activity_flow_id, - user_id=event.user_id, ), ) schedule_event = ScheduleEvent( - **event.dict(exclude={"applet_id", "activity_flow_id"}), - flow_id=event.activity_flow_id, + **event.dict(exclude={"applet_id"}), + activity_id=activity_id, + flow_id=flow_id, + user_id=respondent_id, ) # Update notification await NotificationCRUD(self.session).delete_by_event_ids([schedule_id]) await ReminderCRUD(self.session).delete_by_event_ids([schedule_id]) - await asyncio.gather( - NotificationHistoryCRUD(self.session).mark_as_deleted([(event.id, old_event_version)]), - ReminderHistoryCRUD(self.session).mark_as_deleted([(event.id, old_event_version)]), - ) - notification_public = None if schedule.notification: 
notifications = None @@ -446,15 +461,16 @@ async def update_schedule( ) return PublicEvent( - **event.dict(exclude={"periodicity", "user_id", "activity_flow_id"}), + **event.dict(exclude={"periodicity"}), periodicity=PublicPeriodicity( type=event.periodicity, start_date=event.start_date, end_date=event.end_date, selected_date=event.selected_date, ), - respondent_id=event.user_id, - flow_id=event.activity_flow_id, + respondent_id=respondent_id, + activity_id=activity_id, + flow_id=flow_id, notification=notification_public, ) @@ -491,43 +507,46 @@ async def count_schedules(self, applet_id: uuid.UUID) -> PublicEventCount: event_count = PublicEventCount(activity_events=[], flow_events=[]) # Get list of activity-event ids - activity_counts, flow_counts = await EventCRUD(self.session).count_by_applet(applet_id=applet_id) + activity_counts = await ActivityEventsCRUD(self.session).count_by_applet(applet_id=applet_id) + + # Get list of flow-event ids + flow_counts = await FlowEventsCRUD(self.session).count_by_applet(applet_id=applet_id) event_count.activity_events = activity_counts if activity_counts else [] event_count.flow_events = flow_counts if flow_counts else [] return event_count - async def delete_by_user_id(self, applet_id: uuid.UUID, user_id: uuid.UUID) -> None: + async def delete_by_user_id(self, applet_id, user_id): # Check if applet exists await self._validate_applet(applet_id=applet_id) # Check if user exists await self._validate_user(user_id=user_id) - event_schemas = await EventCRUD(self.session).get_all_by_applet_and_user(applet_id, user_id) + # Get list of activity-event ids and flow-event ids for user to create default events # noqa: E501 + activities = await ActivityEventsCRUD(self.session).get_by_applet_and_user_id(applet_id, user_id) - # List of event_ids for user for deletion - event_ids: list[uuid.UUID] = [] - activity_ids: set[uuid.UUID] = set() - flow_ids: set[uuid.UUID] = set() + activity_ids = {activity.activity_id for activity in activities} - 
for event in event_schemas: - event_ids.append(event.id) - if event.activity_id: - activity_ids.add(event.activity_id) - if event.flow_id: - flow_ids.add(event.flow_id) + flows = await FlowEventsCRUD(self.session).get_by_applet_and_user_id(applet_id, user_id) + flow_ids = {flow.flow_id for flow in flows} + # Get list of event_ids for user and delete them all + event_schemas = await EventCRUD(self.session).get_all_by_applet_and_user(applet_id, user_id) + event_ids = [event_schema.id for event_schema in event_schemas] if not event_ids: raise ScheduleNotFoundError() - await self._delete_by_ids(event_ids=event_ids) + await self._delete_by_ids( + event_ids=event_ids, + user_id=user_id, + ) await ScheduleHistoryService(self.session).mark_as_deleted( [(event.id, event.version) for event in event_schemas] ) - # Create always available events for all activities and flows + # Create AA events for all activities and flows await self.create_default_schedules( applet_id=applet_id, activity_ids=list(activity_ids), @@ -589,7 +608,8 @@ async def _delete_by_activity_or_flow( only_always_available, ) - event_ids = [event.id for event in event_schemas if event.id != except_event_id] + clean_events = [event for event in event_schemas if event.id != except_event_id] + event_ids = [event.id for event in clean_events] if event_ids: await self._delete_by_ids(event_ids=event_ids) @@ -600,7 +620,18 @@ async def _delete_by_activity_or_flow( async def _delete_by_ids( self, event_ids: list[uuid.UUID], + user_id: uuid.UUID | None = None, ): + if user_id: + await UserEventsCRUD(self.session).delete_all_by_events_and_user( + event_ids, + user_id, + ) + else: + await UserEventsCRUD(self.session).delete_all_by_event_ids(event_ids) + + await ActivityEventsCRUD(self.session).delete_all_by_event_ids(event_ids) + await FlowEventsCRUD(self.session).delete_all_by_event_ids(event_ids) await NotificationCRUD(self.session).delete_by_event_ids(event_ids) await 
ReminderCRUD(self.session).delete_by_event_ids(event_ids) await EventCRUD(self.session).delete_by_ids(event_ids) @@ -851,7 +882,10 @@ async def remove_individual_calendar(self, user_id: uuid.UUID, applet_id: uuid.U if not event_ids: raise ScheduleNotFoundError() - await self._delete_by_ids(event_ids=event_ids) + await self._delete_by_ids( + event_ids=event_ids, + user_id=user_id, + ) await ScheduleHistoryService(self.session).mark_as_deleted( [(event.id, event.version) for event in event_schemas] @@ -913,7 +947,7 @@ async def create_default_schedules_if_not_exist( activity_ids: list[uuid.UUID], ) -> None: """Create default schedules for applet.""" - activities_without_events = await EventCRUD(self.session).get_activities_without_events(activity_ids) + activities_without_events = await ActivityEventsCRUD(self.session).get_missing_events(activity_ids) await self.create_default_schedules( applet_id=applet_id, activity_ids=activities_without_events, diff --git a/src/apps/schedule/service/schedule_history.py b/src/apps/schedule/service/schedule_history.py index 62df4620d7a..2794be71061 100644 --- a/src/apps/schedule/service/schedule_history.py +++ b/src/apps/schedule/service/schedule_history.py @@ -27,9 +27,6 @@ def __init__(self, session): async def add_history(self, applet_id: uuid.UUID, event: ScheduleEvent): applet = await AppletsCRUD(self.session).get_by_id(applet_id) - # Refresh the applet so we don't get the old version number, in case the version has changed - await self.session.refresh(applet) - event_history = await ScheduleHistoryCRUD(self.session).add( EventHistorySchema( start_time=event.start_time, diff --git a/src/apps/workspaces/crud/user_applet_access.py b/src/apps/workspaces/crud/user_applet_access.py index 05579baee1b..b2f46c1c970 100644 --- a/src/apps/workspaces/crud/user_applet_access.py +++ b/src/apps/workspaces/crud/user_applet_access.py @@ -29,7 +29,7 @@ from apps.applets.db.schemas import AppletSchema from apps.invitations.constants import 
InvitationStatus from apps.invitations.db import InvitationSchema -from apps.schedule.db.schemas import EventSchema +from apps.schedule.db.schemas import EventSchema, UserEventsSchema from apps.shared.encryption import get_key from apps.shared.filtering import FilterField, Filtering from apps.shared.ordering import Ordering @@ -402,9 +402,10 @@ async def get_workspace_respondents( workspace_applets_sq = self.workspace_applets_subquery(owner_id, applet_id) schedule_exists = ( - select(EventSchema) + select(UserEventsSchema) + .join(EventSchema, EventSchema.id == UserEventsSchema.event_id) .where( - EventSchema.user_id == UserAppletAccessSchema.user_id, + UserEventsSchema.user_id == UserAppletAccessSchema.user_id, EventSchema.applet_id == UserAppletAccessSchema.applet_id, ) .exists() @@ -1046,11 +1047,12 @@ async def get_respondent_accesses_by_owner_id( page: int, limit: int, ) -> list[RespondentAppletAccess]: - individual_event_query: Query = select(EventSchema.id) + individual_event_query: Query = select(UserEventsSchema.id) + individual_event_query = individual_event_query.join(EventSchema, EventSchema.id == UserEventsSchema.event_id) individual_event_query = individual_event_query.where( - EventSchema.user_id == UserAppletAccessSchema.user_id, - EventSchema.applet_id == UserAppletAccessSchema.applet_id, + UserEventsSchema.user_id == UserAppletAccessSchema.user_id ) + individual_event_query = individual_event_query.where(EventSchema.applet_id == UserAppletAccessSchema.applet_id) query: Query = select( SubjectSchema.secret_user_id, diff --git a/src/infrastructure/database/migrations/versions/2025_02_02_18_39-clean_up_schedule_tables.py b/src/infrastructure/database/migrations/versions/2025_02_02_18_39-clean_up_schedule_tables.py deleted file mode 100644 index 69c43bf080d..00000000000 --- a/src/infrastructure/database/migrations/versions/2025_02_02_18_39-clean_up_schedule_tables.py +++ /dev/null @@ -1,174 +0,0 @@ -"""Clean up schedule tables - -Revision ID: 
3059a8ad6ec5 -Revises: 7c7e30fa96a4 -Create Date: 2025-02-02 18:39:01.011295 - -""" - -import sqlalchemy as sa -from alembic import op -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision = "3059a8ad6ec5" -down_revision = "7c7e30fa96a4" -branch_labels = None -depends_on = None - -EVENT_TYPE_ENUM = 'event_type_enum' -EVENT_TYPE_ENUM_VALUES = ['activity', 'flow'] - - -def upgrade() -> None: - # Add columns `event_type`, `activity_id`, `activity_flow_id`, and `user_id` to `events` - op.add_column("events", sa.Column("activity_id", postgresql.UUID(as_uuid=True), nullable=True)) - op.add_column("events", sa.Column("activity_flow_id", postgresql.UUID(as_uuid=True), nullable=True)) - op.add_column("events", sa.Column("user_id", postgresql.UUID(as_uuid=True), sa.ForeignKey("users.id", ondelete="RESTRICT"), nullable=True)) - op.add_column("events", sa.Column("event_type", postgresql.ENUM(*EVENT_TYPE_ENUM_VALUES, name=EVENT_TYPE_ENUM, create_type=False), nullable=True)) - - # Migrate data from `activity_events`, `flow_events`, and `user_events` to `events` - op.execute(""" - UPDATE events dst - SET activity_id=ae.activity_id, - activity_flow_id = fe.flow_id, - user_id=ue.user_id, - event_type=(CASE WHEN ae.activity_id IS NOT NULL THEN 'activity' ELSE 'flow' END)::event_type_enum - FROM events e - LEFT JOIN activity_events ae ON e.id = ae.event_id - LEFT JOIN flow_events fe ON e.id = fe.event_id - LEFT JOIN user_events ue ON e.id = ue.event_id - WHERE dst.id = e.id - """) - - # Make sure that the `event_type` column is not null - op.alter_column("events", "event_type", nullable=False) - - # Drop the `periodicity_id` column from the `events` table - op.drop_column("events", "periodicity_id") - - # Drop tables - op.drop_table("activity_events") - op.drop_table("flow_events") - op.drop_table("user_events") - op.drop_table("periodicity") - - -def downgrade() -> None: - # Recreate the dropped tables - op.create_table( - 
"activity_events", - sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False), - sa.Column("is_deleted", sa.Boolean(), nullable=True), - sa.Column("created_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), - sa.Column("updated_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), - sa.Column("migrated_date", sa.DateTime(), nullable=True), - sa.Column("migrated_updated", sa.DateTime(), nullable=True), - sa.Column("event_id", postgresql.UUID(as_uuid=True), nullable=False), - sa.Column("activity_id", postgresql.UUID(as_uuid=True), nullable=False), - sa.PrimaryKeyConstraint("id", name=op.f("pk_activity_events")), - sa.UniqueConstraint("activity_id", "event_id", "is_deleted", name="_unique_activity_events"), - sa.ForeignKeyConstraint(["event_id"], ["events.id"], name=op.f("fk_activity_events_event_id_events"), ondelete="CASCADE"), - ) - - op.create_table( - "flow_events", - sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False), - sa.Column("is_deleted", sa.Boolean(), nullable=True), - sa.Column("created_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), - sa.Column("updated_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), - sa.Column("migrated_date", sa.DateTime(), nullable=True), - sa.Column("migrated_updated", sa.DateTime(), nullable=True), - sa.Column("event_id", postgresql.UUID(as_uuid=True), nullable=False), - sa.Column("flow_id", postgresql.UUID(as_uuid=True), nullable=False), - sa.PrimaryKeyConstraint("id", name=op.f("pk_flow_events")), - sa.UniqueConstraint("flow_id", "event_id", "is_deleted", name="_unique_flow_events"), - sa.ForeignKeyConstraint(["event_id"], ["events.id"], name=op.f("fk_flow_events_event_id_events"), ondelete="CASCADE"), - ) - - op.create_table( - "user_events", - sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False), - sa.Column("is_deleted", sa.Boolean(), nullable=True), - 
sa.Column("created_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), - sa.Column("updated_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), - sa.Column("migrated_date", sa.DateTime(), nullable=True), - sa.Column("migrated_updated", sa.DateTime(), nullable=True), - sa.Column("event_id", postgresql.UUID(as_uuid=True), nullable=False), - sa.Column("user_id", postgresql.UUID(as_uuid=True), nullable=False), - sa.PrimaryKeyConstraint("id", name=op.f("pk_user_events")), - sa.UniqueConstraint("user_id", "event_id", "is_deleted", name="_unique_user_events"), - sa.ForeignKeyConstraint(["event_id"], ["events.id"], name=op.f("fk_user_events_event_id_events"), ondelete="CASCADE"), - sa.ForeignKeyConstraint(["user_id"], ["users.id"], name=op.f("fk_user_events_user_id_users"), ondelete="RESTRICT"), - ) - - op.create_table( - "periodicity", - sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False), - sa.Column("is_deleted", sa.Boolean(), nullable=True), - sa.Column("created_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), - sa.Column("updated_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), - sa.Column("migrated_date", sa.DateTime(), nullable=True), - sa.Column("migrated_updated", sa.DateTime(), nullable=True), - sa.Column("type", sa.String(10), nullable=False), - sa.Column("start_date", sa.Date(), nullable=True), - sa.Column("end_date", sa.Date(), nullable=True), - sa.Column("selected_date", sa.Date(), nullable=True), - sa.PrimaryKeyConstraint("id", name=op.f("pk_periodicity")), - ) - - # Add the `periodicity_id` column back to the `events` table - op.add_column( - "events", - sa.Column( - "periodicity_id", - postgresql.UUID(as_uuid=True), - server_default=sa.text("gen_random_uuid()"), - nullable=False - ) - ) - - # Generate periodicity IDs for existing events - op.execute(""" - UPDATE events - SET periodicity_id = 
gen_random_uuid() - WHERE periodicity_id IS NULL - """) - - # Repopulate the `activity_events`, `flow_events`, `user_events`, and `periodicity` tables - # We do lose some data here (e.g. the original `id`, `created_at`, `updated_at`, `migrated_date`, `migrated_updated`), - # because we can't recover that data from the `events` table - op.execute(""" - INSERT INTO activity_events (id, is_deleted, activity_id, event_id) - SELECT gen_random_uuid(), e.is_deleted, e.activity_id, e.id - FROM events e - WHERE e.activity_id IS NOT NULL - AND e.event_type = 'activity' - """) - - op.execute(""" - INSERT INTO flow_events (id, is_deleted, flow_id, event_id) - SELECT gen_random_uuid(), e.is_deleted, e.activity_flow_id, e.id - FROM events e - WHERE e.activity_flow_id IS NOT NULL - AND e.event_type = 'flow' - """) - - op.execute(""" - INSERT INTO user_events (id, is_deleted, user_id, event_id) - SELECT gen_random_uuid(), e.is_deleted, e.user_id, e.id - FROM events e - WHERE e.user_id IS NOT NULL - """) - - op.execute(""" - INSERT INTO periodicity (id, is_deleted, type, start_date, end_date, selected_date) - SELECT e.periodicity_id, e.is_deleted, e.periodicity, e.start_date, e.end_date, e.selected_date - FROM events e - """) - - # Drop the new columns from the `events` table - op.drop_column("events", "activity_id") - op.drop_column("events", "activity_flow_id") - op.drop_column("events", "user_id") - op.drop_column("events", "event_type") From aafbaace8217d619c8fdf618e6820a656aeaa864 Mon Sep 17 00:00:00 2001 From: Kenroy Gobourne <14842108+sultanofcardio@users.noreply.github.com> Date: Wed, 19 Feb 2025 20:29:11 -0500 Subject: [PATCH 07/14] fix: Scheduling History Logic Fixes (M2-8494) (M2-8717) (M2-8725) (#1746) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR fixes the following issues: - After adding a new activity to an applet, its entry to `applet_events` uses the old applet version number - Adding a new scheduled event for an 
activity/flow removes all its previous scheduled event(s). There can be only one "always available" event, whereas many scheduled events per activity/flow should be allowed - In the *notification_histories* table, the *is_deleted* column doesn't become `true` after removing the notification from the event - In the *reminder_histories* table, the *is_deleted* column doesn’t become `true` after removing the reminder from the event --- src/apps/schedule/crud/events.py | 14 ++++++++------ src/apps/schedule/service/schedule.py | 8 ++++++++ src/apps/schedule/service/schedule_history.py | 3 +++ 3 files changed, 19 insertions(+), 6 deletions(-) diff --git a/src/apps/schedule/crud/events.py b/src/apps/schedule/crud/events.py index 4be6ec97d44..87b7d97f756 100644 --- a/src/apps/schedule/crud/events.py +++ b/src/apps/schedule/crud/events.py @@ -331,7 +331,7 @@ async def get_all_by_applet_and_activity( ) # select only always available if requested if only_always_available: - query.where(EventSchema.periodicity == PeriodicityType.ALWAYS) + query = query.where(EventSchema.periodicity == PeriodicityType.ALWAYS) query = query.where(EventSchema.applet_id == applet_id) query = query.where(EventSchema.is_deleted.is_(False)) @@ -376,10 +376,12 @@ async def validate_existing_always_available( isouter=True, ) - query.where(EventSchema.periodicity == PeriodicityType.ALWAYS) - query = query.where(EventSchema.applet_id == applet_id) - query = query.where(EventSchema.is_deleted.is_(False)) - query = query.where(UserEventsSchema.user_id == respondent_id) + query = query.where( + EventSchema.periodicity == PeriodicityType.ALWAYS, + EventSchema.applet_id == applet_id, + EventSchema.is_deleted.is_(False), + UserEventsSchema.user_id == respondent_id, + ) query = query.limit(1) result = await self._execute(query) @@ -411,7 +413,7 @@ async def get_all_by_applet_and_flow( # select only always available if requested if only_always_available: - query.where(EventSchema.periodicity == 
PeriodicityType.ALWAYS) + query = query.where(EventSchema.periodicity == PeriodicityType.ALWAYS) query = query.where(EventSchema.applet_id == applet_id) query = query.where(EventSchema.is_deleted.is_(False)) diff --git a/src/apps/schedule/service/schedule.py b/src/apps/schedule/service/schedule.py index 417a605ac73..99f7d21e07a 100644 --- a/src/apps/schedule/service/schedule.py +++ b/src/apps/schedule/service/schedule.py @@ -8,6 +8,7 @@ from apps.applets.errors import AppletNotFoundError from apps.schedule.crud.events import ActivityEventsCRUD, EventCRUD, FlowEventsCRUD, UserEventsCRUD from apps.schedule.crud.notification import NotificationCRUD, ReminderCRUD +from apps.schedule.crud.schedule_history import NotificationHistoryCRUD, ReminderHistoryCRUD from apps.schedule.db.schemas import EventSchema, NotificationSchema from apps.schedule.domain.constants import DefaultEvent, PeriodicityType from apps.schedule.domain.schedule import BaseEvent @@ -380,6 +381,8 @@ async def update_schedule( except_event_id=schedule_id, ) + old_event_version = event.version + # Update event event = await EventCRUD(self.session).update( pk=schedule_id, @@ -409,6 +412,11 @@ async def update_schedule( await NotificationCRUD(self.session).delete_by_event_ids([schedule_id]) await ReminderCRUD(self.session).delete_by_event_ids([schedule_id]) + await asyncio.gather( + NotificationHistoryCRUD(self.session).mark_as_deleted([(event.id, old_event_version)]), + ReminderHistoryCRUD(self.session).mark_as_deleted([(event.id, old_event_version)]), + ) + notification_public = None if schedule.notification: notifications = None diff --git a/src/apps/schedule/service/schedule_history.py b/src/apps/schedule/service/schedule_history.py index 2794be71061..62df4620d7a 100644 --- a/src/apps/schedule/service/schedule_history.py +++ b/src/apps/schedule/service/schedule_history.py @@ -27,6 +27,9 @@ def __init__(self, session): async def add_history(self, applet_id: uuid.UUID, event: ScheduleEvent): applet = 
await AppletsCRUD(self.session).get_by_id(applet_id) + # Refresh the applet so we don't get the old version number, in case the version has changed + await self.session.refresh(applet) + event_history = await ScheduleHistoryCRUD(self.session).add( EventHistorySchema( start_time=event.start_time, From 257c326f188a6c924148562afad15e048a40a07d Mon Sep 17 00:00:00 2001 From: Kenroy Gobourne <14842108+sultanofcardio@users.noreply.github.com> Date: Thu, 20 Feb 2025 13:20:57 -0500 Subject: [PATCH 08/14] chore: Schedules database clean up (M2-8495) (#1723) This PR implements the following cleanup operations: - Adds four new columns to the `events` table: - `activity_id`: taken from the `activity_events` table - `activity_flow_id`: taken from the `flow_events` table - `user_id`: taken from the `user_events` table - `event_type`: Enum with value `activity` or `flow` - Removes the `periodicity_id` column from the `events` table - Migrates all the data from the `activity_events`, `flow_events`, and `user_events` tables into the `events` table - Drops the `activity_events`, `flow_events`, `user_events`, and `periodicity` tables - Updates the API code to account for these changes, which mostly included rewriting queries to select only from the `events` table where multiple joins (and sometimes multiple queries) were previously being done These changes are intended to be 100% backwards compatible and should introduce no new features from the client perspective --- src/apps/activities/services/activity.py | 7 +- src/apps/activity_flows/service/flow.py | 7 +- src/apps/applets/commands/applet_ema.py | 78 +- src/apps/schedule/crud/events.py | 803 +++++------------- src/apps/schedule/db/schemas.py | 70 +- src/apps/schedule/domain/schedule/internal.py | 46 +- src/apps/schedule/service/schedule.py | 202 ++--- .../workspaces/crud/user_applet_access.py | 14 +- ...25_02_02_18_39-clean_up_schedule_tables.py | 174 ++++ 9 files changed, 498 insertions(+), 903 deletions(-) create mode 100644 
src/infrastructure/database/migrations/versions/2025_02_02_18_39-clean_up_schedule_tables.py diff --git a/src/apps/activities/services/activity.py b/src/apps/activities/services/activity.py index 20eb9bb68ff..d8d6dc94f48 100644 --- a/src/apps/activities/services/activity.py +++ b/src/apps/activities/services/activity.py @@ -23,7 +23,8 @@ from apps.activity_assignments.service import ActivityAssignmentService from apps.activity_flows.crud import FlowsCRUD from apps.applets.crud import AppletsCRUD, UserAppletAccessCRUD -from apps.schedule.crud.events import ActivityEventsCRUD, EventCRUD +from apps.schedule.crud.events import EventCRUD +from apps.schedule.domain.constants import EventType from apps.schedule.service.schedule import ScheduleService from apps.workspaces.domain.constants import Role from infrastructure.logger import logger @@ -115,9 +116,9 @@ async def update_create(self, applet_id: uuid.UUID, activities_create: list[Acti activity_id_key_map: dict[uuid.UUID, uuid.UUID] = dict() prepared_activity_items = list() - all_activities = await ActivityEventsCRUD(self.session).get_by_applet_id(applet_id) + activity_events = await EventCRUD(self.session).get_by_type_and_applet_id(applet_id, EventType.ACTIVITY) - all_activity_ids = [activity.activity_id for activity in all_activities] + all_activity_ids = [activity.activity_id for activity in activity_events if activity.activity_id is not None] # Save new activity ids new_activities = [] diff --git a/src/apps/activity_flows/service/flow.py b/src/apps/activity_flows/service/flow.py index 391d67d7c68..09622746247 100644 --- a/src/apps/activity_flows/service/flow.py +++ b/src/apps/activity_flows/service/flow.py @@ -15,7 +15,8 @@ from apps.activity_flows.service.flow_item import FlowItemService from apps.applets.crud import UserAppletAccessCRUD from apps.applets.domain.applet_history import Version -from apps.schedule.crud.events import EventCRUD, FlowEventsCRUD +from apps.schedule.crud.events import EventCRUD +from 
apps.schedule.domain.constants import EventType from apps.schedule.service.schedule import ScheduleService from apps.workspaces.domain.constants import Role @@ -89,7 +90,9 @@ async def update_create( schemas = list() prepared_flow_items = list() - all_flows = [flow.flow_id for flow in await FlowEventsCRUD(self.session).get_by_applet_id(applet_id)] + flow_events = await EventCRUD(self.session).get_by_type_and_applet_id(applet_id, EventType.FLOW) + + all_flows = [flow_event.activity_flow_id for flow_event in flow_events if flow_event.activity_flow_id] # Save new flow ids new_flows = [] diff --git a/src/apps/applets/commands/applet_ema.py b/src/apps/applets/commands/applet_ema.py index f14072bf799..ae08cb5ac72 100644 --- a/src/apps/applets/commands/applet_ema.py +++ b/src/apps/applets/commands/applet_ema.py @@ -24,13 +24,7 @@ from apps.job.constants import JobStatus from apps.job.errors import JobStatusError from apps.job.service import JobService -from apps.schedule.db.schemas import ( - ActivityEventsSchema, - EventSchema, - FlowEventsSchema, - PeriodicitySchema, - UserEventsSchema, -) +from apps.schedule.db.schemas import EventSchema from apps.schedule.domain.constants import PeriodicityType from apps.shared.domain.base import PublicModel from apps.subjects.db.schemas import SubjectSchema @@ -239,27 +233,24 @@ async def get_user_flow_events( select( EventSchema.applet_id, EventSchema.id.label("event_id"), - UserEventsSchema.user_id, - FlowEventsSchema.flow_id, - PeriodicitySchema.type.label("event_type"), + EventSchema.user_id, + EventSchema.activity_flow_id.label("flow_id"), + EventSchema.periodicity.label("event_type"), case( ( - PeriodicitySchema.type.in_(("WEEKDAYS", "DAILY")), + EventSchema.periodicity.in_(("WEEKDAYS", "DAILY")), scheduled_date, ), - (PeriodicitySchema.type.in_(("WEEKLY", "MONTHLY")), PeriodicitySchema.start_date), - else_=PeriodicitySchema.selected_date, + (EventSchema.periodicity.in_(("WEEKLY", "MONTHLY")), EventSchema.start_date), + 
else_=EventSchema.selected_date, ).label("selected_date"), - PeriodicitySchema.start_date, - PeriodicitySchema.end_date, + EventSchema.start_date, + EventSchema.end_date, EventSchema.start_time, EventSchema.end_time, ) .select_from(EventSchema) - .join(UserEventsSchema, UserEventsSchema.event_id == EventSchema.id) - .join(PeriodicitySchema, PeriodicitySchema.id == EventSchema.periodicity_id) - .join(FlowEventsSchema, FlowEventsSchema.event_id == EventSchema.id) - .where(EventSchema.is_deleted == false(), PeriodicitySchema.type != PeriodicityType.ALWAYS) + .where(EventSchema.is_deleted == false(), EventSchema.periodicity != PeriodicityType.ALWAYS) ).cte("user_flow_events") query = ( @@ -327,14 +318,14 @@ def filter_events(raw_events_rows: list[TRawRow], schedule_date: datetime.date) case PeriodicityType.DAILY: if row.is_crossday_event: row.end_date += datetime.timedelta(days=1) - if schedule_date >= row.start_date and schedule_date <= row.end_date: + if row.start_date <= schedule_date <= row.end_date: filtered.append(row) case PeriodicityType.ONCE: schedule_start_date = row.selected_date row.end_date = row.selected_date if row.is_crossday_event: row.end_date += datetime.timedelta(days=1) - if schedule_date >= schedule_start_date and schedule_date <= row.end_date: + if schedule_start_date <= schedule_date <= row.end_date: filtered.append(row) case PeriodicityType.WEEKDAYS: last_weekday = FRIDAY_WEEKDAY @@ -342,11 +333,7 @@ def filter_events(raw_events_rows: list[TRawRow], schedule_date: datetime.date) last_weekday = SATURDAY_WEEKDAY if row.end_date.weekday() == FRIDAY_WEEKDAY: row.end_date += datetime.timedelta(days=1) - if ( - schedule_date.weekday() <= last_weekday - and schedule_date >= row.start_date - and schedule_date <= row.end_date - ): + if schedule_date.weekday() <= last_weekday and row.start_date <= schedule_date <= row.end_date: filtered.append(row) case PeriodicityType.WEEKLY: scheduled_weekday = row.start_date.weekday() @@ -362,10 +349,8 @@ def 
filter_events(raw_events_rows: list[TRawRow], schedule_date: datetime.date) if row.start_date.weekday() == row.end_date.weekday(): row.end_date += datetime.timedelta(days=1) if ( - (schedule_date.weekday() == scheduled_weekday or schedule_date.weekday() == following_weekday) - and schedule_date >= row.start_date - and schedule_date <= row.end_date - ): + schedule_date.weekday() == scheduled_weekday or schedule_date.weekday() == following_weekday + ) and row.start_date <= schedule_date <= row.end_date: filtered.append(row) case PeriodicityType.MONTHLY: scheduled_monthday = row.start_date.day @@ -382,14 +367,10 @@ def filter_events(raw_events_rows: list[TRawRow], schedule_date: datetime.date) ): row.end_date += datetime.timedelta(days=1) if ( - ( - schedule_date.day == scheduled_monthday - or schedule_date.day == following_monthday - or (is_last_day_of_month(schedule_date) and row.start_date) - ) - and schedule_date >= row.start_date - and schedule_date <= row.end_date - ): + schedule_date.day == scheduled_monthday + or schedule_date.day == following_monthday + or (is_last_day_of_month(schedule_date) and row.start_date) + ) and row.start_date <= schedule_date <= row.end_date: filtered.append(row) return filtered @@ -505,27 +486,24 @@ async def get_user_activity_events( select( EventSchema.applet_id, EventSchema.id.label("event_id"), - UserEventsSchema.user_id, - ActivityEventsSchema.activity_id, - PeriodicitySchema.type.label("event_type"), + EventSchema.user_id, + EventSchema.activity_id, + EventSchema.periodicity.label("event_type"), case( ( - PeriodicitySchema.type.in_(("WEEKDAYS", "DAILY")), + EventSchema.periodicity.in_(("WEEKDAYS", "DAILY")), scheduled_date, ), - (PeriodicitySchema.type.in_(("WEEKLY", "MONTHLY")), PeriodicitySchema.start_date), - else_=PeriodicitySchema.selected_date, + (EventSchema.periodicity.in_(("WEEKLY", "MONTHLY")), EventSchema.start_date), + else_=EventSchema.selected_date, ).label("selected_date"), - PeriodicitySchema.start_date, - 
PeriodicitySchema.end_date, + EventSchema.start_date, + EventSchema.end_date, EventSchema.start_time, EventSchema.end_time, ) .select_from(EventSchema) - .join(UserEventsSchema, UserEventsSchema.event_id == EventSchema.id) - .join(PeriodicitySchema, PeriodicitySchema.id == EventSchema.periodicity_id) - .join(ActivityEventsSchema, ActivityEventsSchema.event_id == EventSchema.id) - .where(EventSchema.is_deleted == false(), PeriodicitySchema.type != PeriodicityType.ALWAYS) + .where(EventSchema.is_deleted == false(), EventSchema.periodicity != PeriodicityType.ALWAYS) ).cte("user_activity_events") query = ( diff --git a/src/apps/schedule/crud/events.py b/src/apps/schedule/crud/events.py index 87b7d97f756..583fc0bd8ba 100644 --- a/src/apps/schedule/crud/events.py +++ b/src/apps/schedule/crud/events.py @@ -4,48 +4,31 @@ from sqlalchemy import Integer, update from sqlalchemy.exc import IntegrityError, MultipleResultsFound, NoResultFound from sqlalchemy.orm import Query -from sqlalchemy.sql import and_, delete, distinct, func, or_, select +from sqlalchemy.sql import and_, delete, func, or_, select from sqlalchemy.sql.expression import case, cast from apps.activities.db.schemas import ActivitySchema from apps.activity_flows.db.schemas import ActivityFlowSchema from apps.schedule.db.schemas import ( - ActivityEventsSchema, EventSchema, - FlowEventsSchema, - UserEventsSchema, ) -from apps.schedule.domain.constants import PeriodicityType +from apps.schedule.domain.constants import EventType, PeriodicityType from apps.schedule.domain.schedule.internal import ( - ActivityEvent, - ActivityEventCreate, Event, EventCreate, EventFull, EventUpdate, - FlowEvent, - FlowEventCreate, - UserEvent, - UserEventCreate, ) from apps.schedule.domain.schedule.public import ActivityEventCount, FlowEventCount from apps.schedule.errors import ( - ActivityEventAlreadyExists, EventError, EventNotFoundError, - FlowEventAlreadyExists, - UserEventAlreadyExists, ) from apps.workspaces.db.schemas import 
UserAppletAccessSchema from apps.workspaces.domain.constants import Role from infrastructure.database import BaseCRUD -__all__ = [ - "EventCRUD", - "UserEventsCRUD", - "ActivityEventsCRUD", - "FlowEventsCRUD", -] +__all__ = ["EventCRUD"] class EventCRUD(BaseCRUD[EventSchema]): @@ -83,17 +66,12 @@ async def get_all_by_applet_id_with_filter( ) -> list[EventSchema]: """Return event instance.""" query: Query = select(EventSchema) - query = query.join( - UserEventsSchema, - UserEventsSchema.event_id == EventSchema.id, - isouter=True, - ) query = query.where(EventSchema.applet_id == applet_id) query = query.where(EventSchema.is_deleted.is_(False)) if respondent_id: - query = query.where(UserEventsSchema.user_id == respondent_id) + query = query.where(EventSchema.user_id == respondent_id) else: - query = query.where(UserEventsSchema.user_id == None) # noqa: E711 + query = query.where(EventSchema.user_id.is_(None)) result = await self._execute(query) return result.scalars().all() @@ -109,15 +87,10 @@ async def get_all_by_applet_id(self, applet_id: uuid.UUID) -> list[EventSchema]: async def get_public_by_applet_id(self, applet_id: uuid.UUID) -> list[EventSchema]: """Return event instance.""" query: Query = select(EventSchema) - query = query.join( - UserEventsSchema, - UserEventsSchema.event_id == EventSchema.id, - isouter=True, - ) query = query.where(EventSchema.applet_id == applet_id) query = query.distinct(EventSchema.id) - query = query.where(UserEventsSchema.user_id == None) # noqa: E711 - query = query.where(EventSchema.is_deleted == False) # noqa: E712 + query = query.where(EventSchema.user_id.is_(None)) + query = query.where(EventSchema.is_deleted.is_(False)) result = await self._execute(query) return result.scalars().all() @@ -160,34 +133,12 @@ async def update(self, pk: uuid.UUID, schema: EventUpdate) -> Event: async def get_all_by_applet_and_user(self, applet_id: uuid.UUID, user_id: uuid.UUID) -> list[EventFull]: """Get events by applet_id and user_id""" + 
query: Query = select(EventSchema) - query: Query = select( - EventSchema, - ActivityEventsSchema.activity_id, - FlowEventsSchema.flow_id, - ) - query = query.join( - UserEventsSchema, - and_( - EventSchema.id == UserEventsSchema.event_id, - UserEventsSchema.user_id == user_id, - ), - ) - - query = query.join( - FlowEventsSchema, - FlowEventsSchema.event_id == EventSchema.id, - isouter=True, - ) - query = query.join( - ActivityEventsSchema, - ActivityEventsSchema.event_id == EventSchema.id, - isouter=True, + query = query.where( + EventSchema.applet_id == applet_id, EventSchema.user_id == user_id, EventSchema.is_deleted.is_(False) ) - query = query.where(EventSchema.applet_id == applet_id) - query = query.where(EventSchema.is_deleted == False) # noqa: E712 - db_result = await self._execute(query) events = [] @@ -207,8 +158,9 @@ async def get_all_by_applet_and_user(self, applet_id: uuid.UUID, user_id: uuid.U start_date=row.EventSchema.start_date, end_date=row.EventSchema.end_date, selected_date=row.EventSchema.selected_date, - activity_id=row.activity_id, - flow_id=row.flow_id, + activity_id=row.EventSchema.activity_id, + flow_id=row.EventSchema.activity_flow_id, + event_type=row.EventSchema.event_type, ) ) return events @@ -223,32 +175,12 @@ async def get_all_by_applets_and_user( """Get events by applet_ids and user_id Return {applet_id: [EventFull]}""" - query: Query = select( - EventSchema, - ActivityEventsSchema.activity_id, - FlowEventsSchema.flow_id, - ) - query = query.join( - UserEventsSchema, - and_( - EventSchema.id == UserEventsSchema.event_id, - UserEventsSchema.user_id == user_id, - ), - ) - - query = query.join( - FlowEventsSchema, - FlowEventsSchema.event_id == EventSchema.id, - isouter=True, - ) - query = query.join( - ActivityEventsSchema, - ActivityEventsSchema.event_id == EventSchema.id, - isouter=True, + query: Query = select(EventSchema) + query = query.where( + EventSchema.applet_id.in_(applet_ids), + EventSchema.is_deleted.is_(False), + 
EventSchema.user_id == user_id, ) - - query = query.where(EventSchema.applet_id.in_(applet_ids)) - query = query.where(EventSchema.is_deleted == False) # noqa: E712 if min_end_date and max_start_date: query = query.where( or_( @@ -284,7 +216,7 @@ async def get_all_by_applets_and_user( id=row.EventSchema.id, start_time=row.EventSchema.start_time, end_time=row.EventSchema.end_time, - access_before_schedule=row.EventSchema.access_before_schedule, # noqa: E501 + access_before_schedule=row.EventSchema.access_before_schedule, one_time_completion=row.EventSchema.one_time_completion, timer=row.EventSchema.timer, timer_type=row.EventSchema.timer_type, @@ -294,8 +226,9 @@ async def get_all_by_applets_and_user( start_date=row.EventSchema.start_date, end_date=row.EventSchema.end_date, selected_date=row.EventSchema.selected_date, - activity_id=row.activity_id, - flow_id=row.flow_id, + activity_id=row.EventSchema.activity_id, + flow_id=row.EventSchema.activity_flow_id, + event_type=row.EventSchema.event_type, ) ) @@ -316,26 +249,15 @@ async def get_all_by_applet_and_activity( ) -> list[EventSchema]: """Get events by applet_id and activity_id""" query: Query = select(EventSchema) - query = query.join( - ActivityEventsSchema, - and_( - EventSchema.id == ActivityEventsSchema.event_id, - ActivityEventsSchema.activity_id == activity_id, - ), - ) - # differentiate general and individual events - query = query.join( - UserEventsSchema, - EventSchema.id == UserEventsSchema.event_id, - isouter=True, - ) # select only always available if requested if only_always_available: query = query.where(EventSchema.periodicity == PeriodicityType.ALWAYS) - query = query.where(EventSchema.applet_id == applet_id) - query = query.where(EventSchema.is_deleted.is_(False)) - - query = query.where(UserEventsSchema.user_id == respondent_id) + query = query.where( + EventSchema.applet_id == applet_id, + EventSchema.is_deleted.is_(False), + EventSchema.activity_id == activity_id, + EventSchema.user_id == 
respondent_id, + ) result = await self._execute(query) return result.scalars().all() @@ -352,36 +274,18 @@ async def validate_existing_always_available( query = query.select_from(EventSchema) if activity_id: - query = query.join( - ActivityEventsSchema, - and_( - EventSchema.id == ActivityEventsSchema.event_id, - ActivityEventsSchema.activity_id == activity_id, - ), - ) + query = query.where(EventSchema.activity_id == activity_id) if flow_id: - query = query.join( - FlowEventsSchema, - and_( - EventSchema.id == FlowEventsSchema.event_id, - FlowEventsSchema.flow_id == flow_id, - ), - ) - - # differentiate general and individual events - query = query.join( - UserEventsSchema, - EventSchema.id == UserEventsSchema.event_id, - isouter=True, - ) + query = query.where(EventSchema.activity_flow_id == flow_id) query = query.where( EventSchema.periodicity == PeriodicityType.ALWAYS, EventSchema.applet_id == applet_id, EventSchema.is_deleted.is_(False), - UserEventsSchema.user_id == respondent_id, + EventSchema.user_id == respondent_id, ) + query = query.limit(1) result = await self._execute(query) @@ -396,102 +300,48 @@ async def get_all_by_applet_and_flow( ) -> list[EventSchema]: """Get events by applet_id and flow_id""" query: Query = select(EventSchema) - query = query.join( - FlowEventsSchema, - and_( - EventSchema.id == FlowEventsSchema.event_id, - FlowEventsSchema.flow_id == flow_id, - ), - ) - - # differentiate general and individual events - query = query.join( - UserEventsSchema, - EventSchema.id == UserEventsSchema.event_id, - isouter=True, - ) # select only always available if requested if only_always_available: query = query.where(EventSchema.periodicity == PeriodicityType.ALWAYS) - query = query.where(EventSchema.applet_id == applet_id) - query = query.where(EventSchema.is_deleted.is_(False)) - - query = query.where(UserEventsSchema.user_id == respondent_id) + query = query.where( + EventSchema.applet_id == applet_id, + EventSchema.is_deleted.is_(False), + 
EventSchema.user_id == respondent_id, + EventSchema.activity_flow_id == flow_id, + ) result = await self._execute(query) return result.scalars().all() async def get_general_events_by_user(self, applet_id: uuid.UUID, user_id: uuid.UUID) -> list[EventFull]: """Get general events by applet_id and user_id""" - # select flow_ids to exclude - flow_ids = ( - select(distinct(FlowEventsSchema.flow_id)) - .select_from(FlowEventsSchema) - .join( - UserEventsSchema, - UserEventsSchema.event_id == FlowEventsSchema.event_id, - ) - .join( - EventSchema, - EventSchema.id == FlowEventsSchema.event_id, - ) - .where(UserEventsSchema.user_id == user_id) - .where(EventSchema.applet_id == applet_id) - ) - activity_ids = ( - select(distinct(ActivityEventsSchema.activity_id)) - .select_from(ActivityEventsSchema) - .join( - UserEventsSchema, - UserEventsSchema.event_id == ActivityEventsSchema.event_id, - ) - .join( - EventSchema, - EventSchema.id == ActivityEventsSchema.event_id, - ) - .where(UserEventsSchema.user_id == user_id) - .where(EventSchema.applet_id == applet_id) - ) - query: Query = select( - EventSchema, - ActivityEventsSchema.activity_id, - FlowEventsSchema.flow_id, - ) - - query = query.join( - FlowEventsSchema, - FlowEventsSchema.event_id == EventSchema.id, - isouter=True, - ) - query = query.join( - ActivityEventsSchema, - ActivityEventsSchema.event_id == EventSchema.id, - isouter=True, - ) - query = query.join( - UserEventsSchema, - UserEventsSchema.event_id == EventSchema.id, - isouter=True, + # select flow and activity ids to exclude + ids = ( + select( + func.coalesce(EventSchema.activity_flow_id, EventSchema.activity_id).label("entity_id"), + ) + .select_from(EventSchema) + .where(EventSchema.user_id == user_id, EventSchema.applet_id == applet_id) + .group_by("entity_id") ) - query = query.where(EventSchema.applet_id == applet_id) - query = query.where(EventSchema.is_deleted == False) # noqa: E712 + query: Query = select(EventSchema) query = query.where( + 
EventSchema.applet_id == applet_id, + EventSchema.is_deleted.is_(False), or_( - FlowEventsSchema.flow_id.is_(None), - FlowEventsSchema.flow_id.not_in(flow_ids), - ) - ) - query = query.where( + EventSchema.activity_flow_id.is_(None), + EventSchema.activity_flow_id.not_in(ids), + ), or_( - ActivityEventsSchema.activity_id.is_(None), - ActivityEventsSchema.activity_id.not_in(activity_ids), - ) + EventSchema.activity_id.is_(None), + EventSchema.activity_id.not_in(ids), + ), + EventSchema.user_id.is_(None), ) - query = query.where(UserEventsSchema.user_id == None) # noqa: E711 db_result = await self._execute(query) @@ -512,8 +362,9 @@ async def get_general_events_by_user(self, applet_id: uuid.UUID, user_id: uuid.U start_date=row.EventSchema.start_date, end_date=row.EventSchema.end_date, selected_date=row.EventSchema.selected_date, - activity_id=row.activity_id, - flow_id=row.flow_id, + activity_id=row.EventSchema.activity_id, + flow_id=row.EventSchema.activity_flow_id, + event_type=row.EventSchema.event_type, ) ) return events @@ -526,73 +377,32 @@ async def get_general_events_by_applets_and_user( max_start_date: date | None = None, ) -> tuple[dict[uuid.UUID, list[EventFull]], set[uuid.UUID]]: """Get general events by applet_id and user_id""" - # select flow_ids to exclude - flow_ids = ( - select(distinct(FlowEventsSchema.flow_id)) - .select_from(FlowEventsSchema) - .join( - UserEventsSchema, - UserEventsSchema.event_id == FlowEventsSchema.event_id, - ) - .join( - EventSchema, - EventSchema.id == FlowEventsSchema.event_id, - ) - .where(UserEventsSchema.user_id == user_id) - .where(EventSchema.applet_id.in_(applet_ids)) - ) - activity_ids = ( - select(distinct(ActivityEventsSchema.activity_id)) - .select_from(ActivityEventsSchema) - .join( - UserEventsSchema, - UserEventsSchema.event_id == ActivityEventsSchema.event_id, - ) - .join( - EventSchema, - EventSchema.id == ActivityEventsSchema.event_id, - ) - .where(UserEventsSchema.user_id == user_id) - 
.where(EventSchema.applet_id.in_(applet_ids)) - ) - query: Query = select( - EventSchema, - ActivityEventsSchema.activity_id, - FlowEventsSchema.flow_id, + # select flow and activity ids to exclude + ids = ( + select( + func.coalesce(EventSchema.activity_flow_id, EventSchema.activity_id).label("entity_id"), + ) + .select_from(EventSchema) + .where(EventSchema.user_id == user_id, EventSchema.applet_id.in_(applet_ids)) + .group_by("entity_id") ) - query = query.join( - FlowEventsSchema, - FlowEventsSchema.event_id == EventSchema.id, - isouter=True, - ) - query = query.join( - ActivityEventsSchema, - ActivityEventsSchema.event_id == EventSchema.id, - isouter=True, - ) - query = query.join( - UserEventsSchema, - UserEventsSchema.event_id == EventSchema.id, - isouter=True, - ) + query: Query = select(EventSchema) - query = query.where(EventSchema.applet_id.in_(applet_ids)) - query = query.where(EventSchema.is_deleted == False) # noqa: E712 query = query.where( + EventSchema.applet_id.in_(applet_ids), + EventSchema.is_deleted.is_(False), or_( - FlowEventsSchema.flow_id.is_(None), - FlowEventsSchema.flow_id.not_in(flow_ids), - ) - ) - query = query.where( + EventSchema.activity_flow_id.is_(None), + EventSchema.activity_flow_id.not_in(ids), + ), or_( - ActivityEventsSchema.activity_id.is_(None), - ActivityEventsSchema.activity_id.not_in(activity_ids), - ) + EventSchema.activity_id.is_(None), + EventSchema.activity_id.not_in(ids), + ), + EventSchema.user_id.is_(None), ) - query = query.where(UserEventsSchema.user_id == None) # noqa: E711 if min_end_date and max_start_date: query = query.where( or_( @@ -638,8 +448,9 @@ async def get_general_events_by_applets_and_user( start_date=row.EventSchema.start_date, end_date=row.EventSchema.end_date, selected_date=row.EventSchema.selected_date, - activity_id=row.activity_id, - flow_id=row.flow_id, + activity_id=row.EventSchema.activity_id, + flow_id=row.EventSchema.activity_flow_id, + event_type=row.EventSchema.event_type, ) ) @@ 
-647,70 +458,34 @@ async def get_general_events_by_applets_and_user( async def count_general_events_by_user(self, applet_id: uuid.UUID, user_id: uuid.UUID) -> int: """Count general events by applet_id and user_id""" - flow_ids = ( - select(distinct(FlowEventsSchema.flow_id)) - .select_from(FlowEventsSchema) - .join( - UserEventsSchema, - UserEventsSchema.event_id == FlowEventsSchema.event_id, - ) - .join( - EventSchema, - EventSchema.id == FlowEventsSchema.event_id, - ) - .where(UserEventsSchema.user_id == user_id) - .where(EventSchema.applet_id == applet_id) - ) - activity_ids = ( - select(distinct(ActivityEventsSchema.activity_id)) - .select_from(ActivityEventsSchema) - .join( - UserEventsSchema, - UserEventsSchema.event_id == ActivityEventsSchema.event_id, - ) - .join( - EventSchema, - EventSchema.id == ActivityEventsSchema.event_id, + + # select flow and activity ids to exclude + ids = ( + select( + func.coalesce(EventSchema.activity_flow_id, EventSchema.activity_id).label("entity_id"), ) - .where(UserEventsSchema.user_id == user_id) - .where(EventSchema.applet_id == applet_id) + .select_from(EventSchema) + .where(EventSchema.user_id == user_id, EventSchema.applet_id == applet_id) + .group_by("entity_id") ) query: Query = select( func.count(EventSchema.id).label("count"), ) - query = query.join( - FlowEventsSchema, - FlowEventsSchema.event_id == EventSchema.id, - isouter=True, - ) - query = query.join( - ActivityEventsSchema, - ActivityEventsSchema.event_id == EventSchema.id, - isouter=True, - ) - query = query.join( - UserEventsSchema, - UserEventsSchema.event_id == EventSchema.id, - isouter=True, - ) - - query = query.where(EventSchema.applet_id == applet_id) - query = query.where(EventSchema.is_deleted == False) # noqa: E712 query = query.where( + EventSchema.applet_id == applet_id, + EventSchema.is_deleted.is_(False), or_( - FlowEventsSchema.flow_id.is_(None), - FlowEventsSchema.flow_id.not_in(flow_ids), - ) - ) - query = query.where( + 
EventSchema.activity_flow_id.is_(None), + EventSchema.activity_flow_id.not_in(ids), + ), or_( - ActivityEventsSchema.activity_id.is_(None), - ActivityEventsSchema.activity_id.not_in(activity_ids), - ) + EventSchema.activity_id.is_(None), + EventSchema.activity_id.not_in(ids), + ), + EventSchema.user_id.is_(None), ) - query = query.where(UserEventsSchema.user_id == None) # noqa: E711 db_result = await self._execute(query) return db_result.scalar() @@ -719,16 +494,11 @@ async def count_individual_events_by_user(self, applet_id: uuid.UUID, user_id: u """Count individual events by applet_id and user_id""" query: Query = select(func.count(EventSchema.id)) - query = query.join( - UserEventsSchema, - and_( - EventSchema.id == UserEventsSchema.event_id, - UserEventsSchema.user_id == user_id, - ), + query = query.where( + EventSchema.applet_id == applet_id, + EventSchema.is_deleted.is_(False), + EventSchema.user_id == user_id, ) - - query = query.where(EventSchema.applet_id == applet_id) - query = query.where(EventSchema.is_deleted == False) # noqa: E712 db_result = await self._execute(query) return db_result.scalar() @@ -739,21 +509,13 @@ async def get_all_by_activity_flow_ids( is_activity: bool, ) -> list[EventSchema]: """Return events for given activity ids.""" - query: Query = select(self.schema_class) - query = query.where(self.schema_class.applet_id == applet_id) + query: Query = select(EventSchema) + query = query.where(EventSchema.applet_id == applet_id) if is_activity: - query = query.join( - ActivityEventsSchema, - ActivityEventsSchema.event_id == self.schema_class.id, - ) - query = query.where(ActivityEventsSchema.activity_id.in_(activity_ids)) + query = query.where(EventSchema.activity_id.in_(activity_ids)) else: - query = query.join( - FlowEventsSchema, - FlowEventsSchema.event_id == self.schema_class.id, - ) - query = query.where(FlowEventsSchema.flow_id.in_(activity_ids)) + query = query.where(EventSchema.activity_flow_id.in_(activity_ids)) result = await 
self._execute(query) events = result.scalars().all() @@ -761,323 +523,140 @@ async def get_all_by_activity_flow_ids( async def get_default_schedule_user_ids_by_applet_id(self, applet_id: uuid.UUID) -> list[uuid.UUID]: """Return user ids for default schedule.""" - individual_schedule_users = ( - select(UserEventsSchema.user_id) - .join(EventSchema, UserEventsSchema.event_id == EventSchema.id) - .where(EventSchema.applet_id == applet_id) - .where(EventSchema.is_deleted == False) # noqa: E712 + individual_schedule_users = select(EventSchema.user_id).where( + EventSchema.applet_id == applet_id, + EventSchema.is_deleted.is_(False), + EventSchema.user_id.isnot(None), ) + query: Query = select(UserAppletAccessSchema.user_id.label("user_id")) query = query.where(UserAppletAccessSchema.applet_id == applet_id) query = query.where(UserAppletAccessSchema.role == Role.RESPONDENT) - query = query.where(UserAppletAccessSchema.is_deleted == False) # noqa: E712 + query = query.where(UserAppletAccessSchema.is_deleted.is_(False)) query = query.where(UserAppletAccessSchema.user_id.not_in(individual_schedule_users)) result = await self._execute(query) result = result.scalars().all() return result - -class UserEventsCRUD(BaseCRUD[UserEventsSchema]): - schema_class = UserEventsSchema - - async def save(self, schema: UserEventCreate) -> UserEvent: - """Return user event instance and the created information.""" - try: - instance: UserEventsSchema = await self._create(UserEventsSchema(**schema.dict())) - except IntegrityError: - raise UserEventAlreadyExists(user_id=schema.user_id, event_id=schema.event_id) - - user_event: UserEvent = UserEvent.from_orm(instance) - return user_event - - async def get_by_event_id(self, event_id: uuid.UUID) -> uuid.UUID | None: - """Return user event instances.""" - query: Query = select(distinct(UserEventsSchema.user_id)) - query = query.where(UserEventsSchema.event_id == event_id) - query = query.where(UserEventsSchema.is_deleted == False) # noqa: E712 - 
db_result = await self._execute(query) - - try: - result: uuid.UUID = db_result.scalars().one_or_none() - except MultipleResultsFound: - raise EventError() - - return result - - async def delete_all_by_event_ids(self, event_ids: list[uuid.UUID]): - """Delete all user events by event ids.""" - query: Query = delete(UserEventsSchema) - query = query.where(UserEventsSchema.event_id.in_(event_ids)) - await self._execute(query) - - async def delete_all_by_events_and_user(self, event_ids: list[uuid.UUID], user_id: uuid.UUID): - """Delete all user events by event ids.""" - query: Query = delete(UserEventsSchema) - query = query.where(UserEventsSchema.event_id.in_(event_ids)) - query = query.where(UserEventsSchema.user_id == user_id) - await self._execute(query) - - -class ActivityEventsCRUD(BaseCRUD[ActivityEventsSchema]): - schema_class = ActivityEventsSchema - - async def save(self, schema: ActivityEventCreate) -> ActivityEvent: - """Return activity event instance and the created information.""" - - try: - instance: ActivityEventsSchema = await self._create(ActivityEventsSchema(**schema.dict())) - except IntegrityError: - raise ActivityEventAlreadyExists(activity_id=schema.activity_id, event_id=schema.event_id) - - activity_event: ActivityEvent = ActivityEvent.from_orm(instance) - return activity_event - - async def get_by_event_id(self, event_id: uuid.UUID) -> uuid.UUID | None: - """Return activity event instances.""" - query: Query = select(ActivityEventsSchema.activity_id) - query = query.where(ActivityEventsSchema.event_id == event_id) - query = query.where( - ActivityEventsSchema.is_deleted == False # noqa: E712 - ) - result = await self._execute(query) - - try: - activity_id = result.scalars().one_or_none() - except MultipleResultsFound: - raise EventError() - return activity_id - - async def delete_all_by_event_ids(self, event_ids: list[uuid.UUID]): - """Delete all activity events by event ids.""" - query: Query = delete(ActivityEventsSchema) - query = 
query.where(ActivityEventsSchema.event_id.in_(event_ids)) - await self._execute(query) - - async def count_by_applet(self, applet_id: uuid.UUID) -> list[ActivityEventCount]: - """Return activity ids with event count.""" + async def count_by_activity(self, activity_id: uuid.UUID, respondent_id: uuid.UUID | None) -> int: + """Return event count.""" query: Query = select( - ActivitySchema.id, - func.count(ActivityEventsSchema.event_id).label("count"), - ActivitySchema.name, + func.count(EventSchema.id).label("count"), ) - query = query.select_from(ActivitySchema) - query = query.join( - ActivityEventsSchema, - and_( - ActivitySchema.id == ActivityEventsSchema.activity_id, - ActivityEventsSchema.is_deleted == False, # noqa: E712 - ), - isouter=True, + query = query.where( + EventSchema.activity_id == activity_id, + EventSchema.is_deleted.is_(False), + EventSchema.user_id == respondent_id, ) - query = query.join(EventSchema, ActivityEventsSchema.event_id == EventSchema.id) - - query = query.filter(ActivitySchema.is_deleted == False) # noqa: E712 - query = query.filter(ActivitySchema.applet_id == applet_id) - query = query.filter(EventSchema.periodicity != PeriodicityType.ALWAYS) - query = query.group_by(ActivitySchema.applet_id, ActivitySchema.id) result = await self._execute(query) - activity_event_counts: list[ActivityEventCount] = [ - ActivityEventCount( - activity_id=activity_id, - count=count, - activity_name=name, - ) - for activity_id, count, name in result - ] - - return activity_event_counts + count: int = result.scalar() + return count - async def count_by_activity(self, activity_id: uuid.UUID, respondent_id: uuid.UUID | None) -> int: + async def count_by_flow(self, flow_id: uuid.UUID, respondent_id: uuid.UUID | None) -> int: """Return event count.""" query: Query = select( - func.count(ActivityEventsSchema.event_id).label("count"), - ) - query = query.join( - UserEventsSchema, - UserEventsSchema.event_id == ActivityEventsSchema.event_id, - isouter=True, + 
func.count(EventSchema.id).label("count"), ) - query = query.filter(ActivityEventsSchema.activity_id == activity_id) query = query.filter( - ActivityEventsSchema.is_deleted == False # noqa: E712 + EventSchema.activity_flow_id == flow_id, + EventSchema.is_deleted.is_(False), + EventSchema.user_id == respondent_id, ) - query = query.filter(UserEventsSchema.user_id == respondent_id) result = await self._execute(query) count: int = result.scalar() return count - async def get_by_event_ids(self, event_ids: list[uuid.UUID]) -> list[uuid.UUID]: - """Return activity event instances.""" - query: Query = select(distinct(ActivityEventsSchema.activity_id)) - query = query.where(ActivityEventsSchema.event_id.in_(event_ids)) - result = await self._execute(query) - activity_ids = result.scalars().all() - return activity_ids - - async def get_by_applet_id(self, applet_id: uuid.UUID) -> list[ActivityEvent]: - """Return activity event instances.""" - query: Query = select(ActivityEventsSchema) - query = query.join(EventSchema, ActivityEventsSchema.event_id == EventSchema.id) - query = query.where(EventSchema.applet_id == applet_id) - result = await self._execute(query) - activity_events = result.scalars().all() - - return [ActivityEvent.from_orm(activity_event) for activity_event in activity_events] + async def count_by_applet(self, applet_id: uuid.UUID) -> tuple[list[ActivityEventCount], list[FlowEventCount]]: + """Return activity ids and flow ids with event count.""" - async def get_by_applet_and_user_id(self, applet_id: uuid.UUID, user_id: uuid.UUID) -> list[ActivityEvent]: - """Return activity event instances.""" - query: Query = select(ActivityEventsSchema) - query = query.join(EventSchema, ActivityEventsSchema.event_id == EventSchema.id) - query = query.join(UserEventsSchema, EventSchema.id == UserEventsSchema.event_id) - query = query.join( - ActivitySchema, - ActivityEventsSchema.activity_id == ActivitySchema.id, + query: Query = select( + 
ActivitySchema.id.label("activity_id"), + ActivitySchema.name.label("activity_name"), + ActivityFlowSchema.id.label("flow_id"), + ActivityFlowSchema.name.label("flow_name"), + func.count(EventSchema.id).label("count"), ) - query = query.where(EventSchema.applet_id == applet_id) - query = query.where(UserEventsSchema.user_id == user_id) - result = await self._execute(query) - activity_events = result.scalars().all() - - return [ActivityEvent.from_orm(activity_event) for activity_event in activity_events] - - async def get_missing_events(self, activity_ids: list[uuid.UUID]) -> list[uuid.UUID]: - query: Query = select(ActivityEventsSchema.activity_id) - query.join( + query = query.select_from(EventSchema) + query = query.join( ActivitySchema, and_( - ActivitySchema.id == ActivityEventsSchema.activity_id, - ActivitySchema.is_reviewable.is_(False), + ActivitySchema.id == EventSchema.activity_id, + ActivitySchema.is_deleted.is_(False), ), + isouter=True, ) - query.where(ActivityEventsSchema.activity_id.in_(activity_ids)) - res = await self._execute(query) - db_result = res.scalars().all() - return list(set(activity_ids) - set(db_result)) - - -class FlowEventsCRUD(BaseCRUD[FlowEventsSchema]): - schema_class = FlowEventsSchema - - async def save(self, schema: FlowEventCreate) -> FlowEvent: - """Return flow event instance and the created information.""" - try: - instance: FlowEventsSchema = await self._create(FlowEventsSchema(**schema.dict())) - except IntegrityError: - raise FlowEventAlreadyExists(flow_id=schema.flow_id, event_id=schema.event_id) - - flow_event: FlowEvent = FlowEvent.from_orm(instance) - return flow_event - - async def get_by_event_id(self, event_id: uuid.UUID) -> uuid.UUID | None: - """Return flow event instances.""" - query: Query = select(FlowEventsSchema.flow_id) - query = query.where(FlowEventsSchema.event_id == event_id) - query = query.where(FlowEventsSchema.is_deleted == False) # noqa: E712 - result = await self._execute(query) - - try: - flow_id: 
uuid.UUID = result.scalars().one_or_none() - except MultipleResultsFound: - raise EventError(message=f"Event{event_id} is used in multiple flows".format(event_id=event_id)) - - return flow_id - - async def delete_all_by_event_ids(self, event_ids: list[uuid.UUID]): - """Delete all flow events by event ids.""" - query: Query = delete(FlowEventsSchema) - query = query.where(FlowEventsSchema.event_id.in_(event_ids)) - await self._execute(query) - - async def count_by_applet(self, applet_id: uuid.UUID) -> list[FlowEventCount]: - """Return flow ids with event count.""" - - query: Query = select( - ActivityFlowSchema.id, - func.count(FlowEventsSchema.id).label("count"), - ActivityFlowSchema.name, - ) - query = query.select_from(ActivityFlowSchema) - query = query.join( - FlowEventsSchema, + ActivityFlowSchema, and_( - FlowEventsSchema.flow_id == ActivityFlowSchema.id, - FlowEventsSchema.is_deleted == False, # noqa: E712 + ActivityFlowSchema.id == EventSchema.activity_flow_id, + ActivityFlowSchema.is_deleted.is_(False), ), isouter=True, ) - query = query.join(EventSchema, FlowEventsSchema.event_id == EventSchema.id) - query = query.filter(ActivityFlowSchema.applet_id == applet_id) - query = query.filter( - ActivityFlowSchema.is_deleted == False # noqa: E712 + query = query.where( + EventSchema.is_deleted.is_(False), + EventSchema.applet_id == applet_id, + EventSchema.periodicity != PeriodicityType.ALWAYS, ) - query = query.filter(EventSchema.periodicity != PeriodicityType.ALWAYS) - query = query.group_by(ActivityFlowSchema.applet_id, ActivityFlowSchema.id) + query = query.group_by(EventSchema.applet_id, ActivitySchema.id, ActivityFlowSchema.id) result = await self._execute(query) - flow_event_counts: list[FlowEventCount] = [ - FlowEventCount( - flow_id=flow_id, - count=count, - flow_name=name, - ) - for flow_id, count, name in result - ] - - return flow_event_counts - - async def get_by_event_ids(self, event_ids: list[uuid.UUID]) -> list[uuid.UUID]: - """Return flow event 
instances.""" - query: Query = select(distinct(FlowEventsSchema.flow_id)) - query = query.where(FlowEventsSchema.event_id.in_(event_ids)) - result = await self._execute(query) - flow_ids = result.scalars().all() - return flow_ids + activity_event_counts: list[ActivityEventCount] = [] + flow_event_counts: list[FlowEventCount] = [] + + for activity_id, activity_name, flow_id, flow_name, count in result: + if activity_id: + activity_event_counts.append( + ActivityEventCount( + activity_id=activity_id, + count=count, + activity_name=activity_name, + ) + ) + if flow_id: + flow_event_counts.append( + FlowEventCount( + flow_id=flow_id, + count=count, + flow_name=flow_name, + ) + ) - async def count_by_flow(self, flow_id: uuid.UUID, respondent_id: uuid.UUID | None) -> int: - """Return event count.""" + return activity_event_counts, flow_event_counts - query: Query = select( - func.count(FlowEventsSchema.event_id).label("count"), - ) - query = query.join( - UserEventsSchema, - FlowEventsSchema.event_id == UserEventsSchema.event_id, - isouter=True, - ) - query = query.filter(FlowEventsSchema.flow_id == flow_id) - query = query.filter( - FlowEventsSchema.is_deleted == False # noqa: E712 + async def get_activities_without_events(self, activity_ids: list[uuid.UUID]) -> list[uuid.UUID]: + query: Query = select(EventSchema.activity_id) + query.join( + ActivitySchema, + and_( + ActivitySchema.id == EventSchema.activity_id, + ActivitySchema.is_reviewable.is_(False), + ), ) - query = query.filter(UserEventsSchema.user_id == respondent_id) - result = await self._execute(query) - - count: int = result.scalar() - return count + query.where(EventSchema.activity_id.in_(activity_ids)) + res = await self._execute(query) + db_result = res.scalars().all() + return list(set(activity_ids) - set(db_result)) - async def get_by_applet_id(self, applet_id: uuid.UUID) -> list[FlowEvent]: - """Return flow event instances.""" - query: Query = select(FlowEventsSchema) - query = query.join(EventSchema, 
FlowEventsSchema.event_id == EventSchema.id) - query = query.where(EventSchema.applet_id == applet_id) - result = await self._execute(query) - flow_events = result.scalars().all() + async def get_by_type_and_applet_id(self, applet_id: uuid.UUID, event_type: EventType) -> list[Event]: + """Return event instances of type flow.""" + query: Query = select(EventSchema) + query = query.where( + EventSchema.applet_id == applet_id, + EventSchema.event_type == event_type, + ) - return [FlowEvent.from_orm(flow_event) for flow_event in flow_events] + if event_type == EventType.FLOW: + query = query.where(EventSchema.activity_flow_id.isnot(None)) + else: + query = query.where(EventSchema.activity_id.isnot(None)) - async def get_by_applet_and_user_id(self, applet_id: uuid.UUID, user_id: uuid.UUID) -> list[FlowEvent]: - """Return flow event instances.""" - query: Query = select(FlowEventsSchema) - query = query.join(EventSchema, FlowEventsSchema.event_id == EventSchema.id) - query = query.join(UserEventsSchema, EventSchema.id == UserEventsSchema.event_id) - query = query.where(EventSchema.applet_id == applet_id) - query = query.where(UserEventsSchema.user_id == user_id) result = await self._execute(query) flow_events = result.scalars().all() - return [FlowEvent.from_orm(flow_event) for flow_event in flow_events] + return [Event.from_orm(flow_event) for flow_event in flow_events] diff --git a/src/apps/schedule/db/schemas.py b/src/apps/schedule/db/schemas.py index bdfade12234..dcc6f654876 100644 --- a/src/apps/schedule/db/schemas.py +++ b/src/apps/schedule/db/schemas.py @@ -1,5 +1,4 @@ import datetime -import uuid from sqlalchemy import Boolean, Column, Date, ForeignKey, Integer, Interval, String, Time, UniqueConstraint, text from sqlalchemy.dialects.postgresql import ENUM, UUID @@ -8,15 +7,6 @@ from infrastructure.database.mixins import HistoryAware -class PeriodicitySchema(Base): - __tablename__ = "periodicity" - - type = Column(String(10), nullable=False) # Options: ONCE, 
DAILY, WEEKLY, WEEKDAYS, MONTHLY, ALWAYS - start_date = Column(Date, nullable=True) - end_date = Column(Date, nullable=True) - selected_date = Column(Date, nullable=True) - - class _BaseEventSchema: start_time = Column(Time, nullable=True) end_time = Column(Time, nullable=True) @@ -36,17 +26,16 @@ class _BaseEventSchema: start_date = Column(Date, nullable=True) end_date = Column(Date, nullable=True) selected_date = Column(Date, nullable=True) + event_type = Column(ENUM("activity", "flow", name="event_type_enum", create_type=False), nullable=False) + activity_id = Column(UUID(as_uuid=True), nullable=True) + activity_flow_id = Column(UUID(as_uuid=True), nullable=True) class EventSchema(_BaseEventSchema, Base): __tablename__ = "events" - periodicity_id = Column( - UUID(as_uuid=True), - default=lambda: uuid.uuid4(), - server_default=text("gen_random_uuid()"), - ) applet_id = Column(ForeignKey("applets.id", ondelete="CASCADE"), nullable=False) + user_id = Column(ForeignKey("users.id", ondelete="RESTRICT"), nullable=True) class EventHistorySchema(_BaseEventSchema, HistoryAware, Base): @@ -54,9 +43,6 @@ class EventHistorySchema(_BaseEventSchema, HistoryAware, Base): id_version = Column(String(), primary_key=True) id = Column(UUID(as_uuid=True)) - event_type = Column(ENUM("activity", "flow", name="event_type_enum", create_type=False), nullable=False) - activity_id = Column(UUID(as_uuid=True), nullable=True) - activity_flow_id = Column(UUID(as_uuid=True), nullable=True) user_id = Column(ForeignKey("users.id", ondelete="RESTRICT"), nullable=True) @@ -76,54 +62,6 @@ class AppletEventsSchema(Base): ) -class UserEventsSchema(Base): - __tablename__ = "user_events" - - user_id = Column(ForeignKey("users.id", ondelete="RESTRICT"), nullable=False) - event_id = Column(ForeignKey("events.id", ondelete="CASCADE"), nullable=False) - - __table_args__ = ( - UniqueConstraint( - "user_id", - "event_id", - "is_deleted", - name="_unique_user_events", - ), - ) - - -class 
ActivityEventsSchema(Base): - __tablename__ = "activity_events" - - activity_id = Column(UUID(as_uuid=True), nullable=False) - event_id = Column(ForeignKey("events.id", ondelete="CASCADE"), nullable=False) - - __table_args__ = ( - UniqueConstraint( - "activity_id", - "event_id", - "is_deleted", - name="_unique_activity_events", - ), - ) - - -class FlowEventsSchema(Base): - __tablename__ = "flow_events" - - flow_id = Column(UUID(as_uuid=True), nullable=False) - event_id = Column(ForeignKey("events.id", ondelete="CASCADE"), nullable=False) - - __table_args__ = ( - UniqueConstraint( - "flow_id", - "event_id", - "is_deleted", - name="_unique_flow_events", - ), - ) - - class _BaseNotificationSchema: from_time = Column(Time, nullable=True) to_time = Column(Time, nullable=True) diff --git a/src/apps/schedule/domain/schedule/internal.py b/src/apps/schedule/domain/schedule/internal.py index dc47f06c707..f1761eefb6a 100644 --- a/src/apps/schedule/domain/schedule/internal.py +++ b/src/apps/schedule/domain/schedule/internal.py @@ -3,7 +3,7 @@ from pydantic import Field, NonNegativeInt, root_validator -from apps.schedule.domain.constants import AvailabilityType, PeriodicityType, TimerType +from apps.schedule.domain.constants import AvailabilityType, EventType, PeriodicityType, TimerType from apps.schedule.domain.schedule.base import BaseEvent, BaseNotificationSetting, BaseReminderSetting from apps.schedule.domain.schedule.public import ( EventAvailabilityDto, @@ -20,14 +20,8 @@ __all__ = [ "Event", "ScheduleEvent", - "UserEvent", - "ActivityEvent", - "FlowEvent", "EventCreate", "EventUpdate", - "UserEventCreate", - "ActivityEventCreate", - "FlowEventCreate", "EventFull", "NotificationSettingCreate", "NotificationSetting", @@ -45,6 +39,10 @@ class EventCreate(BaseEvent, InternalModel): None, description="If type is WEEKLY, MONTHLY or ONCE, selectedDate must be set.", ) + user_id: uuid.UUID | None = None + activity_id: uuid.UUID | None = None + activity_flow_id: uuid.UUID | None 
= None + event_type: EventType @root_validator def validate_periodicity(cls, values): @@ -66,39 +64,6 @@ class Event(EventCreate, InternalModel): version: str -class UserEventCreate(InternalModel): - user_id: uuid.UUID - event_id: uuid.UUID - - -class UserEvent(UserEventCreate, InternalModel): - """UserEvent of a schedule""" - - id: uuid.UUID - - -class ActivityEventCreate(InternalModel): - activity_id: uuid.UUID - event_id: uuid.UUID - - -class ActivityEvent(ActivityEventCreate, InternalModel): - """ActivityEvent of a schedule""" - - id: uuid.UUID - - -class FlowEventCreate(InternalModel): - flow_id: uuid.UUID - event_id: uuid.UUID - - -class FlowEvent(FlowEventCreate, InternalModel): - """FlowEvent of a schedule""" - - id: uuid.UUID - - class NotificationSettingCreate(BaseNotificationSetting, InternalModel): event_id: uuid.UUID @@ -128,6 +93,7 @@ class EventFull(InternalModel, BaseEvent): activity_id: uuid.UUID | None = None flow_id: uuid.UUID | None = None version: str + event_type: EventType class ScheduleEvent(EventFull): diff --git a/src/apps/schedule/service/schedule.py b/src/apps/schedule/service/schedule.py index 99f7d21e07a..2356de0780e 100644 --- a/src/apps/schedule/service/schedule.py +++ b/src/apps/schedule/service/schedule.py @@ -6,24 +6,21 @@ from apps.activity_flows.crud import FlowsCRUD from apps.applets.crud import AppletsCRUD, UserAppletAccessCRUD from apps.applets.errors import AppletNotFoundError -from apps.schedule.crud.events import ActivityEventsCRUD, EventCRUD, FlowEventsCRUD, UserEventsCRUD +from apps.schedule.crud.events import EventCRUD from apps.schedule.crud.notification import NotificationCRUD, ReminderCRUD from apps.schedule.crud.schedule_history import NotificationHistoryCRUD, ReminderHistoryCRUD from apps.schedule.db.schemas import EventSchema, NotificationSchema -from apps.schedule.domain.constants import DefaultEvent, PeriodicityType +from apps.schedule.domain.constants import DefaultEvent, EventType, PeriodicityType from 
apps.schedule.domain.schedule import BaseEvent from apps.schedule.domain.schedule.internal import ( - ActivityEventCreate, Event, EventCreate, EventFull, EventUpdate, - FlowEventCreate, NotificationSetting, ReminderSetting, ReminderSettingCreate, ScheduleEvent, - UserEventCreate, ) from apps.schedule.domain.schedule.public import ( PublicEvent, @@ -96,25 +93,16 @@ async def create_schedule(self, schedule: EventRequest, applet_id: uuid.UUID) -> start_date=schedule.periodicity.start_date, end_date=schedule.periodicity.end_date, selected_date=schedule.periodicity.selected_date, + user_id=schedule.respondent_id, + activity_id=schedule.activity_id, + activity_flow_id=schedule.flow_id, + event_type=EventType.ACTIVITY if schedule.activity_id else EventType.FLOW, ) ) - # Create user event - if schedule.respondent_id: - await UserEventsCRUD(self.session).save(UserEventCreate(event_id=event.id, user_id=schedule.respondent_id)) - # Create event-activity or event-flow - if schedule.activity_id: - await ActivityEventsCRUD(self.session).save( - ActivityEventCreate(event_id=event.id, activity_id=schedule.activity_id) - ) - else: - await FlowEventsCRUD(self.session).save(FlowEventCreate(event_id=event.id, flow_id=schedule.flow_id)) - schedule_event = ScheduleEvent( - **event.dict(exclude={"applet_id"}), - activity_id=schedule.activity_id, - flow_id=schedule.flow_id, - user_id=schedule.respondent_id, + **event.dict(exclude={"applet_id", "activity_flow_id"}), + flow_id=event.activity_flow_id, ) # Create notification and reminder @@ -178,7 +166,6 @@ async def create_schedule(self, schedule: EventRequest, applet_id: uuid.UUID) -> selected_date=event.selected_date, ), respondent_id=schedule.respondent_id, - activity_id=schedule.activity_id, flow_id=schedule.flow_id, notification=notification_public if schedule.notification else None, ) @@ -188,22 +175,18 @@ async def get_schedule_by_id(self, schedule_id: uuid.UUID, applet_id: uuid.UUID) await self._validate_applet(applet_id=applet_id) 
event: Event = await EventCRUD(self.session).get_by_id(pk=schedule_id) - user_id = await UserEventsCRUD(self.session).get_by_event_id(event_id=event.id) - activity_id = await ActivityEventsCRUD(self.session).get_by_event_id(event_id=event.id) - flow_id = await FlowEventsCRUD(self.session).get_by_event_id(event_id=event.id) notification = await self._get_notifications_and_reminder(event.id) return PublicEvent( - **event.dict(exclude={"periodicity"}), + **event.dict(exclude={"periodicity", "user_id", "activity_flow_id"}), periodicity=PublicPeriodicity( type=event.periodicity, start_date=event.start_date, end_date=event.end_date, selected_date=event.selected_date, ), - respondent_id=user_id, - activity_id=activity_id, - flow_id=flow_id, + respondent_id=event.user_id, + flow_id=event.activity_flow_id, notification=notification, ) @@ -225,24 +208,19 @@ async def get_all_schedules(self, applet_id: uuid.UUID, query: QueryParams | Non for event_schema in event_schemas: event: Event = Event.from_orm(event_schema) - - user_id = await UserEventsCRUD(self.session).get_by_event_id(event_id=event.id) - activity_id = await ActivityEventsCRUD(self.session).get_by_event_id(event_id=event.id) - flow_id = await FlowEventsCRUD(self.session).get_by_event_id(event_id=event.id) notification = await self._get_notifications_and_reminder(event.id) events.append( PublicEvent( - **event.dict(exclude={"periodicity"}), + **event.dict(exclude={"periodicity", "user_id", "activity_flow_id"}), periodicity=PublicPeriodicity( type=event.periodicity, start_date=event.start_date, end_date=event.end_date, selected_date=event.selected_date, ), - respondent_id=user_id, - activity_id=activity_id, - flow_id=flow_id, + respondent_id=event.user_id, + flow_id=event.activity_flow_id, notification=notification, ) ) @@ -258,8 +236,6 @@ async def get_public_all_schedules(self, key: uuid.UUID) -> PublicEventByUser: full_events: list[EventFull] = [] for event_schema in event_schemas: event: Event = 
Event.from_orm(event_schema) - activity_id = await ActivityEventsCRUD(self.session).get_by_event_id(event_id=event.id) - flow_id = await FlowEventsCRUD(self.session).get_by_event_id(event_id=event.id) base_event = BaseEvent(**event.dict()) full_events.append( @@ -270,9 +246,11 @@ async def get_public_all_schedules(self, key: uuid.UUID) -> PublicEventByUser: start_date=event.start_date, end_date=event.end_date, selected_date=event.selected_date, - activity_id=activity_id, - flow_id=flow_id, + activity_id=event.activity_id, + flow_id=event.activity_flow_id, + user_id=event.user_id, version=event.version, + event_type=event.event_type, ) ) @@ -299,10 +277,6 @@ async def delete_all_schedules(self, applet_id: uuid.UUID): event_schemas: list[EventSchema] = await EventCRUD(self.session).get_all_by_applet_id_with_filter(applet_id) event_ids = [event_schema.id for event_schema in event_schemas] - # Get all activity_ids and flow_ids - activity_ids = await ActivityEventsCRUD(self.session).get_by_event_ids(event_ids) - flow_ids = await FlowEventsCRUD(self.session).get_by_event_ids(event_ids) - await self._delete_by_ids(event_ids) await ScheduleHistoryService(self.session).mark_as_deleted( @@ -310,50 +284,54 @@ async def delete_all_schedules(self, applet_id: uuid.UUID): ) # Create default events for activities and flows - for activity_id in activity_ids: - await self._create_default_event(applet_id=applet_id, activity_id=activity_id, is_activity=True) - - for flow_id in flow_ids: - await self._create_default_event(applet_id=applet_id, activity_id=flow_id, is_activity=False) + processed_activities_and_flows: dict[uuid.UUID, bool] = {} + for event in event_schemas: + if event.activity_id and event.activity_id not in processed_activities_and_flows: + await self._create_default_event( + applet_id=applet_id, + activity_id=event.activity_id, + is_activity=True, + respondent_id=event.user_id, + ) + processed_activities_and_flows[event.activity_id] = True + if event.activity_flow_id 
and event.activity_flow_id not in processed_activities_and_flows: + await self._create_default_event( + applet_id=applet_id, + activity_id=event.activity_flow_id, + is_activity=False, + respondent_id=event.user_id, + ) + processed_activities_and_flows[event.activity_flow_id] = True async def delete_schedule_by_id(self, schedule_id: uuid.UUID) -> uuid.UUID | None: - event: Event = await EventCRUD(self.session).get_by_id(pk=schedule_id) - respondent_id = await UserEventsCRUD(self.session).get_by_event_id(event_id=schedule_id) - - # Get activity_id or flow_id if exists - activity_id = await ActivityEventsCRUD(self.session).get_by_event_id(event_id=schedule_id) - flow_id = await FlowEventsCRUD(self.session).get_by_event_id(event_id=schedule_id) + crud = EventCRUD(self.session) + event: Event = await crud.get_by_id(pk=schedule_id) - # Delete event-user, event-activity, event-flow await self._delete_by_ids(event_ids=[schedule_id]) await ScheduleHistoryService(self.session).mark_as_deleted([(event.id, event.version)]) - # Create default event for activity or flow if another event doesn't exist # noqa: E501 - if activity_id: - count_events = await ActivityEventsCRUD(self.session).count_by_activity( - activity_id=activity_id, respondent_id=respondent_id - ) + # Create default event for activity or flow if another event doesn't exist + if event.activity_id: + count_events = await crud.count_by_activity(activity_id=event.activity_id, respondent_id=event.user_id) if count_events == 0: await self._create_default_event( applet_id=event.applet_id, - activity_id=activity_id, + activity_id=event.activity_id, is_activity=True, - respondent_id=respondent_id, + respondent_id=event.user_id, ) - elif flow_id: - count_events = await FlowEventsCRUD(self.session).count_by_flow( - flow_id=flow_id, respondent_id=respondent_id - ) + elif event.activity_flow_id: + count_events = await crud.count_by_flow(flow_id=event.activity_flow_id, respondent_id=event.user_id) if count_events == 0: await 
self._create_default_event( applet_id=event.applet_id, - activity_id=flow_id, + activity_id=event.activity_flow_id, is_activity=False, - respondent_id=respondent_id, + respondent_id=event.user_id, ) - return respondent_id + return event.user_id async def update_schedule( self, @@ -365,18 +343,15 @@ async def update_schedule( await self._validate_applet(applet_id=applet_id) event: Event = await EventCRUD(self.session).get_by_id(pk=schedule_id) - activity_id = await ActivityEventsCRUD(self.session).get_by_event_id(event_id=schedule_id) - flow_id = await FlowEventsCRUD(self.session).get_by_event_id(event_id=schedule_id) - respondent_id = await UserEventsCRUD(self.session).get_by_event_id(event_id=schedule_id) # Delete all events of this activity or flow - # if new periodicity type is "always" and old periodicity type is not "always" # noqa: E501 - if schedule.periodicity.type == PeriodicityType.ALWAYS and event.periodicity != PeriodicityType.ALWAYS: # noqa: E501 + # if new periodicity type is "always" and old periodicity type is not "always" + if schedule.periodicity.type == PeriodicityType.ALWAYS and event.periodicity != PeriodicityType.ALWAYS: await self._delete_by_activity_or_flow( applet_id=applet_id, - activity_id=activity_id, - flow_id=flow_id, - respondent_id=respondent_id, + activity_id=event.activity_id, + flow_id=event.activity_flow_id, + respondent_id=event.user_id, only_always_available=False, except_event_id=schedule_id, ) @@ -398,14 +373,16 @@ async def update_schedule( start_date=schedule.periodicity.start_date, end_date=schedule.periodicity.end_date, selected_date=schedule.periodicity.selected_date, + event_type=event.event_type, + activity_id=event.activity_id, + activity_flow_id=event.activity_flow_id, + user_id=event.user_id, ), ) schedule_event = ScheduleEvent( - **event.dict(exclude={"applet_id"}), - activity_id=activity_id, - flow_id=flow_id, - user_id=respondent_id, + **event.dict(exclude={"applet_id", "activity_flow_id"}), + 
flow_id=event.activity_flow_id, ) # Update notification @@ -469,16 +446,15 @@ async def update_schedule( ) return PublicEvent( - **event.dict(exclude={"periodicity"}), + **event.dict(exclude={"periodicity", "user_id", "activity_flow_id"}), periodicity=PublicPeriodicity( type=event.periodicity, start_date=event.start_date, end_date=event.end_date, selected_date=event.selected_date, ), - respondent_id=respondent_id, - activity_id=activity_id, - flow_id=flow_id, + respondent_id=event.user_id, + flow_id=event.activity_flow_id, notification=notification_public, ) @@ -515,46 +491,43 @@ async def count_schedules(self, applet_id: uuid.UUID) -> PublicEventCount: event_count = PublicEventCount(activity_events=[], flow_events=[]) # Get list of activity-event ids - activity_counts = await ActivityEventsCRUD(self.session).count_by_applet(applet_id=applet_id) - - # Get list of flow-event ids - flow_counts = await FlowEventsCRUD(self.session).count_by_applet(applet_id=applet_id) + activity_counts, flow_counts = await EventCRUD(self.session).count_by_applet(applet_id=applet_id) event_count.activity_events = activity_counts if activity_counts else [] event_count.flow_events = flow_counts if flow_counts else [] return event_count - async def delete_by_user_id(self, applet_id, user_id): + async def delete_by_user_id(self, applet_id: uuid.UUID, user_id: uuid.UUID) -> None: # Check if applet exists await self._validate_applet(applet_id=applet_id) # Check if user exists await self._validate_user(user_id=user_id) - # Get list of activity-event ids and flow-event ids for user to create default events # noqa: E501 - activities = await ActivityEventsCRUD(self.session).get_by_applet_and_user_id(applet_id, user_id) + event_schemas = await EventCRUD(self.session).get_all_by_applet_and_user(applet_id, user_id) - activity_ids = {activity.activity_id for activity in activities} + # List of event_ids for user for deletion + event_ids: list[uuid.UUID] = [] + activity_ids: set[uuid.UUID] = set() + 
flow_ids: set[uuid.UUID] = set() - flows = await FlowEventsCRUD(self.session).get_by_applet_and_user_id(applet_id, user_id) - flow_ids = {flow.flow_id for flow in flows} + for event in event_schemas: + event_ids.append(event.id) + if event.activity_id: + activity_ids.add(event.activity_id) + if event.flow_id: + flow_ids.add(event.flow_id) - # Get list of event_ids for user and delete them all - event_schemas = await EventCRUD(self.session).get_all_by_applet_and_user(applet_id, user_id) - event_ids = [event_schema.id for event_schema in event_schemas] if not event_ids: raise ScheduleNotFoundError() - await self._delete_by_ids( - event_ids=event_ids, - user_id=user_id, - ) + await self._delete_by_ids(event_ids=event_ids) await ScheduleHistoryService(self.session).mark_as_deleted( [(event.id, event.version) for event in event_schemas] ) - # Create AA events for all activities and flows + # Create always available events for all activities and flows await self.create_default_schedules( applet_id=applet_id, activity_ids=list(activity_ids), @@ -616,8 +589,7 @@ async def _delete_by_activity_or_flow( only_always_available, ) - clean_events = [event for event in event_schemas if event.id != except_event_id] - event_ids = [event.id for event in clean_events] + event_ids = [event.id for event in event_schemas if event.id != except_event_id] if event_ids: await self._delete_by_ids(event_ids=event_ids) @@ -628,18 +600,7 @@ async def _delete_by_activity_or_flow( async def _delete_by_ids( self, event_ids: list[uuid.UUID], - user_id: uuid.UUID | None = None, ): - if user_id: - await UserEventsCRUD(self.session).delete_all_by_events_and_user( - event_ids, - user_id, - ) - else: - await UserEventsCRUD(self.session).delete_all_by_event_ids(event_ids) - - await ActivityEventsCRUD(self.session).delete_all_by_event_ids(event_ids) - await FlowEventsCRUD(self.session).delete_all_by_event_ids(event_ids) await NotificationCRUD(self.session).delete_by_event_ids(event_ids) await 
ReminderCRUD(self.session).delete_by_event_ids(event_ids) await EventCRUD(self.session).delete_by_ids(event_ids) @@ -890,10 +851,7 @@ async def remove_individual_calendar(self, user_id: uuid.UUID, applet_id: uuid.U if not event_ids: raise ScheduleNotFoundError() - await self._delete_by_ids( - event_ids=event_ids, - user_id=user_id, - ) + await self._delete_by_ids(event_ids=event_ids) await ScheduleHistoryService(self.session).mark_as_deleted( [(event.id, event.version) for event in event_schemas] @@ -955,7 +913,7 @@ async def create_default_schedules_if_not_exist( activity_ids: list[uuid.UUID], ) -> None: """Create default schedules for applet.""" - activities_without_events = await ActivityEventsCRUD(self.session).get_missing_events(activity_ids) + activities_without_events = await EventCRUD(self.session).get_activities_without_events(activity_ids) await self.create_default_schedules( applet_id=applet_id, activity_ids=activities_without_events, diff --git a/src/apps/workspaces/crud/user_applet_access.py b/src/apps/workspaces/crud/user_applet_access.py index b2f46c1c970..05579baee1b 100644 --- a/src/apps/workspaces/crud/user_applet_access.py +++ b/src/apps/workspaces/crud/user_applet_access.py @@ -29,7 +29,7 @@ from apps.applets.db.schemas import AppletSchema from apps.invitations.constants import InvitationStatus from apps.invitations.db import InvitationSchema -from apps.schedule.db.schemas import EventSchema, UserEventsSchema +from apps.schedule.db.schemas import EventSchema from apps.shared.encryption import get_key from apps.shared.filtering import FilterField, Filtering from apps.shared.ordering import Ordering @@ -402,10 +402,9 @@ async def get_workspace_respondents( workspace_applets_sq = self.workspace_applets_subquery(owner_id, applet_id) schedule_exists = ( - select(UserEventsSchema) - .join(EventSchema, EventSchema.id == UserEventsSchema.event_id) + select(EventSchema) .where( - UserEventsSchema.user_id == UserAppletAccessSchema.user_id, + 
EventSchema.user_id == UserAppletAccessSchema.user_id, EventSchema.applet_id == UserAppletAccessSchema.applet_id, ) .exists() @@ -1047,12 +1046,11 @@ async def get_respondent_accesses_by_owner_id( page: int, limit: int, ) -> list[RespondentAppletAccess]: - individual_event_query: Query = select(UserEventsSchema.id) - individual_event_query = individual_event_query.join(EventSchema, EventSchema.id == UserEventsSchema.event_id) + individual_event_query: Query = select(EventSchema.id) individual_event_query = individual_event_query.where( - UserEventsSchema.user_id == UserAppletAccessSchema.user_id + EventSchema.user_id == UserAppletAccessSchema.user_id, + EventSchema.applet_id == UserAppletAccessSchema.applet_id, ) - individual_event_query = individual_event_query.where(EventSchema.applet_id == UserAppletAccessSchema.applet_id) query: Query = select( SubjectSchema.secret_user_id, diff --git a/src/infrastructure/database/migrations/versions/2025_02_02_18_39-clean_up_schedule_tables.py b/src/infrastructure/database/migrations/versions/2025_02_02_18_39-clean_up_schedule_tables.py new file mode 100644 index 00000000000..69c43bf080d --- /dev/null +++ b/src/infrastructure/database/migrations/versions/2025_02_02_18_39-clean_up_schedule_tables.py @@ -0,0 +1,174 @@ +"""Clean up schedule tables + +Revision ID: 3059a8ad6ec5 +Revises: 7c7e30fa96a4 +Create Date: 2025-02-02 18:39:01.011295 + +""" + +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. 
+revision = "3059a8ad6ec5" +down_revision = "7c7e30fa96a4" +branch_labels = None +depends_on = None + +EVENT_TYPE_ENUM = 'event_type_enum' +EVENT_TYPE_ENUM_VALUES = ['activity', 'flow'] + + +def upgrade() -> None: + # Add columns `event_type`, `activity_id`, `activity_flow_id`, and `user_id` to `events` + op.add_column("events", sa.Column("activity_id", postgresql.UUID(as_uuid=True), nullable=True)) + op.add_column("events", sa.Column("activity_flow_id", postgresql.UUID(as_uuid=True), nullable=True)) + op.add_column("events", sa.Column("user_id", postgresql.UUID(as_uuid=True), sa.ForeignKey("users.id", ondelete="RESTRICT"), nullable=True)) + op.add_column("events", sa.Column("event_type", postgresql.ENUM(*EVENT_TYPE_ENUM_VALUES, name=EVENT_TYPE_ENUM, create_type=False), nullable=True)) + + # Migrate data from `activity_events`, `flow_events`, and `user_events` to `events` + op.execute(""" + UPDATE events dst + SET activity_id=ae.activity_id, + activity_flow_id = fe.flow_id, + user_id=ue.user_id, + event_type=(CASE WHEN ae.activity_id IS NOT NULL THEN 'activity' ELSE 'flow' END)::event_type_enum + FROM events e + LEFT JOIN activity_events ae ON e.id = ae.event_id + LEFT JOIN flow_events fe ON e.id = fe.event_id + LEFT JOIN user_events ue ON e.id = ue.event_id + WHERE dst.id = e.id + """) + + # Make sure that the `event_type` column is not null + op.alter_column("events", "event_type", nullable=False) + + # Drop the `periodicity_id` column from the `events` table + op.drop_column("events", "periodicity_id") + + # Drop tables + op.drop_table("activity_events") + op.drop_table("flow_events") + op.drop_table("user_events") + op.drop_table("periodicity") + + +def downgrade() -> None: + # Recreate the dropped tables + op.create_table( + "activity_events", + sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("is_deleted", sa.Boolean(), nullable=True), + sa.Column("created_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), 
nullable=True), + sa.Column("updated_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), + sa.Column("migrated_date", sa.DateTime(), nullable=True), + sa.Column("migrated_updated", sa.DateTime(), nullable=True), + sa.Column("event_id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("activity_id", postgresql.UUID(as_uuid=True), nullable=False), + sa.PrimaryKeyConstraint("id", name=op.f("pk_activity_events")), + sa.UniqueConstraint("activity_id", "event_id", "is_deleted", name="_unique_activity_events"), + sa.ForeignKeyConstraint(["event_id"], ["events.id"], name=op.f("fk_activity_events_event_id_events"), ondelete="CASCADE"), + ) + + op.create_table( + "flow_events", + sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("is_deleted", sa.Boolean(), nullable=True), + sa.Column("created_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), + sa.Column("updated_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), + sa.Column("migrated_date", sa.DateTime(), nullable=True), + sa.Column("migrated_updated", sa.DateTime(), nullable=True), + sa.Column("event_id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("flow_id", postgresql.UUID(as_uuid=True), nullable=False), + sa.PrimaryKeyConstraint("id", name=op.f("pk_flow_events")), + sa.UniqueConstraint("flow_id", "event_id", "is_deleted", name="_unique_flow_events"), + sa.ForeignKeyConstraint(["event_id"], ["events.id"], name=op.f("fk_flow_events_event_id_events"), ondelete="CASCADE"), + ) + + op.create_table( + "user_events", + sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("is_deleted", sa.Boolean(), nullable=True), + sa.Column("created_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), + sa.Column("updated_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), + 
sa.Column("migrated_date", sa.DateTime(), nullable=True), + sa.Column("migrated_updated", sa.DateTime(), nullable=True), + sa.Column("event_id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("user_id", postgresql.UUID(as_uuid=True), nullable=False), + sa.PrimaryKeyConstraint("id", name=op.f("pk_user_events")), + sa.UniqueConstraint("user_id", "event_id", "is_deleted", name="_unique_user_events"), + sa.ForeignKeyConstraint(["event_id"], ["events.id"], name=op.f("fk_user_events_event_id_events"), ondelete="CASCADE"), + sa.ForeignKeyConstraint(["user_id"], ["users.id"], name=op.f("fk_user_events_user_id_users"), ondelete="RESTRICT"), + ) + + op.create_table( + "periodicity", + sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("is_deleted", sa.Boolean(), nullable=True), + sa.Column("created_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), + sa.Column("updated_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), + sa.Column("migrated_date", sa.DateTime(), nullable=True), + sa.Column("migrated_updated", sa.DateTime(), nullable=True), + sa.Column("type", sa.String(10), nullable=False), + sa.Column("start_date", sa.Date(), nullable=True), + sa.Column("end_date", sa.Date(), nullable=True), + sa.Column("selected_date", sa.Date(), nullable=True), + sa.PrimaryKeyConstraint("id", name=op.f("pk_periodicity")), + ) + + # Add the `periodicity_id` column back to the `events` table + op.add_column( + "events", + sa.Column( + "periodicity_id", + postgresql.UUID(as_uuid=True), + server_default=sa.text("gen_random_uuid()"), + nullable=False + ) + ) + + # Generate periodicity IDs for existing events + op.execute(""" + UPDATE events + SET periodicity_id = gen_random_uuid() + WHERE periodicity_id IS NULL + """) + + # Repopulate the `activity_events`, `flow_events`, `user_events`, and `periodicity` tables + # We do lose some data here (e.g. 
the original `id`, `created_at`, `updated_at`, `migrated_date`, `migrated_updated`), + # because we can't recover that data from the `events` table + op.execute(""" + INSERT INTO activity_events (id, is_deleted, activity_id, event_id) + SELECT gen_random_uuid(), e.is_deleted, e.activity_id, e.id + FROM events e + WHERE e.activity_id IS NOT NULL + AND e.event_type = 'activity' + """) + + op.execute(""" + INSERT INTO flow_events (id, is_deleted, flow_id, event_id) + SELECT gen_random_uuid(), e.is_deleted, e.activity_flow_id, e.id + FROM events e + WHERE e.activity_flow_id IS NOT NULL + AND e.event_type = 'flow' + """) + + op.execute(""" + INSERT INTO user_events (id, is_deleted, user_id, event_id) + SELECT gen_random_uuid(), e.is_deleted, e.user_id, e.id + FROM events e + WHERE e.user_id IS NOT NULL + """) + + op.execute(""" + INSERT INTO periodicity (id, is_deleted, type, start_date, end_date, selected_date) + SELECT e.periodicity_id, e.is_deleted, e.periodicity, e.start_date, e.end_date, e.selected_date + FROM events e + """) + + # Drop the new columns from the `events` table + op.drop_column("events", "activity_id") + op.drop_column("events", "activity_flow_id") + op.drop_column("events", "user_id") + op.drop_column("events", "event_type") From 915061aea29f106c4fe3386be5e18c017ea12b45 Mon Sep 17 00:00:00 2001 From: Kenroy Gobourne <14842108+sultanofcardio@users.noreply.github.com> Date: Mon, 24 Feb 2025 14:18:16 -0500 Subject: [PATCH 09/14] chore: Restore *_event tables (M2-8495) (#1752) This PR restores the following tables that were previously removed as part of #1723: - activity_events - flow_events - user_events This restoration is optional and should only run if the tables are missing. So it should run on dev, but do nothing on UAT. The rest of the code remains unchanged, and this change is made on Jody's suggestion to split this across two releases. 
There will be a follow-up PR that drops these tables in about a week or so --- ..._02_18_39-clean_up_schedule_tables_pt_1.py | 102 +++++++++++++++++ ...5_02_24_12_25-restore_tables_optionally.py | 107 ++++++++++++++++++ 2 files changed, 209 insertions(+) create mode 100644 src/infrastructure/database/migrations/versions/2025_02_02_18_39-clean_up_schedule_tables_pt_1.py create mode 100644 src/infrastructure/database/migrations/versions/2025_02_24_12_25-restore_tables_optionally.py diff --git a/src/infrastructure/database/migrations/versions/2025_02_02_18_39-clean_up_schedule_tables_pt_1.py b/src/infrastructure/database/migrations/versions/2025_02_02_18_39-clean_up_schedule_tables_pt_1.py new file mode 100644 index 00000000000..06178d78be7 --- /dev/null +++ b/src/infrastructure/database/migrations/versions/2025_02_02_18_39-clean_up_schedule_tables_pt_1.py @@ -0,0 +1,102 @@ +"""Clean up schedule tables + +Revision ID: 3059a8ad6ec5 +Revises: 7c7e30fa96a4 +Create Date: 2025-02-02 18:39:01.011295 + +""" + +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. 
+revision = "3059a8ad6ec5" +down_revision = "7c7e30fa96a4" +branch_labels = None +depends_on = None + +EVENT_TYPE_ENUM = 'event_type_enum' +EVENT_TYPE_ENUM_VALUES = ['activity', 'flow'] + + +def upgrade() -> None: + # Add columns `event_type`, `activity_id`, `activity_flow_id`, and `user_id` to `events` + op.add_column("events", sa.Column("activity_id", postgresql.UUID(as_uuid=True), nullable=True)) + op.add_column("events", sa.Column("activity_flow_id", postgresql.UUID(as_uuid=True), nullable=True)) + op.add_column("events", sa.Column("user_id", postgresql.UUID(as_uuid=True), sa.ForeignKey("users.id", ondelete="RESTRICT"), nullable=True)) + op.add_column("events", sa.Column("event_type", postgresql.ENUM(*EVENT_TYPE_ENUM_VALUES, name=EVENT_TYPE_ENUM, create_type=False), nullable=True)) + + # Migrate data from `activity_events`, `flow_events`, and `user_events` to `events` + op.execute(""" + UPDATE events dst + SET activity_id=ae.activity_id, + activity_flow_id = fe.flow_id, + user_id=ue.user_id, + event_type=(CASE WHEN ae.activity_id IS NOT NULL THEN 'activity' ELSE 'flow' END)::event_type_enum + FROM events e + LEFT JOIN activity_events ae ON e.id = ae.event_id + LEFT JOIN flow_events fe ON e.id = fe.event_id + LEFT JOIN user_events ue ON e.id = ue.event_id + WHERE dst.id = e.id + """) + + # Make sure that the `event_type` column is not null + op.alter_column("events", "event_type", nullable=False) + + # Drop the `periodicity_id` column from the `events` table + op.drop_column("events", "periodicity_id") + + # Drop the periodicity table + op.drop_table("periodicity") + + +def downgrade() -> None: + # Recreate the dropped tables + op.create_table( + "periodicity", + sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("is_deleted", sa.Boolean(), nullable=True), + sa.Column("created_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), + sa.Column("updated_at", sa.DateTime(), 
server_default=sa.text("timezone('utc', now())"), nullable=True), + sa.Column("migrated_date", sa.DateTime(), nullable=True), + sa.Column("migrated_updated", sa.DateTime(), nullable=True), + sa.Column("type", sa.String(10), nullable=False), + sa.Column("start_date", sa.Date(), nullable=True), + sa.Column("end_date", sa.Date(), nullable=True), + sa.Column("selected_date", sa.Date(), nullable=True), + sa.PrimaryKeyConstraint("id", name=op.f("pk_periodicity")), + ) + + # Add the `periodicity_id` column back to the `events` table + op.add_column( + "events", + sa.Column( + "periodicity_id", + postgresql.UUID(as_uuid=True), + server_default=sa.text("gen_random_uuid()"), + nullable=False + ) + ) + + # Generate periodicity IDs for existing events + op.execute(""" + UPDATE events + SET periodicity_id = gen_random_uuid() + WHERE periodicity_id IS NULL + """) + + # Repopulate the `periodicity` table + # We do lose some data here (e.g. the original `id`, `created_at`, `updated_at`, `migrated_date`, `migrated_updated`), + # because we can't recover that data from the `events` table + op.execute(""" + INSERT INTO periodicity (id, is_deleted, type, start_date, end_date, selected_date) + SELECT e.periodicity_id, e.is_deleted, e.periodicity, e.start_date, e.end_date, e.selected_date + FROM events e + """) + + # Drop the new columns from the `events` table + op.drop_column("events", "activity_id") + op.drop_column("events", "activity_flow_id") + op.drop_column("events", "user_id") + op.drop_column("events", "event_type") diff --git a/src/infrastructure/database/migrations/versions/2025_02_24_12_25-restore_tables_optionally.py b/src/infrastructure/database/migrations/versions/2025_02_24_12_25-restore_tables_optionally.py new file mode 100644 index 00000000000..c0ed958d724 --- /dev/null +++ b/src/infrastructure/database/migrations/versions/2025_02_24_12_25-restore_tables_optionally.py @@ -0,0 +1,107 @@ +"""Restore tables optionally + +Revision ID: 70987d489b17 +Revises: 3059a8ad6ec5 
+Create Date: 2025-02-24 12:25:54.170519 + +""" + +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = "70987d489b17" +down_revision = "3059a8ad6ec5" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # Recreate the dropped tables (if necessary) from the previous version of the previous migration (3059a8ad6ec5) + op.create_table( + "activity_events", + sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("is_deleted", sa.Boolean(), nullable=True), + sa.Column("created_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), + sa.Column("updated_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), + sa.Column("migrated_date", sa.DateTime(), nullable=True), + sa.Column("migrated_updated", sa.DateTime(), nullable=True), + sa.Column("event_id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("activity_id", postgresql.UUID(as_uuid=True), nullable=False), + sa.PrimaryKeyConstraint("id", name=op.f("pk_activity_events")), + sa.UniqueConstraint("activity_id", "event_id", "is_deleted", name="_unique_activity_events"), + sa.ForeignKeyConstraint(["event_id"], ["events.id"], name=op.f("fk_activity_events_event_id_events"), + ondelete="CASCADE"), + if_not_exists=True, + ) + + op.create_table( + "flow_events", + sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("is_deleted", sa.Boolean(), nullable=True), + sa.Column("created_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), + sa.Column("updated_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), + sa.Column("migrated_date", sa.DateTime(), nullable=True), + sa.Column("migrated_updated", sa.DateTime(), nullable=True), + sa.Column("event_id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("flow_id", 
postgresql.UUID(as_uuid=True), nullable=False), + sa.PrimaryKeyConstraint("id", name=op.f("pk_flow_events")), + sa.UniqueConstraint("flow_id", "event_id", "is_deleted", name="_unique_flow_events"), + sa.ForeignKeyConstraint(["event_id"], ["events.id"], name=op.f("fk_flow_events_event_id_events"), + ondelete="CASCADE"), + if_not_exists=True, + ) + + op.create_table( + "user_events", + sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("is_deleted", sa.Boolean(), nullable=True), + sa.Column("created_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), + sa.Column("updated_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), + sa.Column("migrated_date", sa.DateTime(), nullable=True), + sa.Column("migrated_updated", sa.DateTime(), nullable=True), + sa.Column("event_id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("user_id", postgresql.UUID(as_uuid=True), nullable=False), + sa.PrimaryKeyConstraint("id", name=op.f("pk_user_events")), + sa.UniqueConstraint("user_id", "event_id", "is_deleted", name="_unique_user_events"), + sa.ForeignKeyConstraint(["event_id"], ["events.id"], name=op.f("fk_user_events_event_id_events"), + ondelete="CASCADE"), + sa.ForeignKeyConstraint(["user_id"], ["users.id"], name=op.f("fk_user_events_user_id_users"), + ondelete="RESTRICT"), + if_not_exists=True, + ) + + # Repopulate the `activity_events`, `flow_events`, and `user_events` tables only if they are empty. This is to + # Undo the previous version of the previous migration (3059a8ad6ec5) that dropped these tables + # We do lose some data here (e.g. 
the original `id`, `created_at`, `updated_at`, `migrated_date`, `migrated_updated`), + # because we can't recover that data from the `events` table + op.execute(""" + INSERT INTO activity_events (id, is_deleted, activity_id, event_id) + SELECT gen_random_uuid(), e.is_deleted, e.activity_id, e.id + FROM events e + WHERE e.activity_id IS NOT NULL + AND e.event_type = 'activity' + AND NOT EXISTS (SELECT 1 FROM activity_events) + """) + + op.execute(""" + INSERT INTO flow_events (id, is_deleted, flow_id, event_id) + SELECT gen_random_uuid(), e.is_deleted, e.activity_flow_id, e.id + FROM events e + WHERE e.activity_flow_id IS NOT NULL + AND e.event_type = 'flow' + AND NOT EXISTS (SELECT 1 FROM flow_events) + """) + + op.execute(""" + INSERT INTO user_events (id, is_deleted, user_id, event_id) + SELECT gen_random_uuid(), e.is_deleted, e.user_id, e.id + FROM events e + WHERE e.user_id IS NOT NULL + AND NOT EXISTS (SELECT 1 FROM user_events) + """) + + +def downgrade() -> None: + pass From e5a20778a4ba0f67284468871a513ffa01256b3b Mon Sep 17 00:00:00 2001 From: sultanofcardio <14842108+sultanofcardio@users.noreply.github.com> Date: Mon, 24 Feb 2025 14:28:27 -0500 Subject: [PATCH 10/14] Remove duplicate migration file --- ...25_02_02_18_39-clean_up_schedule_tables.py | 174 ------------------ 1 file changed, 174 deletions(-) delete mode 100644 src/infrastructure/database/migrations/versions/2025_02_02_18_39-clean_up_schedule_tables.py diff --git a/src/infrastructure/database/migrations/versions/2025_02_02_18_39-clean_up_schedule_tables.py b/src/infrastructure/database/migrations/versions/2025_02_02_18_39-clean_up_schedule_tables.py deleted file mode 100644 index 69c43bf080d..00000000000 --- a/src/infrastructure/database/migrations/versions/2025_02_02_18_39-clean_up_schedule_tables.py +++ /dev/null @@ -1,174 +0,0 @@ -"""Clean up schedule tables - -Revision ID: 3059a8ad6ec5 -Revises: 7c7e30fa96a4 -Create Date: 2025-02-02 18:39:01.011295 - -""" - -import sqlalchemy as sa -from 
alembic import op -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision = "3059a8ad6ec5" -down_revision = "7c7e30fa96a4" -branch_labels = None -depends_on = None - -EVENT_TYPE_ENUM = 'event_type_enum' -EVENT_TYPE_ENUM_VALUES = ['activity', 'flow'] - - -def upgrade() -> None: - # Add columns `event_type`, `activity_id`, `activity_flow_id`, and `user_id` to `events` - op.add_column("events", sa.Column("activity_id", postgresql.UUID(as_uuid=True), nullable=True)) - op.add_column("events", sa.Column("activity_flow_id", postgresql.UUID(as_uuid=True), nullable=True)) - op.add_column("events", sa.Column("user_id", postgresql.UUID(as_uuid=True), sa.ForeignKey("users.id", ondelete="RESTRICT"), nullable=True)) - op.add_column("events", sa.Column("event_type", postgresql.ENUM(*EVENT_TYPE_ENUM_VALUES, name=EVENT_TYPE_ENUM, create_type=False), nullable=True)) - - # Migrate data from `activity_events`, `flow_events`, and `user_events` to `events` - op.execute(""" - UPDATE events dst - SET activity_id=ae.activity_id, - activity_flow_id = fe.flow_id, - user_id=ue.user_id, - event_type=(CASE WHEN ae.activity_id IS NOT NULL THEN 'activity' ELSE 'flow' END)::event_type_enum - FROM events e - LEFT JOIN activity_events ae ON e.id = ae.event_id - LEFT JOIN flow_events fe ON e.id = fe.event_id - LEFT JOIN user_events ue ON e.id = ue.event_id - WHERE dst.id = e.id - """) - - # Make sure that the `event_type` column is not null - op.alter_column("events", "event_type", nullable=False) - - # Drop the `periodicity_id` column from the `events` table - op.drop_column("events", "periodicity_id") - - # Drop tables - op.drop_table("activity_events") - op.drop_table("flow_events") - op.drop_table("user_events") - op.drop_table("periodicity") - - -def downgrade() -> None: - # Recreate the dropped tables - op.create_table( - "activity_events", - sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False), - sa.Column("is_deleted", sa.Boolean(), 
nullable=True), - sa.Column("created_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), - sa.Column("updated_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), - sa.Column("migrated_date", sa.DateTime(), nullable=True), - sa.Column("migrated_updated", sa.DateTime(), nullable=True), - sa.Column("event_id", postgresql.UUID(as_uuid=True), nullable=False), - sa.Column("activity_id", postgresql.UUID(as_uuid=True), nullable=False), - sa.PrimaryKeyConstraint("id", name=op.f("pk_activity_events")), - sa.UniqueConstraint("activity_id", "event_id", "is_deleted", name="_unique_activity_events"), - sa.ForeignKeyConstraint(["event_id"], ["events.id"], name=op.f("fk_activity_events_event_id_events"), ondelete="CASCADE"), - ) - - op.create_table( - "flow_events", - sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False), - sa.Column("is_deleted", sa.Boolean(), nullable=True), - sa.Column("created_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), - sa.Column("updated_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), - sa.Column("migrated_date", sa.DateTime(), nullable=True), - sa.Column("migrated_updated", sa.DateTime(), nullable=True), - sa.Column("event_id", postgresql.UUID(as_uuid=True), nullable=False), - sa.Column("flow_id", postgresql.UUID(as_uuid=True), nullable=False), - sa.PrimaryKeyConstraint("id", name=op.f("pk_flow_events")), - sa.UniqueConstraint("flow_id", "event_id", "is_deleted", name="_unique_flow_events"), - sa.ForeignKeyConstraint(["event_id"], ["events.id"], name=op.f("fk_flow_events_event_id_events"), ondelete="CASCADE"), - ) - - op.create_table( - "user_events", - sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False), - sa.Column("is_deleted", sa.Boolean(), nullable=True), - sa.Column("created_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), - sa.Column("updated_at", 
sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), - sa.Column("migrated_date", sa.DateTime(), nullable=True), - sa.Column("migrated_updated", sa.DateTime(), nullable=True), - sa.Column("event_id", postgresql.UUID(as_uuid=True), nullable=False), - sa.Column("user_id", postgresql.UUID(as_uuid=True), nullable=False), - sa.PrimaryKeyConstraint("id", name=op.f("pk_user_events")), - sa.UniqueConstraint("user_id", "event_id", "is_deleted", name="_unique_user_events"), - sa.ForeignKeyConstraint(["event_id"], ["events.id"], name=op.f("fk_user_events_event_id_events"), ondelete="CASCADE"), - sa.ForeignKeyConstraint(["user_id"], ["users.id"], name=op.f("fk_user_events_user_id_users"), ondelete="RESTRICT"), - ) - - op.create_table( - "periodicity", - sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False), - sa.Column("is_deleted", sa.Boolean(), nullable=True), - sa.Column("created_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), - sa.Column("updated_at", sa.DateTime(), server_default=sa.text("timezone('utc', now())"), nullable=True), - sa.Column("migrated_date", sa.DateTime(), nullable=True), - sa.Column("migrated_updated", sa.DateTime(), nullable=True), - sa.Column("type", sa.String(10), nullable=False), - sa.Column("start_date", sa.Date(), nullable=True), - sa.Column("end_date", sa.Date(), nullable=True), - sa.Column("selected_date", sa.Date(), nullable=True), - sa.PrimaryKeyConstraint("id", name=op.f("pk_periodicity")), - ) - - # Add the `periodicity_id` column back to the `events` table - op.add_column( - "events", - sa.Column( - "periodicity_id", - postgresql.UUID(as_uuid=True), - server_default=sa.text("gen_random_uuid()"), - nullable=False - ) - ) - - # Generate periodicity IDs for existing events - op.execute(""" - UPDATE events - SET periodicity_id = gen_random_uuid() - WHERE periodicity_id IS NULL - """) - - # Repopulate the `activity_events`, `flow_events`, `user_events`, and `periodicity` 
tables - # We do lose some data here (e.g. the original `id`, `created_at`, `updated_at`, `migrated_date`, `migrated_updated`), - # because we can't recover that data from the `events` table - op.execute(""" - INSERT INTO activity_events (id, is_deleted, activity_id, event_id) - SELECT gen_random_uuid(), e.is_deleted, e.activity_id, e.id - FROM events e - WHERE e.activity_id IS NOT NULL - AND e.event_type = 'activity' - """) - - op.execute(""" - INSERT INTO flow_events (id, is_deleted, flow_id, event_id) - SELECT gen_random_uuid(), e.is_deleted, e.activity_flow_id, e.id - FROM events e - WHERE e.activity_flow_id IS NOT NULL - AND e.event_type = 'flow' - """) - - op.execute(""" - INSERT INTO user_events (id, is_deleted, user_id, event_id) - SELECT gen_random_uuid(), e.is_deleted, e.user_id, e.id - FROM events e - WHERE e.user_id IS NOT NULL - """) - - op.execute(""" - INSERT INTO periodicity (id, is_deleted, type, start_date, end_date, selected_date) - SELECT e.periodicity_id, e.is_deleted, e.periodicity, e.start_date, e.end_date, e.selected_date - FROM events e - """) - - # Drop the new columns from the `events` table - op.drop_column("events", "activity_id") - op.drop_column("events", "activity_flow_id") - op.drop_column("events", "user_id") - op.drop_column("events", "event_type") From 121737f4c8e9fbb51f963a51990a21a9db3f33ed Mon Sep 17 00:00:00 2001 From: Marty Date: Wed, 26 Feb 2025 11:24:30 -0800 Subject: [PATCH 11/14] fix: displays translated subscale q/a in dataviz and data export (#1759) * fix: displays translated subscale q/a in dataviz and data export Updates the script to update the subscale items in activity_items_histories, which is where the BE sources the submissions from for the dataviz and data export * fix: ruff formatting errors * fix: use date when ordering instead Original ordering by id_version works up to 1.9.0 but because it orders by text it incorrectly orders that higher than 1.10.0 * chore: Add logging --------- Co-authored-by: Marty 
<2614025+mbanting@users.noreply.github.com> --- .../m2_8568_update_subscale_items_to_greek.py | 91 ++++++++++++++++++- 1 file changed, 88 insertions(+), 3 deletions(-) diff --git a/src/apps/shared/commands/patches/m2_8568_update_subscale_items_to_greek.py b/src/apps/shared/commands/patches/m2_8568_update_subscale_items_to_greek.py index ffda637392e..82f3542a804 100644 --- a/src/apps/shared/commands/patches/m2_8568_update_subscale_items_to_greek.py +++ b/src/apps/shared/commands/patches/m2_8568_update_subscale_items_to_greek.py @@ -1,12 +1,17 @@ import uuid from uuid import UUID -from sqlalchemy import cast, func, select, update +from sqlalchemy import cast, desc, func, select, update from sqlalchemy.cimmutabledict import immutabledict from sqlalchemy.dialects.postgresql import JSONB from sqlalchemy.ext.asyncio import AsyncSession -from apps.activities.db.schemas import ActivityItemSchema, ActivitySchema +from apps.activities.db.schemas import ( + ActivityHistorySchema, + ActivityItemHistorySchema, + ActivityItemSchema, + ActivitySchema, +) async def update_age_screen(session: AsyncSession, applet_id: UUID): @@ -31,6 +36,40 @@ async def update_age_screen(session: AsyncSession, applet_id: UUID): ) await session.execute(update_query, execution_options=immutabledict({"synchronize_session": "fetch"})) + + # Determine the current version of the activity_id + subquery = ( + select(ActivityHistorySchema.id_version) + .join(ActivityItemSchema, ActivityHistorySchema.id == ActivityItemSchema.activity_id) + .join(ActivitySchema, ActivityItemSchema.activity_id == ActivitySchema.id) + .where(ActivityItemSchema.name == "age_screen", ActivitySchema.applet_id == applet_id) + .order_by(desc(ActivityHistorySchema.updated_at)) + .limit(1) + ) + + current_version_activity_id = await session.execute(subquery) + current_version_activity_id = current_version_activity_id.scalar() + + print("Current activity version id: ", current_version_activity_id) + + # Update the 
ActivityItemHistorySchema table + update_history_query = ( + update(ActivityItemHistorySchema) + .where( + ActivityItemHistorySchema.name == "age_screen", + ActivityItemHistorySchema.activity_id == current_version_activity_id, + ) + .values( + question=func.jsonb_set( + ActivityItemHistorySchema.question, + ["en"], + cast(new_question_value["el"], JSONB), + True, + ) + ) + ) + + await session.execute(update_history_query, execution_options=immutabledict({"synchronize_session": "fetch"})) print(f"Updated age screen for applet_id: {applet_id}") @@ -43,7 +82,7 @@ async def update_gender_screen(session: AsyncSession, applet_id: UUID): "Female": "Γυναίκα", } - query = select(ActivityItemSchema.id, ActivityItemSchema.response_values).where( + query = select(ActivityItemSchema.id, ActivityItemSchema.response_values, ActivityItemSchema.activity_id).where( ActivityItemSchema.name == "gender_screen", ActivityItemSchema.activity_id.in_(select(ActivitySchema.id).where(ActivitySchema.applet_id == applet_id)), ) @@ -94,6 +133,52 @@ async def update_gender_screen(session: AsyncSession, applet_id: UUID): ) await session.execute(update_query, execution_options=immutabledict({"synchronize_session": "fetch"})) + + # Determine the current version of the activity_id + subquery = ( + select(ActivityHistorySchema.id_version) + .where(ActivityHistorySchema.id == item.activity_id) + .order_by(desc(ActivityHistorySchema.updated_at)) + .limit(1) + ) + + current_version_activity_id = await session.execute(subquery) + current_version_activity_id = current_version_activity_id.scalar() + + print("Current activity version id: ", current_version_activity_id) + + # Update the ActivityItemHistorySchema table + update_history_response_values = func.jsonb_set( + func.jsonb_set( + ActivityItemHistorySchema.response_values, + ["options", str(male_index), "text"], + cast(translations["Male"], JSONB), + True, + ), + ["options", str(female_index), "text"], + cast(translations["Female"], JSONB), + True, + ) 
+ + update_history_query = ( + update(ActivityItemHistorySchema) + .where( + ActivityItemHistorySchema.name == "gender_screen", + ActivityItemHistorySchema.activity_id == current_version_activity_id, + ) + .values( + question=func.jsonb_set( + ActivityItemHistorySchema.question, + ["en"], + cast(new_question_value["el"], JSONB), + True, + ), + response_values=update_history_response_values, + ) + ) + + await session.execute(update_history_query, execution_options=immutabledict({"synchronize_session": "fetch"})) + print(f"Updated gender screen for applet_id: {applet_id}") From 2d4bb518f24021011b66ed7856f4f8a0f3cce83f Mon Sep 17 00:00:00 2001 From: Farmer Paul Date: Wed, 26 Feb 2025 10:26:18 -0400 Subject: [PATCH 12/14] fix: spelling of "male" in Greek (#1758) --- .../commands/patches/m2_8568_update_subscale_items_to_greek.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/apps/shared/commands/patches/m2_8568_update_subscale_items_to_greek.py b/src/apps/shared/commands/patches/m2_8568_update_subscale_items_to_greek.py index 82f3542a804..076dd627a8a 100644 --- a/src/apps/shared/commands/patches/m2_8568_update_subscale_items_to_greek.py +++ b/src/apps/shared/commands/patches/m2_8568_update_subscale_items_to_greek.py @@ -78,7 +78,7 @@ async def update_gender_screen(session: AsyncSession, applet_id: UUID): new_question_value = {"el": "Ποιο φύλο σας αποδόθηκε κατά την γέννησή σας;"} translations = { - "Male": "Ανδρας", + "Male": "Άντρας", "Female": "Γυναίκα", } From 33f10e355a93da1e16c38559b61dac0d734e037e Mon Sep 17 00:00:00 2001 From: Marty Date: Thu, 27 Feb 2025 04:54:21 -0800 Subject: [PATCH 13/14] fix: translate age subscale question with multiple activities (#1762) Co-authored-by: Marty <2614025+mbanting@users.noreply.github.com> --- .../m2_8568_update_subscale_items_to_greek.py | 59 +++++++++++-------- 1 file changed, 34 insertions(+), 25 deletions(-) diff --git a/src/apps/shared/commands/patches/m2_8568_update_subscale_items_to_greek.py 
b/src/apps/shared/commands/patches/m2_8568_update_subscale_items_to_greek.py index 076dd627a8a..9a22d665d44 100644 --- a/src/apps/shared/commands/patches/m2_8568_update_subscale_items_to_greek.py +++ b/src/apps/shared/commands/patches/m2_8568_update_subscale_items_to_greek.py @@ -37,39 +37,48 @@ async def update_age_screen(session: AsyncSession, applet_id: UUID): await session.execute(update_query, execution_options=immutabledict({"synchronize_session": "fetch"})) - # Determine the current version of the activity_id - subquery = ( - select(ActivityHistorySchema.id_version) - .join(ActivityItemSchema, ActivityHistorySchema.id == ActivityItemSchema.activity_id) - .join(ActivitySchema, ActivityItemSchema.activity_id == ActivitySchema.id) - .where(ActivityItemSchema.name == "age_screen", ActivitySchema.applet_id == applet_id) - .order_by(desc(ActivityHistorySchema.updated_at)) - .limit(1) + query = select(ActivityItemSchema.activity_id).where( + ActivityItemSchema.name == "age_screen", + ActivityItemSchema.activity_id.in_(select(ActivitySchema.id).where(ActivitySchema.applet_id == applet_id)), ) - current_version_activity_id = await session.execute(subquery) - current_version_activity_id = current_version_activity_id.scalar() + res = await session.execute(query) - print("Current activity version id: ", current_version_activity_id) + for item in res.mappings().all(): + print(f"Updating activity id: {item.activity_id}") - # Update the ActivityItemHistorySchema table - update_history_query = ( - update(ActivityItemHistorySchema) - .where( - ActivityItemHistorySchema.name == "age_screen", - ActivityItemHistorySchema.activity_id == current_version_activity_id, + # Determine the current version of the activity_id + subquery = ( + select(ActivityHistorySchema.id_version) + .where(ActivityHistorySchema.id == item.activity_id) + .order_by(desc(ActivityHistorySchema.updated_at)) + .limit(1) ) - .values( - question=func.jsonb_set( - ActivityItemHistorySchema.question, - ["en"], - 
cast(new_question_value["el"], JSONB), - True, + + current_version_activity_id = await session.execute(subquery) + current_version_activity_id = current_version_activity_id.scalar() + + print("Current activity version id: ", current_version_activity_id) + + # Update the ActivityItemHistorySchema table + update_history_query = ( + update(ActivityItemHistorySchema) + .where( + ActivityItemHistorySchema.name == "age_screen", + ActivityItemHistorySchema.activity_id == current_version_activity_id, + ) + .values( + question=func.jsonb_set( + ActivityItemHistorySchema.question, + ["en"], + cast(new_question_value["el"], JSONB), + True, + ) ) ) - ) - await session.execute(update_history_query, execution_options=immutabledict({"synchronize_session": "fetch"})) + await session.execute(update_history_query, execution_options=immutabledict({"synchronize_session": "fetch"})) + print(f"Updated age screen for applet_id: {applet_id}") From c9d231dd5a6ca8a309c989db36f4d0d33cf21ab2 Mon Sep 17 00:00:00 2001 From: Marty <2614025+mbanting@users.noreply.github.com> Date: Thu, 27 Feb 2025 11:42:47 -0800 Subject: [PATCH 14/14] fix: Remove duplicated function from merge conflicts --- src/apps/schedule/crud/events.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/src/apps/schedule/crud/events.py b/src/apps/schedule/crud/events.py index 9904bbff0c1..583fc0bd8ba 100644 --- a/src/apps/schedule/crud/events.py +++ b/src/apps/schedule/crud/events.py @@ -84,14 +84,6 @@ async def get_all_by_applet_id(self, applet_id: uuid.UUID) -> list[EventSchema]: result = await self._execute(query) return result.scalars().all() - async def get_all_by_applet_id(self, applet_id: uuid.UUID) -> list[EventSchema]: - """Return all events linked to a specific applet""" - query: Query = select(EventSchema) - query = query.where(EventSchema.applet_id == applet_id, EventSchema.is_deleted.is_(False)) - - result = await self._execute(query) - return result.scalars().all() - async def get_public_by_applet_id(self, 
applet_id: uuid.UUID) -> list[EventSchema]: """Return event instance.""" query: Query = select(EventSchema)