diff --git a/Pipfile b/Pipfile index 1c51ea3d..e8eec74c 100644 --- a/Pipfile +++ b/Pipfile @@ -13,7 +13,7 @@ geojson = "~=2.4" geomet = "~=0.2" gunicorn = "~=20.0.4" influxdb = "~=4.0" -pg8000 = ">=1.15" +pg8000 = "==1.16.5" pymongo = "~=3.4" pytest = "~=3.0" pytest-cov = "~=2.7.1" @@ -25,6 +25,7 @@ redis = "~=2.10" requests = ">=2.20" rethinkdb = "==2.3" pickle-mixin = "==1.0.2" +pytest-lazy-fixture = "~=0.6.3" [dev-packages] diff --git a/Pipfile.lock b/Pipfile.lock index 579ac704..d4132433 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "e7f8ad8cd55a53504c0e2755ef48f24361297d307013522c866cdfef4a2b6517" + "sha256": "3072a96e6dca3f7c7221c3b832fec937c7efba6f6cd2bde47463d6f81d7096d9" }, "pipfile-spec": 6, "requires": { @@ -26,11 +26,11 @@ }, "attrs": { "hashes": [ - "sha256:26b54ddbbb9ee1d34d5d3668dd37d6cf74990ab23c828c2888dccdceee395594", - "sha256:fce7fc47dfc976152e82d53ff92fa0407700c21acd20886a13777a0d20e655dc" + "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6", + "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700" ], "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==20.2.0" + "version": "==20.3.0" }, "certifi": { "hashes": [ @@ -57,10 +57,10 @@ }, "clickclick": { "hashes": [ - "sha256:4a890aaa9c3990cfabd446294eb34e3dc89701101ac7b41c1bff85fc210f6d23", - "sha256:ab8f229fb9906a86634bdfc6fabfc2b665f44804170720db4f6e1d98f8a58f3d" + "sha256:4efb13e62353e34c5eef7ed6582c4920b418d7dedc86d819e22ee089ba01802c", + "sha256:c8f33e6d9ec83f68416dd2136a7950125bd256ec39ccc9a85c6e280a16be2bb5" ], - "version": "==1.2.2" + "version": "==20.10.2" }, "connexion": { "extras": [ @@ -116,19 +116,19 @@ }, "coveralls": { "hashes": [ - "sha256:4430b862baabb3cf090d36d84d331966615e4288d8a8c5957e0fd456d0dd8bd6", - "sha256:b3b60c17b03a0dee61952a91aed6f131e0b2ac8bd5da909389c53137811409e1" + "sha256:2301a19500b06649d2ec4f2858f9c69638d7699a4c63027c5d53daba666147cc", + "sha256:b990ba1f7bc4288e63340be0433698c1efe8217f78c689d254c2540af3d38617" ], "index": "pypi", - "version": "==2.1.2" + "version": "==2.2.0" }, "crate": { "hashes": [ - "sha256:23e525cfe83aa2e00c8c00bd2c4f7b3b7038bd65e27bd347d24491e42c42554a", - "sha256:2de19674271e3a2feae8380fd9418bae536f5d246e93cd68dbb7a932f52c9c19" + "sha256:6f650c2efe250b89bf35f8fe3211eb37ebc8d76f7a9c09bd73db3076708fa2fc", + "sha256:ede9c7f5964c1181010b7cd3c22d3f9030c6ccb98d6d45da5bba8057d5c33bbf" ], "index": "pypi", - "version": "==0.25.0" + "version": "==0.26.0" }, "decorator": { "hashes": [ @@ -279,11 +279,11 @@ }, "more-itertools": { "hashes": [ - "sha256:6f83822ae94818eae2612063a5101a7311e68ae8002005b5e05f03fd74a86a20", - "sha256:9b30f12df9393f0d28af9210ff8efe48d10c94f73e5daf886f10c4b0b0b4f03c" + "sha256:8e1a2a43b2f2727425f2b5839587ae37093f19153dc26c0927d1048ff6557330", + "sha256:b3a9005928e5bed54076e6e549c792b306fddfe72b2d1d22dd63d42d5d3899cf" ], "markers": "python_version >= '3.5'", - "version": "==8.5.0" + "version": "==8.6.0" }, "openapi-spec-validator": { "hashes": [ @@ -326,63 +326,73 @@ }, "pymongo": { "hashes": [ - "sha256:03dc64a9aa7a5d405aea5c56db95835f6a2fa31b3502c5af1760e0e99210be30", - "sha256:05fcc6f9c60e6efe5219fbb5a30258adb3d3e5cbd317068f3d73c09727f2abb6", - "sha256:076a7f2f7c251635cf6116ac8e45eefac77758ee5a77ab7bd2f63999e957613b", - "sha256:137e6fa718c7eff270dbd2fc4b90d94b1a69c9e9eb3f3de9e850a7fd33c822dc", - "sha256:1f865b1d1c191d785106f54df9abdc7d2f45a946b45fd1ea0a641b4f982a2a77", - 
"sha256:213c445fe7e654621c6309e874627c35354b46ef3ee807f5a1927dc4b30e1a67", - "sha256:25e617daf47d8dfd4e152c880cd0741cbdb48e51f54b8de9ddbfe74ecd87dd16", - "sha256:3d9bb1ba935a90ec4809a8031efd988bdb13cdba05d9e9a3e9bf151bf759ecde", - "sha256:40696a9a53faa7d85aaa6fd7bef1cae08f7882640bad08c350fb59dee7ad069b", - "sha256:421aa1b92c291c429668bd8d8d8ec2bd00f183483a756928e3afbf2b6f941f00", - "sha256:4437300eb3a5e9cc1a73b07d22c77302f872f339caca97e9bf8cf45eca8fa0d2", - "sha256:455f4deb00158d5ec8b1d3092df6abb681b225774ab8a59b3510293b4c8530e3", - "sha256:475a34a0745c456ceffaec4ce86b7e0983478f1b6140890dff7b161e7bcd895b", - "sha256:4797c0080f41eba90404335e5ded3aa66731d303293a675ff097ce4ea3025bb9", - "sha256:4ae23fbbe9eadf61279a26eba866bbf161a6f7e2ffad14a42cf20e9cb8e94166", - "sha256:4b32744901ee9990aa8cd488ec85634f443526def1e5190a407dc107148249d7", - "sha256:50127b13b38e8e586d5e97d342689405edbd74ad0bd891d97ee126a8c7b6e45f", - "sha256:50531caa7b4be1c4ed5e2d5793a4e51cc9bd62a919a6fd3299ef7c902e206eab", - "sha256:63a5387e496a98170ffe638b435c0832c0f2011a6f4ff7a2880f17669fff8c03", - "sha256:68220b81850de8e966d4667d5c325a96c6ac0d6adb3d18935d6e3d325d441f48", - "sha256:689142dc0c150e9cb7c012d84cac2c346d40beb891323afb6caf18ec4caafae0", - "sha256:6a15e2bee5c4188369a87ed6f02de804651152634a46cca91966a11c8abd2550", - "sha256:7122ffe597b531fb065d3314e704a6fe152b81820ca5f38543e70ffcc95ecfd4", - "sha256:7307024b18266b302f4265da84bb1effb5d18999ef35b30d17592959568d5c0a", - "sha256:7a4a6f5b818988a3917ec4baa91d1143242bdfece8d38305020463955961266a", - "sha256:83c5a3ecd96a9f3f11cfe6dfcbcec7323265340eb24cc996acaecea129865a3a", - "sha256:890b0f1e18dbd898aeb0ab9eae1ab159c6bcbe87f0abb065b0044581d8614062", - "sha256:8deda1f7b4c03242f2a8037706d9584e703f3d8c74d6d9cac5833db36fe16c42", - "sha256:8ea13d0348b4c96b437d944d7068d59ed4a6c98aaa6c40d8537a2981313f1c66", - "sha256:91e96bf85b7c07c827d339a386e8a3cf2e90ef098c42595227f729922d0851df", - "sha256:96782ebb3c9e91e174c333208b272ea144ed2a684413afb1038e3b3342230d72", - "sha256:9755c726aa6788f076114dfdc03b92b03ff8860316cca00902cce88bcdb5fedd", - "sha256:9dbab90c348c512e03f146e93a5e2610acec76df391043ecd46b6b775d5397e6", - "sha256:9ee0eef254e340cc11c379f797af3977992a7f2c176f1a658740c94bf677e13c", - "sha256:9fc17fdac8f1973850d42e51e8ba6149d93b1993ed6768a24f352f926dd3d587", - "sha256:a2787319dc69854acdfd6452e6a8ba8f929aeb20843c7f090e04159fc18e6245", - "sha256:b7c522292407fa04d8195032493aac937e253ad9ae524aab43b9d9d242571f03", - "sha256:bd312794f51e37dcf77f013d40650fe4fbb211dd55ef2863839c37480bd44369", - "sha256:c0d660a186e36c526366edf8a64391874fe53cf8b7039224137aee0163c046df", - "sha256:c4869141e20769b65d2d72686e7a7eb141ce9f3168106bed3e7dcced54eb2422", - "sha256:cc4057f692ac35bbe82a0a908d42ce3a281c9e913290fac37d7fa3bd01307dfb", - "sha256:cccf1e7806f12300e3a3b48f219e111000c2538483e85c869c35c1ae591e6ce9", - "sha256:ce208f80f398522e49d9db789065c8ad2cd37b21bd6b23d30053474b7416af11", - "sha256:d0565481dc196986c484a7fb13214fc6402201f7fb55c65fd215b3324962fe6c", - "sha256:d1b3366329c45a474b3bbc9b9c95d4c686e03f35da7fd12bc144626d1f2a7c04", - "sha256:d226e0d4b9192d95079a9a29c04dd81816b1ce8903b8c174a39224fe978547cb", - "sha256:d38b35f6eef4237b1d0d8e845fc1546dad85c55eba447e28c211da8c7ef9697c", - "sha256:d64c98277ea80e4484f1332ab107e8dfd173a7dcf1bdbf10a9cccc97aaab145f", - "sha256:d9de8427a5601799784eb0e7fa1b031aa64086ce04de29df775a8ca37eedac41", - "sha256:e6a15cf8f887d9f578dd49c6fb3a99d53e1d922fdd67a245a67488d77bf56eb2", - "sha256:e8c446882cbb3774cd78c738c9f58220606b702b7c1655f1423357dc51674054", - 
"sha256:e8d188ee39bd0ffe76603da887706e4e7b471f613625899ddf1e27867dc6a0d3", - "sha256:ef76535776c0708a85258f6dc51d36a2df12633c735f6d197ed7dfcaa7449b99", - "sha256:f6efca006a81e1197b925a7d7b16b8f61980697bb6746587aad8842865233218" + "sha256:00f6c32f86a5bd1cbefcc0a27ea06565628de3bb2e6786d3f0dce0330e70c958", + "sha256:016e8162b57e2a45cb8d2356f39795ccff2ee65fd79fe078de4f9aa78ef1994b", + "sha256:078e74cffb4955a454dd0955c3fa38327185a41447ac4e368f81c2f0c07e6559", + "sha256:079a30c21d3c334ee65581a8cac5380e94521970423996c5b18a7c550230d94c", + "sha256:0b5aa85a04efcf22c176de25e3fce675510d7700f523728fa9485d576db41358", + "sha256:0dd8c0367639cd5cf84be91af6b733076119745aea6e53fdf9e581819d911eac", + "sha256:1529b23a51ef8613712b3e19225690564955250932d58487af6c060413ce0f1f", + "sha256:17a3b2148b9e0914dc5f0d6a5c636f81dc4b428b80ea45576f79cfe619844c6d", + "sha256:1f5fabe75c9b7eb5a42dac9717f952a879ab3705bcf7e9ef744cdbdfd37bcf3d", + "sha256:220216da1d4fb10f941ff5e408f2958896fe534283bb3b1c1c17d4b0ac5d8b45", + "sha256:251acfa87e07e47044ed7f6157fc63a95020bb4ee9d705fb2faf3b67e6a3574e", + "sha256:2542b21b08dc30bf0a69de55a42225f25b0af0dea7852edf2011952abb50e7b4", + "sha256:2bcbde25342fa0991b0c144c8eafadc77e605e7940bf462922f6d550f88d6777", + "sha256:2c0b0d201182bfbbfb2c679af3118ca53926a31d8c0c21e9b7943f8264ec0052", + "sha256:319f1b5a8373e280026905093aaacf4cca858a9ae653e456b9f9f9ad5f308088", + "sha256:342248b25c193ab20e1145989455d614d4d553334576a72be600ee371fa5de41", + "sha256:3b8076ff801dca0920f1b5c76a0e062dc26bb76de7e79efcf347c3a5ff776127", + "sha256:3cf7726216a8792d147ba44433fddc19ed149531fb23899ce82d24a0a90ec2fd", + "sha256:4266ebb14bed0206c73e2f626949679c23a6f935ea4b60529d7c3551f2f3051a", + "sha256:4cc8cf971ee53ad65e53b80a6f611070dbe55640b447ae9b2b98329aebd21155", + "sha256:4da19b5c555cf1d8b8a0b980d9c97b1b0f27e05bcf278bf64cc6c30b697a59f9", + "sha256:4f0023db6168f052ffeec435ca0608ffe8abac17a36b8084bdc348c978e08a23", + "sha256:5012342f457a544818254a89f7e7a4ecd05c4eaa89ed68ae58727039c92ff0f7", + "sha256:56bbbccf22fd91ae88b2dffe56dceb84d80fa808065e6bcbedb86ec7e0f84b3b", + "sha256:5a07a9983e0bc51ad61a16c228d1b60d1f98f7df5f225f435b41f8921d335e06", + "sha256:5a88fad3dcfaa222383ceb53af9a030a841ad998636648a748d79515c8afb6b4", + "sha256:5fc04e445e58103bfddb601822ab355ffb8fb760142593e2b0f20c3940859352", + "sha256:613d30623bd1f9418d5a6999f73066b01831bf7c7789bc3fe2bf44b5fe5dc67d", + "sha256:6e7b2b589a600f467e9159df907638c2d08aca46b0f74d88ceeaa185abd9069b", + "sha256:74b6e8e240e53518a701f4cd56a518c8de2631d6019e93295499db15cf46d973", + "sha256:78d4142116d6d910f1c195f3253b6209f21bb1b06fb6c6ebf90cbf80518c4071", + "sha256:82bc20e554fe9175e6ae53db74c3f094c4d752a55e3b363b93ff93d87a868cb7", + "sha256:8becbccb58b7b37ce2aa5f387f298914ec520747f11edfbc31347bd2ba7e3481", + "sha256:903396921ad52c63c423683a141391e28edb8b6dfbd2388a6848b052a9e23863", + "sha256:9a5667eb4bc9ed805d1a06ca8b5ff7ee25df666b05cbb8f58f9ac16cac243d0b", + "sha256:9f672a8b5972a2db3284f8c0324dcaaeceedead9b4e5c836e93092b599f2dbf0", + "sha256:a08c60335a4b1c6348d5be176f379f7b69f2021d1ebaafb11586007f6268a423", + "sha256:a122c9e30e49bbd8c5c423e3ea2bcff5deb8a2c504d88cbfc3c21b036295bbf3", + "sha256:a6bf56c1f6d13b0e22db604c73fba6feca088fe7f6aa978cc215f83e2005d765", + "sha256:a9c1a2538cd120283e7137ac97ce27ebdfcb675730c5055d6332b0043f4e5a55", + "sha256:aacc9b646e142e7d32b88f0dccc6ab28f669ecf3ccc7212a274b99c83e228ef1", + "sha256:ae0403befca6a9088be702c1d94fc1b17c333cd84a43328c973d223991c41936", + "sha256:b3b939780963164086fc256436c1bf9301d4c5c99026e2c281b21237234aaa2c", + 
"sha256:b6282855f9193f4e7ae07c2d138b583d487d0e66add62eda7013365815188ce9", + "sha256:bb3f19af1949cbf93b17021c8c61e14e697c3f5c8923134b085dcef9d271b699", + "sha256:be124527bfc30869e8a17915e1e960150757553d58c98e56c598fbb85697e32e", + "sha256:c66de369d459f081a1860c58c6218da5e30a4c5d07277526f66f6c0b0efe742f", + "sha256:c756d00b728f0d5ec65519d9005389fea519b2ad3aef0896c49ce80e6da8b547", + "sha256:c7953352bc27ac5fbf79d43ef7bf576ad06fa06c0ae0d6ad7c36b14cd596d565", + "sha256:cb81aa36a171ed1c28d615577ab5feae8e0b8e48818833663b446dd2bb8b53cd", + "sha256:cc375f5cea8635597c21ff6bc61486ebe5dca5e662982c9e2b58a9106f92b56e", + "sha256:d349cfc776f8859c2f99ff916e307555f5615ffabfbd6162f3822f21aa1e22ed", + "sha256:dbc23ece1b111a10eb6d2475a7726b70418303d2e05078d223e7f97b286745a7", + "sha256:e3158d2824391c52020d67e629d2586af774b543a75dc9f64eb830991ac2776e", + "sha256:e5b4ba4939c2ab6550ecef1ccd7b00537de7a7e18a8f03bce0fc4786111b4d47", + "sha256:e6d72517fa7370841981770c3802e7a8ca7e94ead1fba9981349fbe8e539f7eb", + "sha256:e6e6089393646c1ef865484c27871d52ead69641dce5b53cbae2096cec615151", + "sha256:e83d61f9a247344c701147934f203117c3064c982d35396565a6ca8356bc0ea9", + "sha256:e9d7af8f668d2880fff8539188694e75e0b91d37174672a59dc5c5a0fea9f60d", + "sha256:ebf21c07353d313421212e8ac0b21b6161c81aa71a12471b58629e38c784a751", + "sha256:eebe522043450e8cf83ab7be2fc0268dfe702de586970d752cb012d6ce72309f", + "sha256:f1ee136a8648cd76b44afdff99096823e68be90d02188d30e2ccd00b58e9b353", + "sha256:f648d58915eb339329741296599fb25bc2a76474e64bdfeae4423ae83b312fb8", + "sha256:fd09c310c54c6861e1488fcd30ceffa5dcd6c2dfe9f8409f47a8266fdc698547" ], "index": "pypi", - "version": "==3.11.0" + "version": "==3.11.1" }, "pyrsistent": { "hashes": [ @@ -415,6 +425,14 @@ "index": "pypi", "version": "==0.15.1" }, + "pytest-lazy-fixture": { + "hashes": [ + "sha256:0e7d0c7f74ba33e6e80905e9bfd81f9d15ef9a790de97993e34213deb5ad10ac", + "sha256:e0b379f38299ff27a653f03eaa69b08a6fd4484e46fd1c9907d984b9f9daeda6" + ], + "index": "pypi", + "version": "==0.6.3" + }, "python-dateutil": { "hashes": [ "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c", @@ -425,21 +443,23 @@ }, "pytz": { "hashes": [ - "sha256:a494d53b6d39c3c6e44c3bec237336e14305e4f29bbf800b599253057fbb79ed", - "sha256:c35965d010ce31b23eeb663ed3cc8c906275d6be1a34393a1d73a41febf4a048" + "sha256:3e6b7dd2d1e0a59084bcee14a17af60c5c562cdc16d828e8eba2e683d3a7e268", + "sha256:5c55e189b682d420be27c6995ba6edce0c0a77dd67bfbe2ae6607134d5851ffd" ], - "version": "==2020.1" + "version": "==2020.4" }, "pyyaml": { "hashes": [ "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97", "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76", "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2", + "sha256:6034f55dab5fea9e53f436aa68fa3ace2634918e8b5994d82f3621c04ff5ed2e", "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648", "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf", "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f", "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2", "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee", + "sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a", "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d", "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c", "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a" @@ -464,11 
+484,11 @@ }, "requests": { "hashes": [ - "sha256:b3559a131db72c33ee969480840fff4bb6dd111de7dd27c8ee1f820f4f00231b", - "sha256:fe75cc94a9443b9246fc7049224f75604b113c36acb93f87b80ed42c44cbb898" + "sha256:7f1a0b932f4a60a1a65caa4263921bb7d9ee911957e0ae4a23a6dd08185ad5f8", + "sha256:e786fa28d8c9154e6a4de5d46a1d921b8749f8b74e28bde23768e5e16eece998" ], "index": "pypi", - "version": "==2.24.0" + "version": "==2.25.0" }, "rethinkdb": { "hashes": [ @@ -502,11 +522,11 @@ }, "urllib3": { "hashes": [ - "sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a", - "sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461" + "sha256:19188f96923873c92ccb987120ec4acaa12f0461fa9ce5d3d0772bc965a39e08", + "sha256:d8ff90d979214d7b4f8ce956e80f4028fc6860e4431f731ea4a8c08f23f99473" ], "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", - "version": "==1.25.10" + "version": "==1.26.2" }, "werkzeug": { "hashes": [ diff --git a/docker/docker-compose-dev.yml b/docker/docker-compose-dev.yml index 32ea206f..93cd078c 100644 --- a/docker/docker-compose-dev.yml +++ b/docker/docker-compose-dev.yml @@ -96,5 +96,3 @@ volumes: networks: default: - driver_opts: - com.docker.network.driver.mtu: ${DOCKER_MTU:-1400} diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index d0510fc8..7828164b 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -70,5 +70,3 @@ volumes: networks: default: - driver_opts: - com.docker.network.driver.mtu: ${DOCKER_MTU:-1400} diff --git a/run_tests.sh b/run_tests.sh index 196ad6ab..c29a9c18 100644 --- a/run_tests.sh +++ b/run_tests.sh @@ -16,7 +16,7 @@ tot=$? cd - cd src/reporter/tests -test_suite_header "REPORTER (Crate)" +test_suite_header "REPORTER" sh run_tests.sh loc=$? if [ "$tot" -eq 0 ]; then @@ -24,15 +24,6 @@ if [ "$tot" -eq 0 ]; then fi cd - -cd src/reporter/tests -test_suite_header "REPORTER (Timescale)" -sh run_tests.timescale.sh -loc=$? 
-if [ "$tot" -eq 0 ]; then
-  tot=$loc
-fi
-cd -
-
 cd src/geocoding/tests
 test_suite_header "GEO-CODING"
 sh run_tests.sh
diff --git a/src/conftest.py b/src/conftest.py
index e0f2eb49..2a6f27a6 100644
--- a/src/conftest.py
+++ b/src/conftest.py
@@ -15,7 +15,7 @@
 CRATE_PORT = 4200
 
 POSTGRES_HOST = os.environ.get('POSTGRES_HOST', 'timescale')
-POSTGRES_HOST = 5432
+POSTGRES_PORT = 5432
 
 REDIS_HOST = os.environ.get('REDIS_HOST', 'redis')
 REDIS_PORT = 6379
@@ -70,6 +70,12 @@ def insert(self, entity, service=None, service_path=None):
                           headers=headers(service, service_path))
         return r
 
+    def update_attr(self, entity_id, attrs, service=None, service_path=None):
+        r = requests.patch('{}/v2/entities/{}/attrs'.format(self.url, entity_id),
+                           data=json.dumps(attrs),
+                           headers=headers(service, service_path))
+        return r
+
     def delete(self, entity_id, service=None, service_path=None):
         r = requests.delete('{}/v2/entities/{}'.format(self.url, entity_id),
                             headers=headers(service, service_path))
@@ -184,6 +190,54 @@ def clean(self, fiware_service=None, **kwargs):
        yield trans
 
 
+@pytest.fixture()
+def timescale_translator():
+    from src.translators.timescale import PostgresTranslator, \
+        PostgresConnectionData
+
+    class Translator(PostgresTranslator):
+
+        def insert(self, entities,
+                   fiware_service=None, fiware_servicepath=None):
+            r = PostgresTranslator.insert(self, entities,
+                                          fiware_service, fiware_servicepath)
+            return r
+
+        def delete_entity(self, entity_id, entity_type=None,
+                          fiware_service=None, **kwargs):
+            r = PostgresTranslator.delete_entity(self, entity_id, entity_type,
+                                                 fiware_service=fiware_service,
+                                                 **kwargs)
+            return r
+
+        def delete_entities(self, entity_type=None, fiware_service=None,
+                            **kwargs):
+            r = PostgresTranslator.delete_entities(self, entity_type,
+                                                   fiware_service=fiware_service,
+                                                   **kwargs)
+            return r
+
+        def entity_types(self, fiware_service=None, **kwargs):
+            r = PostgresTranslator.query_entity_types(self, entity_type=None,
+                                                      fiware_service=fiware_service,
+                                                      **kwargs)
+            return r
+
+        def clean(self, fiware_service=None, **kwargs):
+            types = PostgresTranslator.query_entity_types(self,
+                                                          fiware_service=fiware_service,
+                                                          **kwargs)
+            if types:
+                for t in types:
+                    PostgresTranslator.drop_table(self, t,
+                                                  fiware_service=fiware_service,
+                                                  **kwargs)
+
+    with Translator(PostgresConnectionData(host=POSTGRES_HOST,
+                                           port=POSTGRES_PORT)) as trans:
+        yield trans
+
+
 @pytest.fixture
 def entity():
     entity = {
@@ -454,3 +508,71 @@ def traffic_flow_observed():
         }
     }
     return entity
+
+
+@pytest.fixture
+def ngsi_ld():
+    """
+    :return: dict
+        The NGSI LD model as received within an Orion notification.
+    """
+    entity = {
+        "id": "urn:ngsi-ld:Streetlight:streetlight:guadalajara:4567",
+        "type": "Streetlight",
+        "location": {
+            "type": "GeoProperty",
+            "value": {
+                "type": "Point",
+                "coordinates": [-3.164485591715449, 40.62785133667262]
+            }
+        },
+        "areaServed": {
+            "type": "Property",
+            "value": "Roundabouts city entrance"
+        },
+        "status": {
+            "type": "Property",
+            "value": "ok"
+        },
+        "refStreetlightGroup": {
+            "type": "Relationship",
+            "object": "urn:ngsi-ld:StreetlightGroup:streetlightgroup:G345"
+        },
+        "refStreetlightModel": {
+            "type": "Relationship",
+            "object": "urn:ngsi-ld:StreetlightModel:streetlightmodel:STEEL_Tubular_10m"
+        },
+        "circuit": {
+            "type": "Property",
+            "value": "C-456-A467"
+        },
+        "lanternHeight": {
+            "type": "Property",
+            "value": 10
+        },
+        "locationCategory": {
+            "type": "Property",
+            "value": "centralIsland"
+        },
+        "powerState": {
+            "type": "Property",
+            "value": "off"
+        },
+        "controllingMethod": {
+            "type": "Property",
+            "value": "individual"
+        },
+        "dateLastLampChange": {
+            "type": "Property",
+            "value": {
+                "@type": "DateTime",
+                "@value": "2016-07-08T08:02:21.753Z"
+            }
+        },
+        "@context": [
+            "https://schema.lab.fiware.org/ld/context",
+            "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld"
+        ]
+    }
+
+    return entity
diff --git a/src/geocoding/geojson/wktcodec.py b/src/geocoding/geojson/wktcodec.py
index 46cec13b..8ac146d9 100644
--- a/src/geocoding/geojson/wktcodec.py
+++ b/src/geocoding/geojson/wktcodec.py
@@ -79,7 +79,12 @@ def decode_wkb_hexstr(geom: str) -> dict:
     :return: the corresponding GeoJSON.
     """
     geom_bytes = bytes.fromhex(geom)
-    return decode_wkb(geom_bytes)
+    geojson = decode_wkb(geom_bytes)
+    if 'meta' in geojson:
+        geojson.pop('meta')
+    if 'crs' in geojson:
+        geojson.pop('crs')
+    return geojson
 
 
 # TODO. Use shapely?
diff --git a/src/reporter/tests/docker-compose.timescale.yml b/src/reporter/tests/docker-compose.timescale.yml
deleted file mode 100644
index 3c9fb6de..00000000
--- a/src/reporter/tests/docker-compose.timescale.yml
+++ /dev/null
@@ -1,117 +0,0 @@
-version: '3'
-
-services:
-
-  timescale:
-    image: timescale/timescaledb-postgis:${TIMESCALE_VERSION}
-    ports:
-      - "54320:5432"
-      # Don't expose container port 5432 with the same number outside of the
-      # swarm. In the Travis test env, there's already a PG instance running
-      # on port 5432!
-    networks:
-      - reportertests
-    environment:
-      - POSTGRES_PASSWORD=*
-
-  quantumleap-db-setup:
-    build: ../../../timescale-container/
-    image: quantumleap-db-setup
-    depends_on:
-      - timescale
-    networks:
-      - reportertests
-    environment:
-      - QL_DB_PASS=*
-      - QL_DB_INIT_DIR=/ql-db-init
-      - PG_HOST=timescale
-      - PG_PASS=*
-
-  quantumleap:
-    build: ../../../
-    image: smartsdk/quantumleap
-    ports:
-      - "8668:8668"
-    depends_on:
-      - timescale
-    networks:
-      - reportertests
-    environment:
-      - USE_GEOCODING=False
-      - QL_DEFAULT_DB=timescale
-      - POSTGRES_HOST=${POSTGRES_HOST}
-      - POSTGRES_PORT=54320
-      - LOGLEVEL=DEBUG
-
-networks:
-  reportertests:
-    driver: bridge
-
-# TODO: QL PG host.
-# Setting POSTGRES_HOST=timescale doesn't work. The driver fails to connect,
-# see debug session below. Why is that? Setting POSTGRES_HOST=${POSTGRES_HOST}
-# as done in the above quantumleap service stanza works on my machine but
-# I'm not entirely sure it's 100% portable...
-#
-# Here's the transcript of a debug session on my machine.
-#
-# $ docker-compose -f docker-compose.timescale.yml up -d
-# $ docker ps
-# CONTAINER ID        IMAGE ...
-# 3e516e0ebba4        smartsdk/quantumleap ...
-# $ docker exec -it 3e516e0ebba4 sh
-#
-# /src/ngsi-timeseries-api/src # printenv
-# ...
-# POSTGRES_HOST=192.0.0.1
-# LOGLEVEL=DEBUG
-# ...
-# POSTGRES_PORT=54320
-# QL_DEFAULT_DB=timescale
-# ...
-#
-# /src/ngsi-timeseries-api/src # nslookup timescale
-# Server: 127.0.0.11
-# Address: 127.0.0.11:53
-#
-# Non-authoritative answer:
-# Non-authoritative answer:
-# Name: timescale
-# Address: 172.28.0.2
-#
-# /src/ngsi-timeseries-api/src # ping -c 1 timescale
-# PING timescale (172.28.0.2): 56 data bytes
-# ...
-# 1 packets transmitted, 1 packets received, 0% packet loss
-# ...
-#
-# /src/ngsi-timeseries-api/src # python
-# >>> import pg8000
-# >>> pg8000.connect(host='timescale', port=54320,
-#                    database='quantumleap', user='quantumleap', password='*')
-# ...
-# pg8000.exceptions.InterfaceError: Can't create a connection to host
-# timescale and port 54320 (timeout is None and source_address is None).
-# >>> pg8000.connect(host='172.28.0.2', port=54320,
-#                    database='quantumleap', user='quantumleap', password='*')
-# ...
-# pg8000.exceptions.InterfaceError: Can't create a connection to host
-# 172.28.0.2 and port 54320 (timeout is None and source_address is None).
-# >>> pg8000.connect(host='192.0.0.1', port=54320,
-#                    database='quantumleap', user='quantumleap', password='*')
-#
-#
-# >>> quit()
-# /src/ngsi-timeseries-api/src # exit
-#
-# $ psql postgres://postgres:*@localhost:54320 -c 'SELECT * FROM pg_hba_file_rules'
-# line_number | type | database | user_name | address | netmask | auth_method | options | error
-# -------------+-------+---------------+-----------+-----------+-----------------------------------------+-------------+---------+-------
-# 84 | local | {all} | {all} | | | trust | |
-# 86 | host | {all} | {all} | 127.0.0.1 | 255.255.255.255 | trust | |
-# 88 | host | {all} | {all} | ::1 | ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff | trust | |
-# 91 | local | {replication} | {all} | | | trust | |
-# 92 | host | {replication} | {all} | 127.0.0.1 | 255.255.255.255 | trust | |
-# 93 | host | {replication} | {all} | ::1 | ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff | trust | |
-# 95 | host | {all} | {all} | all | | md5 | |
\ No newline at end of file
diff --git a/src/reporter/tests/docker-compose.yml b/src/reporter/tests/docker-compose.yml
index cba082a9..131f4dc9 100644
--- a/src/reporter/tests/docker-compose.yml
+++ b/src/reporter/tests/docker-compose.yml
@@ -32,12 +32,39 @@ services:
     depends_on:
       - orion
       - crate
+      - timescale
+    volumes:
+      - ${PWD}/ql-config.yml:/config/ql-config.yml
     networks:
       - reportertests
     environment:
-      - USE_GEOCODING=True
+      - USE_GEOCODING=False
       - REDIS_HOST=redis
-      - LOGLEVEL=DEBUG
+      - LOGLEVEL=INFO
+      - POSTGRES_HOST=timescale
+      - QL_CONFIG=/config/ql-config.yml
+
+  timescale:
+    image: timescale/timescaledb-postgis:${TIMESCALE_VERSION}
+    ports:
+      - "5432:5432"
+    networks:
+      - reportertests
+    environment:
+      - POSTGRES_PASSWORD=*
+
+  quantumleap-db-setup:
+    build: ../../../timescale-container/
+    image: quantumleap-db-setup
+    depends_on:
+      - timescale
+    networks:
+      - reportertests
+    environment:
+      - QL_DB_PASS=*
+      - QL_DB_INIT_DIR=/ql-db-init
+      - PG_HOST=timescale
+      - PG_PASS=*
 
   crate:
     image: crate:${CRATE_VERSION}
diff --git a/src/reporter/tests/ql-config.yml b/src/reporter/tests/ql-config.yml
new file mode 100644
index 00000000..9dd43a91
--- /dev/null
+++ b/src/reporter/tests/ql-config.yml
@@ -0,0 +1,6 @@
+tenants:
+  t1:
+    backend: Crate
+  t2:
+    backend: Timescale
+default-backend: Crate
diff --git a/src/reporter/tests/run_tests.sh b/src/reporter/tests/run_tests.sh
index 1c7cc246..9293d8b9 100644
--- a/src/reporter/tests/run_tests.sh
+++ b/src/reporter/tests/run_tests.sh
@@ -1,14 +1,20 @@
 #!/usr/bin/env bash
 
+POSTGRES_PORT='5432'
+
 docker build -t smartsdk/quantumleap ../../../
 
 docker-compose up -d
 sleep 20
 
 cd ../../../
-pytest src/reporter/ --cov-report= --cov-config=.coveragerc --cov-append --cov=src/
+pytest src/reporter/ \
+    --cov-report= --cov-config=.coveragerc --cov-append --cov=src/ \
+    --ignore=src/reporter/tests/test_health.py
 r=$?
 cd -
 
+unset POSTGRES_PORT
+
 docker-compose down -v
 exit $r
diff --git a/src/reporter/tests/run_tests.timescale.sh b/src/reporter/tests/run_tests.timescale.sh
deleted file mode 100644
index ca946cb8..00000000
--- a/src/reporter/tests/run_tests.timescale.sh
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env bash
-
-docker build -t smartsdk/quantumleap ../../../
-
-docker-compose -f docker-compose.timescale.yml up -d
-sleep 10
-
-# Set Postgres port to same value as in docker-compose.timescale.yml
-export POSTGRES_PORT='54320'
-
-cd ../../../
-
-# pytest src/reporter/ --cov-report= --cov-config=.coveragerc --cov-append --cov=src/
-# TODO: comment in above and zap line below when Timescale backend
-# is fully functional.
-
-pytest src/reporter/ \
-    --cov-report= --cov-config=.coveragerc --cov-append --cov=src/ \
-    --ignore=src/reporter/tests/test_health.py \
-    --ignore=src/reporter/tests/test_integration.py \
-    --ignore=src/reporter/tests/test_multitenancy.py \
-    --ignore=src/reporter/tests/test_notify.py \
-    --ignore=src/reporter/tests/test_sql_injection.py \
-    --ignore=src/reporter/tests/test_subscribe.py \
-    --ignore=src/reporter/tests/test_time_format.py
-
-r=$?
-cd -
-
-unset POSTGRES_PORT
-
-docker-compose -f docker-compose.timescale.yml down -v
-exit $r
-
-# NOTE. Ignored tests.
-# See https://github.com/smartsdk/ngsi-timeseries-api/issues/378 diff --git a/src/reporter/tests/test_Headers.py b/src/reporter/tests/test_Headers.py index 07b86c80..140165da 100644 --- a/src/reporter/tests/test_Headers.py +++ b/src/reporter/tests/test_Headers.py @@ -1,6 +1,7 @@ from datetime import datetime from conftest import QL_URL from utils.common import assert_equal_time_index_arrays +from reporter.tests.utils import delete_entity_type import copy import json import pytest @@ -38,6 +39,7 @@ def test_for_valid_headers(notification): ] } assert res_get.json() == exp_values + delete_entity_type('test', 'Room') def test_for_invalid_headers(notification): notification['data'][0] = { diff --git a/src/reporter/tests/test_NTNE.py b/src/reporter/tests/test_NTNE.py index 6f862787..7a42d78f 100644 --- a/src/reporter/tests/test_NTNE.py +++ b/src/reporter/tests/test_NTNE.py @@ -10,6 +10,8 @@ entity_id_1 = 'Kitchen0' n_days = 30 +services = ['t1', 't2'] + def query_url(): url = "{qlUrl}/entities" @@ -20,20 +22,23 @@ def query_url(): @pytest.fixture(scope='module') def reporter_dataset(): - service = '' - insert_test_data(service, [entity_type], n_entities=1, index_size=30, + for service in services: + insert_test_data(service, [entity_type], n_entities=1, index_size=30, entity_id=entity_id) - insert_test_data(service, [entity_type_1], n_entities=1, index_size=30, + insert_test_data(service, [entity_type_1], n_entities=1, index_size=30, entity_id=entity_id_1, index_base=datetime(1980, 1, 1, 0, 0, 0, 0)) yield - delete_test_data(service, [entity_type, entity_type_1]) + for service in services: + delete_test_data(service, [entity_type, entity_type_1]) # TODO we removed order comparison given that in # CRATE4.0 union all and order by don't work correctly with offset -def test_NTNE_defaults(reporter_dataset): - r = requests.get(query_url()) +@pytest.mark.parametrize("service", services) +def test_NTNE_defaults(service, reporter_dataset): + h = {'Fiware-Service': service} + r = requests.get(query_url(), headers=h) assert r.status_code == 200, r.text obtained = r.json() @@ -54,11 +59,13 @@ def test_NTNE_defaults(reporter_dataset): assert obtained == expected -def test_not_found(): +@pytest.mark.parametrize("service", services) +def test_not_found(service): query_params = { 'type': 'NotThere' } - r = requests.get(query_url(), params=query_params) + h = {'Fiware-Service': service} + r = requests.get(query_url(), params=query_params, headers=h) assert r.status_code == 404, r.text assert r.json() == { "error": "Not Found", @@ -66,12 +73,14 @@ def test_not_found(): } -def test_NTNE_type(reporter_dataset): +@pytest.mark.parametrize("service", services) +def test_NTNE_type(service, reporter_dataset): # Query query_params = { 'type': entity_type } - r = requests.get(query_url(), params=query_params) + h = {'Fiware-Service': service} + r = requests.get(query_url(), params=query_params, headers=h) assert r.status_code == 200, r.text # Assert @@ -91,13 +100,15 @@ def test_NTNE_type(reporter_dataset): # TODO we removed order comparison given that in # CRATE4.0 union all and order by don't work correctly with offset -def test_NTNE_fromDate_toDate(reporter_dataset): +@pytest.mark.parametrize("service", services) +def test_NTNE_fromDate_toDate(service, reporter_dataset): # Query query_params = { 'fromDate': "1970-01-06T00:00:00+00:00", 'toDate': "1980-01-17T00:00:00+00:00", } - r = requests.get(query_url(), params=query_params) + h = {'Fiware-Service': service} + r = requests.get(query_url(), params=query_params, 
headers=h) assert r.status_code == 200, r.text expected_type = 'Room' @@ -125,13 +136,15 @@ def test_NTNE_fromDate_toDate(reporter_dataset): assert obtained == expected -def test_NTNE_fromDate_toDate_with_quotes(reporter_dataset): +@pytest.mark.parametrize("service", services) +def test_NTNE_fromDate_toDate_with_quotes(service, reporter_dataset): # Query query_params = { 'fromDate': '"1970-01-06T00:00:00+00:00"', 'toDate': '"1980-01-17T00:00:00+00:00"', } - r = requests.get(query_url(), params=query_params) + h = {'Fiware-Service': service} + r = requests.get(query_url(), params=query_params, headers=h) assert r.status_code == 200, r.text expected_type = 'Room' @@ -161,12 +174,14 @@ def test_NTNE_fromDate_toDate_with_quotes(reporter_dataset): # TODO we removed order comparison given that in # CRATE4.0 union all and order by don't work correctly with offset -def test_NTNE_limit(reporter_dataset): +@pytest.mark.parametrize("service", services) +def test_NTNE_limit(service, reporter_dataset): # Query query_params = { 'limit': 1 } - r = requests.get(query_url(), params=query_params) + h = {'Fiware-Service': service} + r = requests.get(query_url(), params=query_params, headers=h) assert r.status_code == 200, r.text expected_type = 'Kitchen' @@ -187,12 +202,14 @@ def test_NTNE_limit(reporter_dataset): # TODO we removed order comparison given that in # CRATE4.0 union all and order by don't work correctly with offset -def test_NTNE_offset(reporter_dataset): +@pytest.mark.parametrize("service", services) +def test_NTNE_offset(service, reporter_dataset): # Query query_params = { 'offset': 1 } - r = requests.get(query_url(), params=query_params) + h = {'Fiware-Service': service} + r = requests.get(query_url(), params=query_params, headers=h) assert r.status_code == 200, r.text expected_type = 'Kitchen' @@ -211,7 +228,8 @@ def test_NTNE_offset(reporter_dataset): assert len(obtained) == len(expected) -def test_NTNE_combined(reporter_dataset): +@pytest.mark.parametrize("service", services) +def test_NTNE_combined(service, reporter_dataset): # Query query_params = { 'type': entity_type, @@ -220,7 +238,8 @@ def test_NTNE_combined(reporter_dataset): 'toDate': "1980-01-20T00:00:00+00:00", 'limit': 1, } - r = requests.get(query_url(), params=query_params) + h = {'Fiware-Service': service} + r = requests.get(query_url(), params=query_params, headers=h) assert r.status_code == 200, r.text expected_type = 'Room' diff --git a/src/reporter/tests/test_NTNE1A.py b/src/reporter/tests/test_NTNE1A.py index 860a9664..22dae598 100644 --- a/src/reporter/tests/test_NTNE1A.py +++ b/src/reporter/tests/test_NTNE1A.py @@ -15,6 +15,7 @@ value_generator=temperatures) index = result_gen.time_index() +services = ['t1', 't2'] def ix_intervals(): bs = list(range(0, result_gen.time_index_size)) + [None] @@ -34,15 +35,16 @@ def query_url(values=False): @pytest.fixture(scope='module') def reporter_dataset(): - service = '' entity_type = result_gen.formatter.entity_type sz = result_gen.time_index_size - insert_test_data(service, [entity_type], n_entities=1, + for service in services: + insert_test_data(service, [entity_type], n_entities=1, index_size=sz, entity_id=entity_id_1) - insert_test_data(service, [entity_type], n_entities=1, + insert_test_data(service, [entity_type], n_entities=1, index_size=sz, entity_id=entity_id_2) yield - delete_test_data(service, [entity_type]) + for service in services: + delete_test_data(service, [entity_type]) def assert_entities(response, entity_ids, ix_lo=None, ix_hi=None, @@ -66,47 +68,59 @@ def 
assert_aggregate(response, entity_ids, aggregator, ix_lo=None, ix_hi=None): assert actual == expected -def test_NTNE1A_defaults(reporter_dataset): - response = requests.get(query_url()) +@pytest.mark.parametrize("service", services) +def test_NTNE1A_defaults(service, reporter_dataset): + h = {'Fiware-Service': service} + + response = requests.get(query_url(), headers=h) assert_entities(response, [entity_id_1, entity_id_2]) -def test_NTNE1A_type(reporter_dataset): +@pytest.mark.parametrize("service", services) +def test_NTNE1A_type(service, reporter_dataset): query_params = { 'type': result_gen.entity_type() } - response = requests.get(query_url(), params=query_params) + h = {'Fiware-Service': service} + response = requests.get(query_url(), params=query_params, headers=h) assert_entities(response, [entity_id_1, entity_id_2]) -def test_NTNE1A_one_entity(reporter_dataset): +@pytest.mark.parametrize("service", services) +def test_NTNE1A_one_entity(service, reporter_dataset): query_params = { 'id': entity_id_1 } - response = requests.get(query_url(), params=query_params) + h = {'Fiware-Service': service} + response = requests.get(query_url(), params=query_params, headers=h) assert_entities(response, [entity_id_1]) -def test_NTNENA_some_entities(reporter_dataset): +@pytest.mark.parametrize("service", services) +def test_NTNENA_some_entities(service, reporter_dataset): entity_ids = "{}, {}".format(entity_id_1, entity_id_2) query_params = { 'id': entity_ids } - response = requests.get(query_url(), params=query_params) + h = {'Fiware-Service': service} + response = requests.get(query_url(), params=query_params, headers=h) assert_entities(response, [entity_id_1, entity_id_2]) -def test_NTNE1A_values_defaults(reporter_dataset): +@pytest.mark.parametrize("service", services) +def test_NTNE1A_values_defaults(service, reporter_dataset): entity_ids = "{},{},{}".format(entity_id_1, entity_id_2, 'RoomNotValid') # should ignore RoomNotValid query_params = { 'id': entity_ids } - response = requests.get(query_url(values=True), params=query_params) + h = {'Fiware-Service': service} + response = requests.get(query_url(values=True), params=query_params, headers=h) assert_entities(response, [entity_id_1, entity_id_2], values_only=True) -def test_weird_ids(reporter_dataset): +@pytest.mark.parametrize("service", services) +def test_weird_ids(service, reporter_dataset): """ Invalid ids are ignored (provided at least one is valid to avoid 404). Empty values are ignored. 
@@ -116,53 +130,63 @@ def test_weird_ids(reporter_dataset): query_params = { 'id': entity_ids } - response = requests.get(query_url(), params=query_params) + h = {'Fiware-Service': service} + response = requests.get(query_url(), params=query_params, headers=h) assert_entities(response, [entity_id_1, entity_id_2]) +@pytest.mark.parametrize("service", services) @pytest.mark.parametrize('ix_lo, ix_hi', ix_intervals()) -def test_NTNE1A_fromDate_toDate(reporter_dataset, ix_lo, ix_hi): +def test_NTNE1A_fromDate_toDate(service, reporter_dataset, ix_lo, ix_hi): query_params = { 'types': 'entity_type' } + h = {'Fiware-Service': service} if ix_lo is not None: query_params['fromDate'] = index[ix_lo] if ix_hi is not None: query_params['toDate'] = index[ix_hi] - response = requests.get(query_url(), params=query_params) + response = requests.get(query_url(), params=query_params, headers=h) assert_entities(response, [entity_id_1, entity_id_2], ix_lo, ix_hi) -def test_NTNE1A_fromDate_toDate_with_quotes(reporter_dataset): +@pytest.mark.parametrize("service", services) +def test_NTNE1A_fromDate_toDate_with_quotes(service, reporter_dataset): query_params = { 'types': 'entity_type', 'fromDate': '"{}"'.format(index[0]), 'toDate': '"{}"'.format(index[-1]) } - response = requests.get(query_url(), params=query_params) + h = {'Fiware-Service': service} + response = requests.get(query_url(), params=query_params, headers=h) assert_entities(response, [entity_id_1, entity_id_2]) -def test_NTNE1A_limit(reporter_dataset): +@pytest.mark.parametrize("service", services) +def test_NTNE1A_limit(service, reporter_dataset): query_params = { 'limit': 10 } - response = requests.get(query_url(), params=query_params) + h = {'Fiware-Service': service} + response = requests.get(query_url(), params=query_params, headers=h) assert_entities(response, [entity_id_1, entity_id_2]) -def test_NTNE1A_combined(reporter_dataset): +@pytest.mark.parametrize("service", services) +def test_NTNE1A_combined(service, reporter_dataset): query_params = { 'type': result_gen.entity_type(), 'fromDate': index[0], 'toDate': index[2], 'limit': 10, } - response = requests.get(query_url(), params=query_params) + h = {'Fiware-Service': service} + response = requests.get(query_url(), params=query_params, headers=h) assert_entities(response, [entity_id_1, entity_id_2], ix_hi=2) +@pytest.mark.parametrize("service", services) @pytest.mark.parametrize("aggr_period, exp_index, ins_period", [ ("day", ['1970-01-01T00:00:00.000+00:00', '1970-01-02T00:00:00.000+00:00', @@ -174,9 +198,8 @@ def test_NTNE1A_combined(reporter_dataset): '1970-01-01T00:01:00.000+00:00', '1970-01-01T00:02:00.000+00:00'], "second"), ]) -def test_NTNE1A_aggrPeriod(aggr_period, exp_index, ins_period): +def test_NTNE1A_aggrPeriod(service, aggr_period, exp_index, ins_period): # Custom index to test aggrPeriod - service = '' entity_type = 'test_NTNE1A_aggrPeriod' # The reporter_dataset fixture is still in the DB cos it has a scope of # module. 
We use a different entity type to store this test's rows in a @@ -198,7 +221,8 @@ def test_NTNE1A_aggrPeriod(aggr_period, exp_index, ins_period): query_params = { 'aggrPeriod': aggr_period, } - r = requests.get(query_url(), params=query_params) + h = {'Fiware-Service': service} + r = requests.get(query_url(), params=query_params, headers=h) assert r.status_code == 400, r.text # Check aggregation with aggrPeriod @@ -209,7 +233,8 @@ def test_NTNE1A_aggrPeriod(aggr_period, exp_index, ins_period): 'aggrMethod': 'sum', 'aggrPeriod': aggr_period, } - r = requests.get(query_url(), params=query_params) + + r = requests.get(query_url(), params=query_params, headers=h) delete_test_data(service, [entity_type]) @@ -239,11 +264,13 @@ def test_NTNE1A_aggrPeriod(aggr_period, exp_index, ins_period): assert obtained == expected -def test_not_found(reporter_dataset): +@pytest.mark.parametrize("service", services) +def test_not_found(service, reporter_dataset): query_params = { 'id': 'RoomNotValid' } - r = requests.get(query_url(), params=query_params) + h = {'Fiware-Service': service} + r = requests.get(query_url(), params=query_params, headers=h) assert r.status_code == 404, r.text assert r.json() == { "error": "Not Found", @@ -251,16 +278,19 @@ def test_not_found(reporter_dataset): } -def test_NTNE1A_aggrScope(reporter_dataset): +@pytest.mark.parametrize("service", services) +def test_NTNE1A_aggrScope(service, reporter_dataset): # Notify users when not yet implemented query_params = { 'aggrMethod': 'avg', 'aggrScope': 'global', } - r = requests.get(query_url(), params=query_params) + h = {'Fiware-Service': service} + r = requests.get(query_url(), params=query_params, headers=h) assert r.status_code == 501, r.text +@pytest.mark.parametrize("service", services) @pytest.mark.parametrize('aggr_method, aggregator, ix_lo, ix_hi', [('count', len, lo, hi) for (lo, hi) in ix_intervals()] + [('sum', sum, lo, hi) for (lo, hi) in ix_intervals()] + @@ -268,23 +298,25 @@ def test_NTNE1A_aggrScope(reporter_dataset): [('min', min, lo, hi) for (lo, hi) in ix_intervals()] + [('max', max, lo, hi) for (lo, hi) in ix_intervals()] ) -def test_aggregating_entities_of_same_type(reporter_dataset, +def test_aggregating_entities_of_same_type(service, reporter_dataset, aggr_method, aggregator, ix_lo, ix_hi): query_params = { 'type': result_gen.entity_type(), 'aggrMethod': aggr_method } + h = {'Fiware-Service': service} if ix_lo is not None: query_params['fromDate'] = index[ix_lo] if ix_hi is not None: query_params['toDate'] = index[ix_hi] - response = requests.get(query_url(), params=query_params) + response = requests.get(query_url(), params=query_params, headers=h) assert_aggregate(response, [entity_id_1, entity_id_2], aggregator, ix_lo, ix_hi) +@pytest.mark.parametrize("service", services) @pytest.mark.parametrize('aggr_method, aggregator, ix_lo, ix_hi', [('count', len, lo, hi) for (lo, hi) in ix_intervals()] + [('sum', sum, lo, hi) for (lo, hi) in ix_intervals()] + @@ -292,7 +324,7 @@ def test_aggregating_entities_of_same_type(reporter_dataset, [('min', min, lo, hi) for (lo, hi) in ix_intervals()] + [('max', max, lo, hi) for (lo, hi) in ix_intervals()] ) -def test_aggregating_single_entity(reporter_dataset, +def test_aggregating_single_entity(service, reporter_dataset, aggr_method, aggregator, ix_lo, ix_hi): query_params = { @@ -300,10 +332,11 @@ def test_aggregating_single_entity(reporter_dataset, 'id': entity_id_1, 'aggrMethod': aggr_method } + h = {'Fiware-Service': service} if ix_lo is not None: query_params['fromDate'] = 
index[ix_lo] if ix_hi is not None: query_params['toDate'] = index[ix_hi] - response = requests.get(query_url(), params=query_params) + response = requests.get(query_url(), params=query_params, headers=h) assert_aggregate(response, [entity_id_1], aggregator, ix_lo, ix_hi) diff --git a/src/reporter/tests/test_NTNENA.py b/src/reporter/tests/test_NTNENA.py index f0c4edd8..90ee72a3 100644 --- a/src/reporter/tests/test_NTNENA.py +++ b/src/reporter/tests/test_NTNENA.py @@ -13,8 +13,7 @@ attrs = 'pressure' n_days = 4 -default_service = 't0' -service_1 = 't1' +services = ['t1', 't2'] def query_url(values=False): @@ -26,80 +25,103 @@ def query_url(values=False): ) -def query(values=False, params=None, service=default_service): +def query(values=False, params=None, service=None): h = {'Fiware-Service': service} return requests.get(query_url(values), params=params, headers=h) @pytest.fixture(scope='module') def reporter_dataset(): - - insert_test_data(default_service, [entity_type], n_entities=1, index_size=4, - entity_id=entity_id) - insert_test_data(default_service, [entity_type], n_entities=1, index_size=4, - entity_id=entity_id_1) - - insert_test_data(service_1, [entity_type], entity_id=entity_id, - index_size=3) - insert_test_data(service_1, [entity_type_1], entity_id=entity_id_1_1, - index_size=3) + for service in services: + insert_test_data(service, [entity_type], n_entities=1, index_size=4, + entity_id=entity_id) + insert_test_data(service, [entity_type], n_entities=1, index_size=4, + entity_id=entity_id_1) + insert_test_data(service, [entity_type_1], entity_id=entity_id_1_1, + index_size=3) yield - - delete_test_data(default_service, [entity_type]) - delete_test_data(service_1, [entity_type, entity_type_1]) + for service in services: + delete_test_data(service, [entity_type, entity_type_1]) -def test_NTNENA_defaults(reporter_dataset): - r = query() +@pytest.mark.parametrize("service", services) +def test_NTNENA_defaults(service, reporter_dataset): + r = query(service=service) assert r.status_code == 200, r.text # Assert Results expected_temperatures = list(range(4)) - expected_pressures = [t*10 for t in expected_temperatures] + expected_pressures = [t * 10 for t in expected_temperatures] + expected_temperatures_1 = list(range(3)) + expected_pressures_1 = [t * 10 for t in expected_temperatures_1] expected_index = [ - '1970-01-{:02}T00:00:00.000+00:00'.format(i+1) for i in expected_temperatures + '1970-01-{:02}T00:00:00.000+00:00'.format(i + 1) for i in expected_temperatures + ] + expected_index_1 = [ + '1970-01-{:02}T00:00:00.000+00:00'.format(i + 1) for i in expected_temperatures_1 ] expected_entities = [ { - 'entityId': 'Room1', + 'entityId': entity_id, 'index': expected_index, 'values': expected_temperatures }, { - 'entityId': 'Room2', + 'entityId': entity_id_1, 'index': expected_index, 'values': expected_temperatures } ] + expected_entities_1 = [ + { + 'entityId': entity_id_1_1, + 'index': expected_index_1, + 'values': expected_temperatures_1 + } + ] expected_entities_pressure = [ { - 'entityId': 'Room1', + 'entityId': entity_id, 'index': expected_index, 'values': expected_pressures }, { - 'entityId': 'Room2', + 'entityId': entity_id_1, 'index': expected_index, 'values': expected_pressures } ] + expected_entities_pressure_1 = [ + { + 'entityId': entity_id_1_1, + 'index': expected_index_1, + 'values': expected_pressures_1 + } + ] expected_types = [ + { + 'entities': expected_entities_1, + 'entityType': entity_type_1 + }, { 'entities': expected_entities, - 'entityType': 'Room' + 
'entityType': entity_type } ] expected_types_pressure = [ + { + 'entities': expected_entities_pressure_1, + 'entityType': entity_type_1 + }, { 'entities': expected_entities_pressure, - 'entityType': 'Room' + 'entityType': entity_type } ] - expected_attrs = [ { 'attrName': 'pressure', @@ -119,30 +141,31 @@ def test_NTNENA_defaults(reporter_dataset): assert obtained == expected -def test_NTNENA_type(reporter_dataset): +@pytest.mark.parametrize("service", services) +def test_NTNENA_type(service, reporter_dataset): # Query query_params = { 'type': entity_type } - r = query(params=query_params) + r = query(params=query_params, service=service) assert r.status_code == 200, r.text expected_temperatures = list(range(4)) - expected_pressures = [t*10 for t in expected_temperatures] + expected_pressures = [t * 10 for t in expected_temperatures] # Assert obtained = r.json() expected_values = list(range(4)) expected_index = [ - '1970-01-{:02}T00:00:00.000+00:00'.format(i+1) for i in expected_values + '1970-01-{:02}T00:00:00.000+00:00'.format(i + 1) for i in expected_values ] expected_entities = [ { - 'entityId': 'Room1', + 'entityId': entity_id, 'index': expected_index, 'values': expected_temperatures }, { - 'entityId': 'Room2', + 'entityId': entity_id_1, 'index': expected_index, 'values': expected_temperatures @@ -150,12 +173,12 @@ def test_NTNENA_type(reporter_dataset): ] expected_entities_pressure = [ { - 'entityId': 'Room1', + 'entityId': entity_id, 'index': expected_index, 'values': expected_pressures }, { - 'entityId': 'Room2', + 'entityId': entity_id_1, 'index': expected_index, 'values': expected_pressures @@ -165,13 +188,13 @@ def test_NTNENA_type(reporter_dataset): expected_types = [ { 'entities': expected_entities, - 'entityType': 'Room' + 'entityType': entity_type } ] expected_types_pressure = [ { 'entities': expected_entities_pressure, - 'entityType': 'Room' + 'entityType': entity_type } ] expected_attrs = [ @@ -193,21 +216,22 @@ def test_NTNENA_type(reporter_dataset): assert obtained == expected -def test_NTNE1A_one_entity(reporter_dataset): +@pytest.mark.parametrize("service", services) +def test_NTNE1A_one_entity(service, reporter_dataset): # Query query_params = { 'id': entity_id } - r = query(params=query_params) + r = query(params=query_params, service=service) assert r.status_code == 200, r.text expected_temperatures = list(range(4)) - expected_pressures = [t*10 for t in expected_temperatures] + expected_pressures = [t * 10 for t in expected_temperatures] # Assert obtained = r.json() expected_values = list(range(4)) expected_index = [ - '1970-01-{:02}T00:00:00.000+00:00'.format(i+1) for i in expected_values + '1970-01-{:02}T00:00:00.000+00:00'.format(i + 1) for i in expected_values ] expected_entities = [ { @@ -255,20 +279,21 @@ def test_NTNE1A_one_entity(reporter_dataset): assert obtained == expected -def test_1TNENA_some_entities(reporter_dataset): +@pytest.mark.parametrize("service", services) +def test_1TNENA_some_entities(service, reporter_dataset): # Query entity_ids = 'Room1,Room2' query_params = { 'id': entity_ids } - r = query(params=query_params) + r = query(params=query_params, service=service) assert r.status_code == 200, r.text # Assert Results expected_temperatures = list(range(n_days)) - expected_pressures = [t*10 for t in expected_temperatures] + expected_pressures = [t * 10 for t in expected_temperatures] expected_index = [ - '1970-01-{:02}T00:00:00.000+00:00'.format(i+1) for i in expected_temperatures + '1970-01-{:02}T00:00:00.000+00:00'.format(i + 1) for i in 
expected_temperatures ] expected_entities = [ @@ -329,19 +354,20 @@ def test_1TNENA_some_entities(reporter_dataset): assert obtained == expected -def test_NTNENA_values_defaults(reporter_dataset): +@pytest.mark.parametrize("service", services) +def test_NTNENA_values_defaults(service, reporter_dataset): # Query query_params = { 'id': 'Room1,Room2,RoomNotValid', # -> validates to Room2,Room1. } - r = query(values=True, params=query_params) + r = query(values=True, params=query_params, service=service) assert r.status_code == 200, r.text # Assert Results expected_temperatures = list(range(n_days)) - expected_pressures = [t*10 for t in expected_temperatures] + expected_pressures = [t * 10 for t in expected_temperatures] expected_index = [ - '1970-01-{:02}T00:00:00.000+00:00'.format(i+1) for i in expected_temperatures + '1970-01-{:02}T00:00:00.000+00:00'.format(i + 1) for i in expected_temperatures ] expected_entities = [ { @@ -400,22 +426,24 @@ def test_NTNENA_values_defaults(reporter_dataset): assert obtained == expected -def test_NTNE_fromDate_toDate(reporter_dataset): +@pytest.mark.parametrize("service", services) +def test_NTNE_fromDate_toDate(service, reporter_dataset): # Query query_params = { + 'type': entity_type, 'fromDate': "1970-01-01T00:00:00+00:00", 'toDate': "1970-01-04T00:00:00+00:00" } - r = query(params=query_params) + r = query(params=query_params, service=service) assert r.status_code == 200, r.text expected_temperatures = list(range(4)) - expected_pressures = [t*10 for t in expected_temperatures] + expected_pressures = [t * 10 for t in expected_temperatures] # Assert expected_values = list(range(4)) expected_index = [ - '1970-01-{:02}T00:00:00.000+00:00'.format(i+1) for i in expected_values + '1970-01-{:02}T00:00:00.000+00:00'.format(i + 1) for i in expected_values ] - + expected_entities = [ { 'entityId': 'Room1', @@ -473,20 +501,23 @@ def test_NTNE_fromDate_toDate(reporter_dataset): obtained = r.json() assert obtained == expected -def test_NTNENA_fromDate_toDate_with_quotes(reporter_dataset): + +@pytest.mark.parametrize("service", services) +def test_NTNENA_fromDate_toDate_with_quotes(service, reporter_dataset): # Query query_params = { + 'type': entity_type, 'fromDate': '"1970-01-01T00:00:00+00:00"', 'toDate': '"1970-01-04T00:00:00+00:00"' } - r = query(params=query_params) + r = query(params=query_params, service=service) assert r.status_code == 200, r.text expected_temperatures = list(range(4)) - expected_pressures = [t*10 for t in expected_temperatures] + expected_pressures = [t * 10 for t in expected_temperatures] # Assert expected_values = list(range(4)) expected_index = [ - '1970-01-{:02}T00:00:00.000+00:00'.format(i+1) for i in expected_values + '1970-01-{:02}T00:00:00.000+00:00'.format(i + 1) for i in expected_values ] expected_entities = [ { @@ -545,20 +576,23 @@ def test_NTNENA_fromDate_toDate_with_quotes(reporter_dataset): obtained = r.json() assert obtained == expected -def test_NTNENA_limit(reporter_dataset): + +@pytest.mark.parametrize("service", services) +def test_NTNENA_limit(service, reporter_dataset): # Query query_params = { + 'type': entity_type, 'limit': 10 } - r = query(params=query_params) + r = query(params=query_params, service=service) assert r.status_code == 200, r.text expected_temperatures = list(range(4)) - expected_pressures = [t*10 for t in expected_temperatures] + expected_pressures = [t * 10 for t in expected_temperatures] # Assert expected_values = list(range(4)) expected_index = [ - '1970-01-{:02}T00:00:00.000+00:00'.format(i+1) for 
i in expected_values + '1970-01-{:02}T00:00:00.000+00:00'.format(i + 1) for i in expected_values ] expected_entities = [ { @@ -618,7 +652,8 @@ def test_NTNENA_limit(reporter_dataset): assert obtained == expected -def test_NTNENA_combined(reporter_dataset): +@pytest.mark.parametrize("service", services) +def test_NTNENA_combined(service, reporter_dataset): # Query query_params = { 'type': entity_type, @@ -626,15 +661,15 @@ def test_NTNENA_combined(reporter_dataset): 'toDate': "1970-01-04T00:00:00+00:00", 'limit': 10, } - r = query(params=query_params) + r = query(params=query_params, service=service) assert r.status_code == 200, r.text - + expected_temperatures = list(range(4)) - expected_pressures = [t*10 for t in expected_temperatures] + expected_pressures = [t * 10 for t in expected_temperatures] # Assert expected_values = list(range(4)) expected_index = [ - '1970-01-{:02}T00:00:00.000+00:00'.format(i+1) for i in expected_values + '1970-01-{:02}T00:00:00.000+00:00'.format(i + 1) for i in expected_values ] expected_entities = [ { @@ -694,7 +729,8 @@ def test_NTNENA_combined(reporter_dataset): assert obtained == expected -def test_weird_ids(reporter_dataset): +@pytest.mark.parametrize("service", services) +def test_weird_ids(service, reporter_dataset): """ Invalid ids are ignored (provided at least one is valid to avoid 404). Empty values are ignored. @@ -703,14 +739,14 @@ def test_weird_ids(reporter_dataset): query_params = { 'id': 'Room1,RoomNotValid,Room2,', # -> validates to Room2,Room1. } - r = query(params=query_params) + r = query(params=query_params, service=service) assert r.status_code == 200, r.text # Assert Results expected_temperatures = list(range(n_days)) - expected_pressures = [t*10 for t in expected_temperatures] + expected_pressures = [t * 10 for t in expected_temperatures] expected_index = [ - '1970-01-{:02}T00:00:00.000+00:00'.format(i+1) for i in expected_temperatures + '1970-01-{:02}T00:00:00.000+00:00'.format(i + 1) for i in expected_temperatures ] expected_entities = [ { @@ -770,18 +806,19 @@ def test_weird_ids(reporter_dataset): assert obtained == expected +@pytest.mark.parametrize("service", services) @pytest.mark.parametrize("aggr_period, exp_index, ins_period", [ - ("day", ['1970-01-01T00:00:00.000+00:00', - '1970-01-02T00:00:00.000+00:00', - '1970-01-03T00:00:00.000+00:00'], "hour"), - ("hour", ['1970-01-01T00:00:00.000+00:00', - '1970-01-01T01:00:00.000+00:00', - '1970-01-01T02:00:00.000+00:00'], "minute"), + ("day", ['1970-01-01T00:00:00.000+00:00', + '1970-01-02T00:00:00.000+00:00', + '1970-01-03T00:00:00.000+00:00'], "hour"), + ("hour", ['1970-01-01T00:00:00.000+00:00', + '1970-01-01T01:00:00.000+00:00', + '1970-01-01T02:00:00.000+00:00'], "minute"), ("minute", ['1970-01-01T00:00:00.000+00:00', '1970-01-01T00:01:00.000+00:00', '1970-01-01T00:02:00.000+00:00'], "second"), ]) -def test_NTNENA_aggrPeriod(aggr_period, exp_index, ins_period): +def test_NTNENA_aggrPeriod(service, aggr_period, exp_index, ins_period): etype = 'test_NTNENA_aggrPeriod' # The reporter_dataset fixture is still in the DB cos it has a scope of # module. 
We use a different entity type to store this test's rows in a @@ -792,7 +829,7 @@ def test_NTNENA_aggrPeriod(aggr_period, exp_index, ins_period): # Custom index to test aggrPeriod for i in exp_index: base = dateutil.parser.isoparse(i) - insert_test_data(default_service, + insert_test_data(service, [etype], index_size=5, index_base=base, @@ -802,7 +839,7 @@ def test_NTNENA_aggrPeriod(aggr_period, exp_index, ins_period): query_params = { 'aggrPeriod': aggr_period, } - r = requests.get(query_url(), params=query_params) + r = query(params=query_params, service=service) assert r.status_code == 400, r.text # Check aggregation with aggrPeriod @@ -812,12 +849,12 @@ def test_NTNENA_aggrPeriod(aggr_period, exp_index, ins_period): 'aggrMethod': 'sum', 'aggrPeriod': aggr_period, } - r = query(params=query_params) + r = query(params=query_params, service=service) # Assert assert r.status_code == 200, r.text obtained = r.json() - delete_test_data(default_service, [etype]) + delete_test_data(service, [etype]) expected_temperatures = 0 + 1 + 2 + 3 + 4 expected_entities = [ @@ -848,11 +885,12 @@ def test_NTNENA_aggrPeriod(aggr_period, exp_index, ins_period): assert obtained == expected -def test_not_found(reporter_dataset): +@pytest.mark.parametrize("service", services) +def test_not_found(service, reporter_dataset): query_params = { 'type': 'NotThere' } - r = requests.get(query_url(), params=query_params) + r = query(params=query_params, service=service) assert r.status_code == 404, r.text assert r.json() == { "error": "Not Found", @@ -860,34 +898,38 @@ def test_not_found(reporter_dataset): } -def test_NTNENA_aggrScope(reporter_dataset): +@pytest.mark.parametrize("service", services) +def test_NTNENA_aggrScope(service, reporter_dataset): # Notify users when not yet implemented query_params = { 'aggrMethod': 'avg', 'aggrScope': 'global', } - r = requests.get(query_url(), params=query_params) + r = query(params=query_params, service=service) assert r.status_code == 501, r.text -def test_NTNENA_types_two_attribute(reporter_dataset): - r = query(service=service_1) +@pytest.mark.parametrize("service", services) +def test_NTNENA_types_two_attribute(service, reporter_dataset): + r = query(service=service) assert r.status_code == 200, r.text # Assert Results - expected_temperatures = list(range(3)) - expected_pressures = [t*10 for t in expected_temperatures] + expected_temperatures = list(range(4)) + expected_temperatures_kitchen = list(range(3)) + expected_pressures = [t * 10 for t in expected_temperatures] + expected_pressures_kitchen = [t * 10 for t in expected_temperatures_kitchen] expected_index = [ - '1970-01-{:02}T00:00:00.000+00:00'.format(i+1) for i in expected_temperatures + '1970-01-{:02}T00:00:00.000+00:00'.format(i + 1) for i in expected_temperatures ] expected_index_kitchen = [ - '1970-01-{:02}T00:00:00.000+00:00'.format(i+1) for i in expected_temperatures + '1970-01-{:02}T00:00:00.000+00:00'.format(i + 1) for i in expected_temperatures_kitchen ] expected_entities_kitchen = [ { 'entityId': entity_id_1_1, 'index': expected_index_kitchen, - 'values': expected_pressures + 'values': expected_pressures_kitchen } ] expected_entities_room = [ @@ -895,13 +937,18 @@ def test_NTNENA_types_two_attribute(reporter_dataset): 'entityId': entity_id, 'index': expected_index, 'values': expected_pressures + }, + { + 'entityId': entity_id_1, + 'index': expected_index, + 'values': expected_pressures } ] expected_entities_kitchen_temp = [ { 'entityId': entity_id_1_1, 'index': expected_index_kitchen, - 'values': 
expected_temperatures + 'values': expected_temperatures_kitchen } ] expected_entities_room_temp = [ @@ -909,6 +956,11 @@ def test_NTNENA_types_two_attribute(reporter_dataset): 'entityId': entity_id, 'index': expected_index, 'values': expected_temperatures + }, + { + 'entityId': entity_id_1, + 'index': expected_index, + 'values': expected_temperatures } ] expected_types_new = [ @@ -920,7 +972,7 @@ def test_NTNENA_types_two_attribute(reporter_dataset): 'entities': expected_entities_room, 'entityType': entity_type } - ] + ] expected_types = [ { 'entities': expected_entities_kitchen_temp, @@ -930,7 +982,7 @@ def test_NTNENA_types_two_attribute(reporter_dataset): 'entities': expected_entities_room_temp, 'entityType': entity_type } - ] + ] expected_attrs = [ { 'attrName': 'pressure', @@ -948,28 +1000,31 @@ def test_NTNENA_types_two_attribute(reporter_dataset): assert obtained == expected -def test_1TNENA_types_one_attribute(reporter_dataset): +@pytest.mark.parametrize("service", services) +def test_1TNENA_types_one_attribute(service, reporter_dataset): query_params = { 'attrs': 'pressure' } - r = query(service=service_1, params=query_params) + r = query(service=service, params=query_params) assert r.status_code == 200, r.text # Assert Results - expected_temperatures = list(range(3)) - expected_pressures = [t*10 for t in expected_temperatures] + expected_temperatures = list(range(4)) + expected_temperatures_kitchen = list(range(3)) + expected_pressures = [t * 10 for t in expected_temperatures] + expected_pressures_kitchen = [t * 10 for t in expected_temperatures_kitchen] expected_index = [ - '1970-01-{:02}T00:00:00.000+00:00'.format(i+1) for i in expected_temperatures + '1970-01-{:02}T00:00:00.000+00:00'.format(i + 1) for i in expected_temperatures ] expected_index_kitchen = [ - '1970-01-{:02}T00:00:00.000+00:00'.format(i+1) for i in expected_temperatures + '1970-01-{:02}T00:00:00.000+00:00'.format(i + 1) for i in expected_temperatures_kitchen ] expected_entities_kitchen = [ { 'entityId': entity_id_1_1, 'index': expected_index_kitchen, - 'values': expected_pressures + 'values': expected_pressures_kitchen } ] expected_entities_room = [ @@ -977,6 +1032,11 @@ def test_1TNENA_types_one_attribute(reporter_dataset): 'entityId': entity_id, 'index': expected_index, 'values': expected_pressures + }, + { + 'entityId': entity_id_1, + 'index': expected_index, + 'values': expected_pressures } ] expected_types = [ @@ -988,7 +1048,7 @@ def test_1TNENA_types_one_attribute(reporter_dataset): 'entities': expected_entities_room, 'entityType': entity_type } - ] + ] expected_attrs = [ { 'attrName': 'pressure', @@ -1002,13 +1062,14 @@ def test_1TNENA_types_one_attribute(reporter_dataset): assert obtained == expected -def test_aggregation_is_per_instance(reporter_dataset): +@pytest.mark.parametrize("service", services) +def test_aggregation_is_per_instance(service, reporter_dataset): query_params = { 'attrs': 'temperature', 'id': 'Room1,Room2', 'aggrMethod': 'sum' } - r = query(params=query_params) + r = query(params=query_params, service=service) assert r.status_code == 200, r.text obtained = r.json() @@ -1048,7 +1109,7 @@ def test_aggregation_is_per_instance(reporter_dataset): obtained = r.json() assert obtained == expected - + # Index array in the response is the used fromDate and toDate query_params = { @@ -1058,14 +1119,14 @@ def test_aggregation_is_per_instance(reporter_dataset): 'fromDate': datetime(1970, 1, 1).isoformat(), 'toDate': datetime(1970, 1, 2).isoformat(), } - r = query(params=query_params) + r = 
query(params=query_params, service=service) assert r.status_code == 200, r.text obtained = r.json() assert isinstance(obtained, dict) expected_temperatures = list(range(2)) expected_index = [ - '1970-01-{:02}T00:00:00+00:00'.format(i+1) for i in expected_temperatures + '1970-01-{:02}T00:00:00+00:00'.format(i + 1) for i in expected_temperatures ] expected_entities = [ { @@ -1097,7 +1158,7 @@ def test_aggregation_is_per_instance(reporter_dataset): 'id': 'Room1', 'aggrMethod': 'avg' } - r = query(params=query_params) + r = query(params=query_params, service=service) assert r.status_code == 200, r.text obtained = r.json() @@ -1107,7 +1168,7 @@ def test_aggregation_is_per_instance(reporter_dataset): { 'entityId': 'Room1', 'index': expected_index, - 'values': [sum(range(4))/4] + 'values': [sum(range(4)) / 4] } ] expected_types = [ diff --git a/src/reporter/tests/test_health.py b/src/reporter/tests/test_health.py index 4208079b..9f096db8 100644 --- a/src/reporter/tests/test_health.py +++ b/src/reporter/tests/test_health.py @@ -1,7 +1,8 @@ -from conftest import QL_BASE_URL +from conftest import QL_BASE_URL import pytest import requests + def test_health_pass(): """ At the time test starts, services are already deployed. diff --git a/src/reporter/tests/test_incomplete_entities.py b/src/reporter/tests/test_incomplete_entities.py index aa3795da..57ffe581 100644 --- a/src/reporter/tests/test_incomplete_entities.py +++ b/src/reporter/tests/test_incomplete_entities.py @@ -2,20 +2,21 @@ import time from conftest import QL_URL from .utils import send_notifications, delete_entity_type +import pytest -service = '' +services = ['t1', 't2'] -def notify(entity): +def notify(service, entity): notification_data = [{'data': [entity]}] send_notifications(service, notification_data) -def get_all_stored_attributes(entity_id): +def get_all_stored_attributes(service, entity_id): time.sleep(2) - + h = {'Fiware-Service': service} url = "{}/entities/{}".format(QL_URL, entity_id) - response = requests.get(url) + response = requests.get(url, headers=h) attrs = response.json().get('attributes', []) attr_values_map = {} @@ -53,7 +54,8 @@ def get_all_stored_attributes(entity_id): # } -def test_can_add_new_attribute(): +@pytest.mark.parametrize("service", services) +def test_can_add_new_attribute(service): a1_value = 123.0 a2_value = 'new attribute initial value' entity = { @@ -64,22 +66,23 @@ def test_can_add_new_attribute(): 'value': a1_value } } - notify(entity) + notify(service, entity) entity['a2'] = { 'type': 'Text', 'value': a2_value } - notify(entity) + notify(service, entity) - attr_values_map = get_all_stored_attributes(entity['id']) + attr_values_map = get_all_stored_attributes(service, entity['id']) assert len(attr_values_map) == 2 assert attr_values_map['a1'] == [a1_value, a1_value] assert attr_values_map['a2'] == [None, a2_value] delete_entity_type(service, 't1') -def test_can_add_new_attribute_even_without_specifying_old_ones(): +@pytest.mark.parametrize("service", services) +def test_can_add_new_attribute_even_without_specifying_old_ones(service): a1_value = 123.0 entity_1 = { 'id': 'u1:1', @@ -89,7 +92,7 @@ def test_can_add_new_attribute_even_without_specifying_old_ones(): 'value': a1_value } } - notify(entity_1) + notify(service, entity_1) a2_value = 'new attribute initial value' entity_2 = { @@ -100,15 +103,17 @@ def test_can_add_new_attribute_even_without_specifying_old_ones(): 'value': a2_value } } - notify(entity_2) + notify(service, entity_2) - attr_values_map = get_all_stored_attributes(entity_1['id']) 
+ attr_values_map = get_all_stored_attributes(service, entity_1['id']) assert len(attr_values_map) == 2 assert attr_values_map['a1'] == [a1_value, None] assert attr_values_map['a2'] == [None, a2_value] delete_entity_type(service, 'u1') -def test_can_add_2_new_attribute_even_without_specifying_old_ones(): + +@pytest.mark.parametrize("service", services) +def test_can_add_2_new_attribute_even_without_specifying_old_ones(service): a1_value = 123.0 entity_1 = { 'id': 'u1:1', @@ -118,7 +123,7 @@ def test_can_add_2_new_attribute_even_without_specifying_old_ones(): 'value': a1_value } } - notify(entity_1) + notify(service, entity_1) a2_value = 'new attribute initial value' a3_value = True @@ -134,9 +139,9 @@ def test_can_add_2_new_attribute_even_without_specifying_old_ones(): 'value': a3_value } } - notify(entity_2) + notify(service, entity_2) - attr_values_map = get_all_stored_attributes(entity_1['id']) + attr_values_map = get_all_stored_attributes(service, entity_1['id']) assert len(attr_values_map) == 3 assert attr_values_map['a1'] == [a1_value, None] assert attr_values_map['a2'] == [None, a2_value] @@ -144,7 +149,9 @@ def test_can_add_2_new_attribute_even_without_specifying_old_ones(): delete_entity_type(service, 'u1') -def test_store_missing_text_value_as_null(): + +@pytest.mark.parametrize("service", services) +def test_store_missing_text_value_as_null(service): entity = { 'id': 't2:1', 'type': 't2', @@ -156,15 +163,17 @@ def test_store_missing_text_value_as_null(): 'type': 'Text' } } - notify(entity) + notify(service, entity) - attr_values_map = get_all_stored_attributes(entity['id']) + attr_values_map = get_all_stored_attributes(service, entity['id']) assert len(attr_values_map) == 2 assert attr_values_map['x'] == [None] delete_entity_type(service, 't2') -def test_store_missing_text_value_as_null_then_as_empty(): + +@pytest.mark.parametrize("service", services) +def test_store_missing_text_value_as_null_then_as_empty(service): entity = { 'id': 't3:1', 'type': 't3', @@ -176,18 +185,20 @@ def test_store_missing_text_value_as_null_then_as_empty(): 'type': 'Text' } } - notify(entity) + notify(service, entity) entity['x']['value'] = '' - notify(entity) + notify(service, entity) - attr_values_map = get_all_stored_attributes(entity['id']) + attr_values_map = get_all_stored_attributes(service, entity['id']) assert len(attr_values_map) == 2 assert attr_values_map['x'] == [None, ''] delete_entity_type(service, 't3') -def test_store_null_text_value_as_null(): + +@pytest.mark.parametrize("service", services) +def test_store_null_text_value_as_null(service): entity = { 'id': 't4:1', 'type': 't4', @@ -200,15 +211,17 @@ def test_store_null_text_value_as_null(): 'value': None } } - notify(entity) + notify(service, entity) - attr_values_map = get_all_stored_attributes(entity['id']) + attr_values_map = get_all_stored_attributes(service, entity['id']) assert len(attr_values_map) == 2 assert attr_values_map['x'] == [None] delete_entity_type(service, 't4') -def test_store_null_numeric_value_as_null(): + +@pytest.mark.parametrize("service", services) +def test_store_null_numeric_value_as_null(service): entity = { 'id': 't5:1', 'type': 't5', @@ -221,15 +234,17 @@ def test_store_null_numeric_value_as_null(): 'value': None } } - notify(entity) + notify(service, entity) - attr_values_map = get_all_stored_attributes(entity['id']) + attr_values_map = get_all_stored_attributes(service, entity['id']) assert len(attr_values_map) == 2 assert attr_values_map['x'] == [None] delete_entity_type(service, 't5') -def 
test_store_empty_numeric_value_as_null(): + +@pytest.mark.parametrize("service", services) +def test_store_empty_numeric_value_as_null(service): entity = { 'id': 't6:1', 'type': 't6', @@ -242,9 +257,9 @@ def test_store_empty_numeric_value_as_null(): 'value': '' } } - notify(entity) + notify(service, entity) - attr_values_map = get_all_stored_attributes(entity['id']) + attr_values_map = get_all_stored_attributes(service, entity['id']) assert len(attr_values_map) == 2 assert attr_values_map['x'] == [None] delete_entity_type(service, 't6') diff --git a/src/reporter/tests/test_integration.py b/src/reporter/tests/test_integration.py deleted file mode 100644 index 9099be3d..00000000 --- a/src/reporter/tests/test_integration.py +++ /dev/null @@ -1,115 +0,0 @@ -from conftest import QL_URL, ORION_URL, entity, clean_mongo -import json -import time -import requests -from .utils import delete_entity_type - -def test_integration(entity, clean_mongo): - # Subscribe QL to Orion - params = { - 'orionUrl': ORION_URL, - 'quantumleapUrl': QL_URL, - } - r = requests.post("{}/subscribe".format(QL_URL), params=params) - assert r.status_code == 201 - - # Insert values in Orion - h = {'Content-Type': 'application/json'} - data = json.dumps(entity) - r = requests.post('{}/entities'.format(ORION_URL), data=data, headers=h) - assert r.ok - time.sleep(1) - - # Update values in Orion - for i in range(1, 4): - attrs = { - 'temperature': { - 'value': entity['temperature']['value'] + i, - 'type': 'Number', - }, - 'pressure': { - 'value': entity['pressure']['value'] + i, - 'type': 'Number', - }, - } - endpoint = '{}/entities/{}/attrs'.format(ORION_URL, entity['id']) - r = requests.patch(endpoint, data=json.dumps(attrs), headers=h) - assert r.ok - time.sleep(1) - - # Query in Quantumleap - query_params = { - 'type': entity['type'], - } - query_url = "{qlUrl}/entities/{entityId}".format( - qlUrl=QL_URL, - entityId=entity['id'], - ) - r = requests.get(query_url, params=query_params) - assert r.status_code == 200, r.text - data = r.json() - assert len(data['index']) > 1 - assert len(data['attributes']) == 2 - - # Note some notifications may have been lost - pressures = data['attributes'][0]['values'] - assert set(pressures).issubset(set([720.0, 721.0, 722.0, 723.0])) - temperatures = data['attributes'][1]['values'] - assert set(temperatures).issubset(set([24.2, 25.2, 26.2, 27.2])) - delete_entity_type(None, entity['type']) - - -def test_integration_custom_index(entity, clean_mongo): - # Subscribe QL to Orion - params = { - 'orionUrl': ORION_URL, - 'quantumleapUrl': QL_URL, - 'timeIndexAttribute': 'myCustomIndex' - } - r = requests.post("{}/subscribe".format(QL_URL), params=params) - assert r.status_code == 201 - - # Insert values in Orion - entity['myCustomIndex'] = { - 'value': '2019-08-22T18:22:00', - 'type': 'DateTime', - 'metadata': {} - } - entity.pop('temperature') - entity.pop('pressure') - - data = json.dumps(entity) - h = {'Content-Type': 'application/json'} - r = requests.post('{}/entities'.format(ORION_URL), data=data, headers=h) - assert r.ok - time.sleep(1) - - # Update values in Orion - for i in range(1, 4): - attrs = { - 'myCustomIndex': { - 'value': '2019-08-22T18:22:0{}'.format(i), - 'type': 'DateTime', - }, - } - endpoint = '{}/entities/{}/attrs'.format(ORION_URL, entity['id']) - r = requests.patch(endpoint, data=json.dumps(attrs), headers=h) - assert r.ok - time.sleep(1) - - # Query in Quantumleap - query_params = { - 'type': entity['type'], - } - query_url = "{qlUrl}/entities/{entityId}".format( - 
qlUrl=QL_URL, - entityId=entity['id'], - ) - r = requests.get(query_url, params=query_params) - assert r.status_code == 200, r.text - - data = r.json() - # Note some notifications may have been lost - assert data['attributes'][0]['values'] == data['index'] - assert len(data['index']) > 1 - delete_entity_type(None, entity['type']) diff --git a/src/reporter/tests/test_multitenancy.py b/src/reporter/tests/test_multitenancy.py index 6a092cbf..5955e94a 100644 --- a/src/reporter/tests/test_multitenancy.py +++ b/src/reporter/tests/test_multitenancy.py @@ -12,18 +12,23 @@ results if used with "/eu/greece" or any other deviation from the path used at insertion. """ -from conftest import QL_URL, ORION_URL, entity, clean_mongo, clean_crate +from conftest import QL_URL, ORION_URL, entity, clean_mongo +from reporter.tests.utils import delete_test_data import json import time import requests +import pytest -def test_integration_with_orion(clean_mongo, clean_crate, entity): +services = ['t1', 't2'] + +@pytest.mark.parametrize("service", services) +def test_integration_with_orion(clean_mongo, service, entity): """ Make sure QL correctly handles headers in Orion's notification """ h = { 'Content-Type': 'application/json', - 'Fiware-Service': 'myservice', + 'Fiware-Service': service, 'Fiware-ServicePath': '/', } @@ -59,3 +64,4 @@ def test_integration_with_orion(clean_mongo, clean_crate, entity): # Query WITHOUT headers r = requests.get(url, params=query_params) assert r.status_code == 404, r.text + delete_test_data(service, ["Room"]) diff --git a/src/reporter/tests/test_notify.py b/src/reporter/tests/test_notify.py index 3a1679ef..1b2c1d7d 100644 --- a/src/reporter/tests/test_notify.py +++ b/src/reporter/tests/test_notify.py @@ -15,7 +15,8 @@ SLEEP_TIME = 1 -def query_url(entity_type='Room', eid='Room1', attr_name='temperature', values=False): +def query_url(entity_type='Room', eid='Room1', attr_name='temperature', + values=False): url = "{qlUrl}/entities/{entityId}/attrs/{attrName}" if values: url += '/value' @@ -142,7 +143,8 @@ def test_valid_no_modified(notification, service): delete_entity_type(service, 'Room') -def do_integration(entity, subscription, orion_client, service=None, service_path=None): +def do_integration(entity, subscription, orion_client, service=None, + service_path=None): orion_client.subscribe(subscription, service, service_path) time.sleep(SLEEP_TIME) @@ -151,7 +153,9 @@ def do_integration(entity, subscription, orion_client, service=None, service_pat entities_url = "{}/entities".format(QL_URL) - r = requests.get(entities_url, params=None, headers=None) + h = headers(service=service, service_path=service_path, content_type=False) + + r = requests.get(entities_url, params=None, headers=h) assert r.status_code == 200 entities = r.json() assert len(entities) == 1 @@ -159,10 +163,11 @@ def do_integration(entity, subscription, orion_client, service=None, service_pat assert entities[0]['id'] == entity['id'] assert entities[0]['type'] == entity['type'] - delete_entity_type(None, entity['type']) + delete_entity_type(service, entity['type'], service_path) -def test_integration(entity, orion_client): +@pytest.mark.parametrize("service", services) +def test_integration(service, entity, orion_client): """ Test Reporter using input directly from an Orion notification and output directly to Cratedb. 
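
For reference, the multi-tenant requests in these tests hinge on the FIWARE tenancy headers. A minimal sketch of a header builder in the spirit of the `headers()` helper called above (the project's real helper may differ in detail):

def headers(service=None, service_path=None, content_type=True):
    # Build the HTTP headers QuantumLeap uses to scope a request to a tenant.
    h = {}
    if content_type:
        h['Content-Type'] = 'application/json'
    if service:
        h['Fiware-Service'] = service
    if service_path:
        h['Fiware-ServicePath'] = service_path
    return h
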
@@ -193,10 +198,11 @@ def test_integration(entity, orion_client): }, "throttling": 1, } - do_integration(entity, subscription, orion_client) + do_integration(entity, subscription, orion_client, service, "/") -def test_air_quality_observed(air_quality_observed, orion_client): +@pytest.mark.parametrize("service", services) +def test_air_quality_observed(service, air_quality_observed, orion_client): entity = air_quality_observed subscription = { "description": "Test subscription", @@ -219,16 +225,17 @@ def test_air_quality_observed(air_quality_observed, orion_client): "metadata": ["dateCreated", "dateModified"] } } - do_integration(entity, subscription, orion_client) + do_integration(entity, subscription, orion_client, service, "/") -def test_integration_multiple_entities(diffEntityWithDifferentAttrs, orion_client): +@pytest.mark.parametrize("service", services) +def test_integration_multiple_entities(service, diffEntityWithDifferentAttrs, + orion_client): """ Test Reporter using input directly from an Orion notification and output directly to Cratedb. """ - subscription = { "description": "Integration Test subscription", "subject": { @@ -255,19 +262,167 @@ def test_integration_multiple_entities(diffEntityWithDifferentAttrs, orion_clien }, "throttling": 1, } - orion_client.subscribe(subscription, "service", "/Root/#") + orion_client.subscribe(subscription, service, "/Root/#") for idx, e in enumerate(diffEntityWithDifferentAttrs): - orion_client.insert(e, "service", "/Root/{}".format(idx)) + orion_client.insert(e, service, "/Root/{}".format(idx)) time.sleep(10 * SLEEP_TIME) # Give time for notification to be processed. entities_url = "{}/entities".format(QL_URL) - r = requests.get(entities_url, params=None, headers=query_header("service", "/Root")) + r = requests.get(entities_url, params=None, + headers=query_header(service, "/Root")) assert r.status_code == 200 entities = r.json() assert len(entities) == 3 - delete_entity_type("service", diffEntityWithDifferentAttrs[0]['type'], "/Root") + delete_entity_type(service, diffEntityWithDifferentAttrs[0]['type'], + "/Root") + + +@pytest.mark.skip("weird") +@pytest.mark.parametrize("service", services) +def test_integration_multiple_values(service, entity, orion_client, + clean_mongo): + subscription = { + "description": "Integration Test subscription", + "subject": { + "entities": [ + { + "id": entity['id'], + "type": "Room" + } + ], + "condition": { + "attrs": [] # all attributes + } + }, + "notification": { + "http": { + "url": notify_url + }, + "attrs": [], # all attributes + "metadata": ["dateCreated", "dateModified"] + }, + "throttling": 1, + } + + orion_client.subscribe(subscription, service, '/') + time.sleep(SLEEP_TIME) + + orion_client.insert(entity, service, '/') + time.sleep(4 * SLEEP_TIME) # Give time for notification to be processed. 
+ + # Update values in Orion + for i in range(1, 4): + attrs = { + 'temperature': { + 'value': entity['temperature']['value'] + i, + 'type': 'Number', + }, + 'pressure': { + 'value': entity['pressure']['value'] + i, + 'type': 'Number', + }, + } + orion_client.update_attr(entity['id'], attrs, service, '/') + time.sleep(1) + + # Query in Quantumleap + query_params = { + 'type': entity['type'], + } + query_url = "{qlUrl}/entities/{entityId}".format( + qlUrl=QL_URL, + entityId=entity['id'], + ) + r = requests.get(query_url, params=query_params, + headers=query_header(service, "/")) + assert r.status_code == 200, r.text + data = r.json() + assert len(data['index']) > 1 + assert len(data['attributes']) == 2 + + # Note some notifications may have been lost + pressures = data['attributes'][0]['values'] + assert set(pressures).issubset(set([720.0, 721.0, 722.0, 723.0])) + temperatures = data['attributes'][1]['values'] + assert set(temperatures).issubset(set([24.2, 25.2, 26.2, 27.2])) + delete_entity_type(service, entity['type'], "/") + + +@pytest.mark.skip("weird") +@pytest.mark.parametrize("service", services) +def test_integration_custom_index(service, entity, orion_client, clean_mongo): + subscription = { + "description": "Integration Test subscription", + "subject": { + "entities": [ + { + "id": entity['id'], + "type": "Room" + } + ], + "condition": { + "attrs": [] # all attributes + } + }, + "notification": { + "httpCustom": { + "url": notify_url, + "headers": { + "Fiware-TimeIndex-Attribute": "myCustomIndex" + }, + }, + "attrs": [], # all attributes + "metadata": ["dateCreated", "dateModified"] + }, + "throttling": 1, + } + + orion_client.subscribe(subscription, service, '/') + time.sleep(SLEEP_TIME) + + # Insert values in Orion + entity['myCustomIndex'] = { + 'value': '2019-08-22T18:22:00', + 'type': 'DateTime', + 'metadata': {} + } + entity.pop('temperature') + entity.pop('pressure') + + orion_client.insert(entity, service, '/') + time.sleep(4 * SLEEP_TIME) # Give time for notification to be processed. 
+ + # Update values in Orion + for i in range(1, 4): + attrs = { + 'myCustomIndex': { + 'value': '2019-08-22T18:22:0{}'.format(i), + 'type': 'DateTime', + }, + } + orion_client.update_attr(entity['id'], attrs, service, '/') + time.sleep(1) + + # Query in Quantumleap + query_params = { + 'type': entity['type'], + } + query_url = "{qlUrl}/entities/{entityId}".format( + qlUrl=QL_URL, + entityId=entity['id'], + ) + r = requests.get(query_url, params=query_params, + headers=query_header(service, "/")) + assert r.status_code == 200, r.text + + data = r.json() + # Note some notifications may have been lost + assert data['attributes'][0]['values'] == data['index'] + assert len(data['index']) > 1 + delete_entity_type(service, entity['type'], '/') + @pytest.mark.skip(reason="See issue #105") @pytest.mark.parametrize("service", services) @@ -308,11 +463,12 @@ def test_geocoding(service, notification): lon, lat = entities[0]['location']['values'][0].split(',') assert float(lon) == pytest.approx(60.1707129, abs=1e-2) assert float(lat) == pytest.approx(24.9412167, abs=1e-2) - delete_entity_type(service, notification['data'][0]['type']) + delete_entity_type(service, notification['data'][0]['type']) @pytest.mark.parametrize("service", services) -def test_multiple_data_elements(service, notification, diffEntityWithDifferentAttrs): +def test_multiple_data_elements(service, notification, + diffEntityWithDifferentAttrs): """ Test that the notify API can process notifications containing multiple elements in the data array. """ @@ -331,7 +487,9 @@ def test_multiple_data_elements(service, notification, diffEntityWithDifferentAt @pytest.mark.parametrize("service", services) -def test_multiple_data_elements_invalid_different_servicepath(service, notification, diffEntityWithDifferentAttrs): +def test_multiple_data_elements_invalid_different_servicepath(service, + notification, + diffEntityWithDifferentAttrs): """ Test that the notify API can process notifications containing multiple elements in the data array and different fiwareServicePath. @@ -339,7 +497,8 @@ def test_multiple_data_elements_invalid_different_servicepath(service, notificat notify_headers = notify_header(service) - notify_headers['Fiware-ServicePath'] = '/Test/Path1, /Test/Path1, /Test/Path2, /Test/Path3' + notify_headers[ + 'Fiware-ServicePath'] = '/Test/Path1, /Test/Path1, /Test/Path2, /Test/Path3' notification['data'] = diffEntityWithDifferentAttrs @@ -350,7 +509,8 @@ def test_multiple_data_elements_invalid_different_servicepath(service, notificat @pytest.mark.parametrize("service", services) -def test_multiple_data_elements_different_servicepath(service, notification, diffEntityWithDifferentAttrs): +def test_multiple_data_elements_different_servicepath(service, notification, + diffEntityWithDifferentAttrs): """ Test that the notify API can process notifications containing multiple elements in the data array and different fiwareServicePath. 
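
To make the multi-path case concrete, a hedged sketch of the notification shape these tests exercise (entity ids and paths below are illustrative, not the actual fixtures): one body carrying several entities, with a comma-separated Fiware-ServicePath header holding one path per data element.

import json

notification = {
    'data': [
        {'id': 'Room1', 'type': 'Room'},
        {'id': 'Room2', 'type': 'Room'},
        {'id': 'Room3', 'type': 'Room'},
    ]
}
headers = {
    'Content-Type': 'application/json',
    'Fiware-Service': 't1',
    # one path per element in data; the "invalid" variant sends a mismatched count
    'Fiware-ServicePath': '/Test/Path1, /Test/Path2, /Test/Path3',
}
body = json.dumps(notification)  # payload POSTed to the notify endpoint
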
@@ -358,7 +518,8 @@ def test_multiple_data_elements_different_servicepath(service, notification, dif notify_headers = notify_header(service) - notify_headers['Fiware-ServicePath'] = '/Test/Path1, /Test/Path1, /Test/Path2' + notify_headers[ + 'Fiware-ServicePath'] = '/Test/Path1, /Test/Path1, /Test/Path2' query_headers = query_header(service) @@ -372,17 +533,19 @@ def test_multiple_data_elements_different_servicepath(service, notification, dif assert r.json().startswith('Notification successfully processed') entities_url = "{}/entities".format(QL_URL) - time.sleep(2*SLEEP_TIME) + time.sleep(2 * SLEEP_TIME) r = requests.get(entities_url, params=None, headers=query_headers) entities = r.json() assert len(entities) == 3 - delete_entity_type(service, diffEntityWithDifferentAttrs[0]['type'], '/Test') + delete_entity_type(service, diffEntityWithDifferentAttrs[0]['type'], + '/Test') @pytest.mark.parametrize("service", services) def test_time_index(service, notification): # If present, use entity-level dateModified as time_index - global_modified = datetime(2000, 1, 2, 0, 0, 0, 0, timezone.utc).isoformat() + global_modified = datetime(2000, 1, 2, 0, 0, 0, 0, + timezone.utc).isoformat() modified = { 'type': 'DateTime', 'value': global_modified @@ -460,7 +623,8 @@ def test_no_value_in_notification(service, notification): 'pm25': {'type': 'string', 'value': '5', 'metadata': {}}, } url = '{}'.format(notify_url) - r = requests.post(url, data=json.dumps(notification), headers=notify_header(service)) + r = requests.post(url, data=json.dumps(notification), + headers=notify_header(service)) assert r.status_code == 200 # Empty value @@ -472,7 +636,8 @@ def test_no_value_in_notification(service, notification): 'pm25': {'type': 'string', 'value': '', 'metadata': {}}, } url = '{}'.format(notify_url) - r = requests.post(url, data=json.dumps(notification), headers=notify_header(service)) + r = requests.post(url, data=json.dumps(notification), + headers=notify_header(service)) assert r.status_code == 200 delete_entity_type(service, notification['data'][0]['type']) @@ -490,7 +655,8 @@ def test_no_value_for_attributes(service, notification): url = '{}'.format(notify_url) get_url = "{}/entities/299531".format(QL_URL) url_new = '{}'.format(get_url) - r = requests.post(url, data=json.dumps(notification), headers=notify_header(service)) + r = requests.post(url, data=json.dumps(notification), + headers=notify_header(service)) assert r.status_code == 200 # Give time for notification to be processed time.sleep(SLEEP_TIME) @@ -507,7 +673,8 @@ def test_no_value_for_attributes(service, notification): url = '{}'.format(notify_url) get_url = "{}/entities/299531/attrs/p/value".format(QL_URL) url_new = '{}'.format(get_url) - r = requests.post(url, data=json.dumps(notification), headers=notify_header(service)) + r = requests.post(url, data=json.dumps(notification), + headers=notify_header(service)) assert r.status_code == 200 # Give time for notification to be processed time.sleep(SLEEP_TIME) @@ -523,7 +690,8 @@ def test_no_value_for_attributes(service, notification): url = '{}'.format(notify_url) get_url_new = "{}/entities/299531/attrs/pm10/value".format(QL_URL) url_new = '{}'.format(get_url_new) - r = requests.post(url, data=json.dumps(notification), headers=notify_header(service)) + r = requests.post(url, data=json.dumps(notification), + headers=notify_header(service)) assert r.status_code == 200 # Give time for notification to be processed time.sleep(SLEEP_TIME) @@ -546,7 +714,8 @@ def 
test_no_value_no_type_for_attributes(service, notification): url = '{}'.format(notify_url) get_url = "{}/entities/Room1/attrs/temperature/value".format(QL_URL) url_new = '{}'.format(get_url) - r = requests.post(url, data=json.dumps(notification), headers=notify_header(service)) + r = requests.post(url, data=json.dumps(notification), + headers=notify_header(service)) assert r.status_code == 200 # Give time for notification to be processed time.sleep(SLEEP_TIME) @@ -568,7 +737,8 @@ def test_no_value_no_type_for_attributes(service, notification): url = '{}'.format(notify_url) get_url = "{}/entities/Room1/attrs/temperature/value".format(QL_URL) url_new = '{}'.format(get_url) - r = requests.post(url, data=json.dumps(notification), headers=notify_header(service)) + r = requests.post(url, data=json.dumps(notification), + headers=notify_header(service)) assert r.status_code == 200 # Give time for notification to be processed time.sleep(SLEEP_TIME) @@ -591,7 +761,8 @@ def test_with_value_no_type_for_attributes(service, notification): url = '{}'.format(notify_url) get_url = "{}/entities/Kitchen1/attrs/temperature/value".format(QL_URL) url_new = '{}'.format(get_url) - r = requests.post(url, data=json.dumps(notification), headers=notify_header(service)) + r = requests.post(url, data=json.dumps(notification), + headers=notify_header(service)) assert r.status_code == 200 # Give time for notification to be processed time.sleep(SLEEP_TIME) @@ -614,7 +785,8 @@ def test_no_value_with_type_for_attributes(service, notification): url = '{}'.format(notify_url) get_url = "{}/entities/Hall1/attrs/temperature/value".format(QL_URL) url_new = '{}'.format(get_url) - r = requests.post(url, data=json.dumps(notification), headers=notify_header(service)) + r = requests.post(url, data=json.dumps(notification), + headers=notify_header(service)) assert r.status_code == 200 # Give time for notification to be processed time.sleep(SLEEP_TIME) @@ -627,7 +799,7 @@ def test_no_value_with_type_for_attributes(service, notification): @pytest.mark.parametrize("service", services) def test_issue_382(service, notification): # entity with one Null value and no type - notification['data'][0] = { + notification['data'][0] = { "id": "urn:ngsi-ld:Test:0002", "type": "Test", "errorNumber": { @@ -647,9 +819,11 @@ def test_issue_382(service, notification): } } url = '{}'.format(notify_url) - get_url = "{}/entities/urn:ngsi-ld:Test:0002/attrs/errorNumber/value".format(QL_URL) + get_url = "{}/entities/urn:ngsi-ld:Test:0002/attrs/errorNumber/value".format( + QL_URL) url_new = '{}'.format(get_url) - r = requests.post(url, data=json.dumps(notification), headers=notify_header(service)) + r = requests.post(url, data=json.dumps(notification), + headers=notify_header(service)) assert r.status_code == 200 # Give time for notification to be processed time.sleep(SLEEP_TIME) @@ -658,6 +832,7 @@ def test_issue_382(service, notification): assert res_get.json()['values'][0] == 2 delete_entity_type(service, notification['data'][0]['type']) + @pytest.mark.parametrize("service", services) def test_json_ld(service, notification): # entity with one Null value and no type @@ -720,13 +895,15 @@ def test_json_ld(service, notification): ] } url = '{}'.format(notify_url) - get_url = "{}/entities/urn:ngsi-ld:Streetlight:streetlight:guadalajara:4567/attrs/lanternHeight/value".format(QL_URL) + get_url = "{}/entities/urn:ngsi-ld:Streetlight:streetlight:guadalajara:4567/attrs/lanternHeight/value".format( + QL_URL) url_new = '{}'.format(get_url) - r = requests.post(url, 
data=json.dumps(notification), headers=notify_header(service)) + r = requests.post(url, data=json.dumps(notification), + headers=notify_header(service)) assert r.status_code == 200 # Give time for notification to be processed time.sleep(SLEEP_TIME) res_get = requests.get(url_new, headers=query_header(service)) assert res_get.status_code == 200 assert res_get.json()['values'][0] == 10 - delete_entity_type(service, notification['data'][0]['type']) \ No newline at end of file + delete_entity_type(service, notification['data'][0]['type']) diff --git a/src/reporter/tests/test_sql_injection.py b/src/reporter/tests/test_sql_injection.py index 0b43781d..df97566a 100644 --- a/src/reporter/tests/test_sql_injection.py +++ b/src/reporter/tests/test_sql_injection.py @@ -3,8 +3,7 @@ import requests import time import urllib -from .utils import send_notifications - +from .utils import send_notifications, delete_entity_type entity_type = 'TestDevice' @@ -55,7 +54,7 @@ def manage_db_entities(): yield - do_clean_crate() + delete_entity_type(service, entity_type) def query_sql(service, entity_id, query_params, response_code): diff --git a/src/reporter/tests/test_time_format.py b/src/reporter/tests/test_time_format.py index 0e27ec3b..07d70713 100644 --- a/src/reporter/tests/test_time_format.py +++ b/src/reporter/tests/test_time_format.py @@ -1,12 +1,14 @@ -from conftest import crate_translator as translator from reporter.tests.test_1T1E1A import query_url as query_1T1E1A, \ assert_1T1E1A_response from reporter.tests.utils import get_notification, send_notifications, delete_entity_type import requests import time +import pytest +services = ['t1', 't2'] -def check_time_index(input_index, expected_index=None): + +def check_time_index(service, input_index, expected_index=None): expected_index = expected_index or input_index n0 = get_notification('Room', 'Room0', 0, input_index[0]) @@ -32,7 +34,8 @@ def check_time_index(input_index, expected_index=None): delete_entity_type('', 'Room') -def test_index_iso(translator): +@pytest.mark.parametrize("service", services) +def test_index_iso(service): # If notifications use time-zone info, QL still responds in UTC input_index = [ '2010-10-10T09:09:00.792', @@ -44,10 +47,11 @@ def test_index_iso(translator): '2010-10-10T07:09:01.792+00:00', '2010-10-10T07:09:02.792+00:00', ] - check_time_index(input_index, expected_index) + check_time_index(service, input_index, expected_index) -def test_index_iso_with_time_zone(translator): +@pytest.mark.parametrize("service", services) +def test_index_iso_with_time_zone(service): # If notifications use time-zone info, QL still responds in UTC # #97: Make it return time info used in input. 
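# Illustrative of the UTC normalisation these fixtures encode (values are
# examples, not the fixture data): a stamp carrying a +02:00 offset comes
# back with a +00:00 offset.
import dateutil.parser
from datetime import timezone
local = dateutil.parser.isoparse('2010-10-10T09:09:00.792+02:00')
assert local.astimezone(timezone.utc).isoformat(timespec='milliseconds') == \
    '2010-10-10T07:09:00.792+00:00'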
input_index = [ @@ -60,4 +64,4 @@ def test_index_iso_with_time_zone(translator): '2010-10-10T07:09:01.792+00:00', '2010-10-10T07:09:02.792+00:00', ] - check_time_index(input_index, expected_index) + check_time_index(service, input_index, expected_index) diff --git a/src/tests/docker-compose.yml b/src/tests/docker-compose.yml index 7f72dab3..f2afd046 100644 --- a/src/tests/docker-compose.yml +++ b/src/tests/docker-compose.yml @@ -59,5 +59,3 @@ volumes: networks: default: - driver_opts: - com.docker.network.driver.mtu: ${DOCKER_MTU:-1400} diff --git a/src/translators/crate.py b/src/translators/crate.py index eaa53037..0ad336be 100644 --- a/src/translators/crate.py +++ b/src/translators/crate.py @@ -47,6 +47,7 @@ class CrateTranslator(sql_translator.SQLTranslator): def __init__(self, host, port=4200, db_name="ngsi-tsdb"): super(CrateTranslator, self).__init__(host, port, db_name) self.logger = logging.getLogger(__name__) + self.dbCacheName = 'crate' self.ccm = None self.connection = None self.cursor = None @@ -94,7 +95,7 @@ def sql_error_handler(self, exception): def get_db_version(self): stmt = "select version['number'] from sys.nodes" - res = self._execute_query_via_cache("crate", + res = self._execute_query_via_cache(self.dbCacheName, "dbversion", stmt, None, 6000) return res[0][0] diff --git a/src/translators/sql_translator.py b/src/translators/sql_translator.py index 98ee68ea..55b25141 100644 --- a/src/translators/sql_translator.py +++ b/src/translators/sql_translator.py @@ -127,6 +127,7 @@ def __init__(self, host, port, db_name): if self.cache: self.default_ttl = self.cache.default_ttl self.start_time = datetime.now() + self.dbCacheName = 'sql' def dispose(self): dt = datetime.now() - self.start_time @@ -437,7 +438,7 @@ def _insert_original_entities_in_failed_batch( self.cursor.executemany(stmt, rows) def _attr_is_structured(self, a): - if a['value'] is not None and isinstance(a['value'], dict): + if 'value' in a and a['value'] is not None and isinstance(a['value'], dict): self.logger.debug("attribute {} has 'value' attribute of type dict" .format(a)) return True @@ -474,9 +475,9 @@ def _update_metadata_table(self, table_name, metadata): The dict mapping the matedata of each column. See original_attrs. 
""" - if not self._is_query_in_cache("quantumleap", METADATA_TABLE_NAME): + if not self._is_query_in_cache(self.dbCacheName, METADATA_TABLE_NAME): self._create_metadata_table() - self._cache("quantumleap", + self._cache(self.dbCacheName, METADATA_TABLE_NAME, None, self.default_ttl) @@ -487,7 +488,7 @@ def _update_metadata_table(self, table_name, metadata): # By design, one entry per table_name try: - res = self._execute_query_via_cache("quantumleap", + res = self._execute_query_via_cache(self.dbCacheName, table_name, stmt, [table_name], @@ -508,7 +509,7 @@ def _update_metadata_table(self, table_name, metadata): update = dict((k, metadata[k]) for k in diff if k in metadata) persisted_metadata.update(update) self._store_metadata(table_name, persisted_metadata) - self._cache("quantumleap", + self._cache(self.dbCacheName, table_name, [[persisted_metadata]], self.default_ttl) @@ -1155,7 +1156,7 @@ def delete_entities(self, etype, eid=None, from_date=None, to_date=None, key = None if fiware_service: key = fiware_service.lower() - self._remove_from_cache("quantumleap", table_name) + self._remove_from_cache(self.dbCacheName, table_name) self._remove_from_cache(key, "tableNames") return self.cursor.rowcount except Exception as e: @@ -1176,7 +1177,7 @@ def drop_table(self, etype, fiware_service=None): op = "delete from {} where table_name = ?".format(METADATA_TABLE_NAME) try: self.cursor.execute(op, [table_name]) - self._remove_from_cache("quantumleap", table_name) + self._remove_from_cache(self.dbCacheName, table_name) key = None if fiware_service: key = fiware_service.lower() diff --git a/src/translators/tests/original_data_scenarios.py b/src/translators/tests/original_data_scenarios.py index f9f34956..9fb1ced1 100644 --- a/src/translators/tests/original_data_scenarios.py +++ b/src/translators/tests/original_data_scenarios.py @@ -11,10 +11,10 @@ TYPE_PREFIX, TENANT_PREFIX from utils.jsondict import maybe_value - ENTITY_TYPE = 'device' TranslatorFactory = Callable[[], Generator[SQLTranslator, Any, None]] + # # NOTE. 
Each test scenario gets a (sort of) unique tenant so that we won't # have to clean up the DB after each test, which would slow down the whole @@ -23,7 +23,7 @@ def gen_tenant_id() -> str: - tid = random.randint(1, 2**32) + tid = random.randint(1, 2 ** 32) return f"tenant{tid}" @@ -55,9 +55,9 @@ def assert_saved_original(actual_row, original_entity, def assert_inserted_entity(actual_row, original_entity): assert actual_row['a_number'] == \ - maybe_value(original_entity, 'a_number', 'value') + maybe_value(original_entity, 'a_number', 'value') assert actual_row['an_attr'] == \ - maybe_value(original_entity, 'an_attr', 'value') + maybe_value(original_entity, 'an_attr', 'value') assert actual_row[ORIGINAL_ENTITY_COL] is None @@ -82,6 +82,9 @@ def __init__(self, translator: TranslatorFactory, cursor, self.cursor = cursor self.delay_query_by = delay_query_by + def get_translator(self): + return self.translator + def insert_entities(self, tenant: str, entities: List[dict]): with self.translator() as t: t.insert(entities, fiware_service=tenant) diff --git a/src/translators/tests/test_crate_aggregation.py b/src/translators/tests/test_aggregation.py similarity index 74% rename from src/translators/tests/test_crate_aggregation.py rename to src/translators/tests/test_aggregation.py index fd9cf0b1..d2249f5c 100644 --- a/src/translators/tests/test_crate_aggregation.py +++ b/src/translators/tests/test_aggregation.py @@ -1,8 +1,23 @@ -from conftest import crate_translator as translator +# To test a single translator use the -k parameter followed by either +# timescale or crate. +# See https://docs.pytest.org/en/stable/example/parametrize.html + +from conftest import crate_translator, timescale_translator from utils.common import create_random_entities, TIME_INDEX_NAME, add_attr import datetime +from src.utils.common import create_random_entities + +import pytest + + +translators = [ + pytest.lazy_fixture('crate_translator'), + pytest.lazy_fixture('timescale_translator') +] + +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) def test_aggr_per_second(translator): entities = create_random_entities(num_ids_per_type=2, num_updates=17) assert len(entities) == 34 @@ -47,4 +62,4 @@ def test_aggr_per_second(translator): 'values': [5, 15, 25, 32], } } - translator.clean() \ No newline at end of file + translator.clean() diff --git a/src/translators/tests/test_crate.py b/src/translators/tests/test_crate.py index 32cd2d9f..38c620ae 100644 --- a/src/translators/tests/test_crate.py +++ b/src/translators/tests/test_crate.py @@ -1,6 +1,4 @@ -from exceptions.exceptions import AmbiguousNGSIIdError -from translators.base_translator import BaseTranslator -from translators.crate import NGSI_TEXT +from translators.sql_translator import METADATA_TABLE_NAME, TYPE_PREFIX from conftest import crate_translator as translator, entity from utils.common import * from datetime import datetime, timezone @@ -14,396 +12,6 @@ def test_db_version(translator): assert major >= 3 -def test_insert(translator): - entities = create_random_entities(1, 2, 3, use_time=True, use_geo=True) - result = translator.insert(entities) - assert result.rowcount > 0 - translator.clean() - - -def test_insert_entity(translator, entity): - now = datetime.now(timezone.utc) - now_iso = now.isoformat(timespec='milliseconds') - entity[BaseTranslator.TIME_INDEX_NAME] = now_iso - - result = translator.insert([entity]) - assert result.rowcount != 0 - - loaded_entities = translator.query() - assert len(loaded_entities) == 1 - - 
check_notifications_record([entity], loaded_entities) - translator.clean() - -def test_insert_same_entity_with_different_attrs( translator, sameEntityWithDifferentAttrs ): - """ - Test that the CrateTranslator can insert entity updates that are of the same type but have different attributes. - """ - # Add time index to the updates. Use the dateModified meta data attribute of temperature. - for entity in sameEntityWithDifferentAttrs: - entity[BaseTranslator.TIME_INDEX_NAME] = entity['temperature']['metadata']['dateModified']['value'] - - result = translator.insert( sameEntityWithDifferentAttrs ) - assert result.rowcount != 0 - - loaded_entities = translator.query() - assert len(loaded_entities) == 1 - - check_notifications_record( sameEntityWithDifferentAttrs, loaded_entities) - translator.clean() - -def test_insert_multiple_types(translator): - args = { - 'num_types': 3, - 'num_ids_per_type': 2, - 'num_updates': 1, - 'use_time': True, - 'use_geo': True - } - entities = create_random_entities(**args) - result = translator.insert(entities) - assert result.rowcount > 0 - - # Again to check metadata handling works fine - entities = create_random_entities(**args) - result = translator.insert(entities) - assert result.rowcount > 0 - translator.clean() - - -def test_query_all_before_insert(translator): - # Query all - loaded_entities = translator.query() - assert len(loaded_entities) == 0 - - # Query Some - loaded_entities = translator.query(entity_type="Lamp", - fiware_service="openiot", - fiware_servicepath="/") - assert len(loaded_entities) == 0 - - # Query one - loaded_entities = translator.query(entity_id="Lamp:001", - fiware_service="openiot", - fiware_servicepath="/") - assert len(loaded_entities) == 0 - translator.clean() - - -def test_query_all(translator): - num_types = 2 - num_ids = 2 - num_updates = 2 - args = { - 'num_types': num_types, - 'num_ids_per_type': num_ids, - 'num_updates': num_updates, - 'use_time': True, - 'use_geo': True - } - entities = create_random_entities(**args) - result = translator.insert(entities) - assert result.rowcount > 0 - - loaded_entities = translator.query() - assert len(loaded_entities) == 2 * 2 - - for i in ['0-0', '0-1', '1-0', '1-1']: - notifications = [e for e in entities if e['id'] == i] - records = [e for e in loaded_entities if e['id'] == i] - check_notifications_record(notifications, records) - translator.clean() - - -def test_limit_0(translator): - entities = create_random_entities(num_updates=2) - result = translator.insert(entities) - assert result.rowcount > 0 - - loaded_entities = translator.query(last_n=0) - assert loaded_entities == [] - - loaded_entities = translator.query(limit=0) - assert loaded_entities == [] - translator.clean() - - -def test_limit_overrides_lastN(translator): - entities = create_random_entities(num_updates=7) - result = translator.insert(entities) - assert result.rowcount > 0 - - loaded_entities = translator.query(last_n=5, limit=3) - assert len(loaded_entities[0]['index']) == 3 - translator.clean() - - -def test_lastN_ordering(translator): - entities = create_random_entities(num_updates=5) - result = translator.insert(entities) - assert result.rowcount > 0 - - loaded_entities = translator.query(last_n=3) - index = loaded_entities[0]['index'] - assert len(index) == 3 - assert index[-1] > index[0] - translator.clean() - - -def test_attrs_by_entity_id(translator): - # First insert some data - num_updates = 10 - entities = create_random_entities(num_types=2, - num_ids_per_type=2, - num_updates=num_updates, - 
use_time=True, - use_geo=True) - translator.insert(entities) - - # Now query by entity id - entity_id = '0-1' - loaded_entities = translator.query(entity_type='0', entity_id=entity_id) - notifications = [e for e in entities - if e['type'] == '0' and e['id'] == '0-1'] - check_notifications_record(notifications, loaded_entities) - - # entity_type should be optional - entity_id = '1-1' - loaded_entities = translator.query(entity_id=entity_id) - notifications = [e for e in entities - if e['type'] == '1' and e['id'] == '1-1'] - check_notifications_record(notifications, loaded_entities) - - # nonexistent id should return no data - loaded_entities = translator.query(entity_id='some_nonexistent_id') - assert len(loaded_entities) == 0 - translator.clean() - - -def test_attrs_by_id_ambiguity(translator): - entities = create_random_entities(num_types=2, - num_ids_per_type=1, - num_updates=3) - for e in entities: - e['id'] = 'repeated_id' - - translator.insert(entities) - - # OK if specifying type - loaded_entities = translator.query(entity_type='0', entity_id='repeated_id') - assert len(loaded_entities[0]['index']) == 3 - assert len(loaded_entities) == 1 - - # NOT OK otherwise - with pytest.raises(AmbiguousNGSIIdError): - translator.query(entity_id='repeated_id') - translator.clean() - - -WITHIN_EAST_HEMISPHERE = "within(attr_geo, " \ - "'POLYGON ((0 -90, 180 -90, 180 90, 0 90, 0 -90))')" - - -def within_east_hemisphere(e): - return e["attr_geo"]["values"][0]["coordinates"][0] > 0 - - -def beyond_mid_epoch(e): - mid_epoch = datetime(1970, 6, 28).isoformat(timespec='milliseconds') - return e["attr_time"]["values"][0] > mid_epoch - - -@pytest.mark.parametrize("attr_name, clause, tester", [ - ("attr_bool", "= True", lambda e: e["attr_bool"]["values"][0]), - ("attr_str", "> 'M'", lambda e: e["attr_str"]["values"][0] > "M"), - ("attr_float", "< 0.5", lambda e: e["attr_float"]["values"][0] < 0.5), - ("attr_time", "> '1970-06-28T00:00'", beyond_mid_epoch), - (WITHIN_EAST_HEMISPHERE, "", within_east_hemisphere) -]) -def test_query_per_attribute(translator, attr_name, clause, tester): - num_types = 1 - num_ids_per_type = 2 - num_updates = 10 - - entities = create_random_entities(num_types, num_ids_per_type, num_updates, - use_time=True, use_geo=True) - translator.insert(entities) - - where_clause = "where {} {}".format(attr_name, clause) - entities = translator.query(entity_type='0', where_clause=where_clause) - - total = num_types * num_ids_per_type * num_updates - - assert len(entities) > 0, "No entities where found " \ - "with the clause: {}{}".format(attr_name, clause) - assert len(entities) < total, "All entities matched the clause. 
" \ - "Not expected from an " \ - "uniform random distribution" - assert all(map(tester, entities)) - translator.clean() - - -def test_unsupported_ngsi_type(translator): - e = { - "type": "SoMeWeIrDtYpE", - "id": "sOmEwEiRdId", - TIME_INDEX_NAME: datetime.now(timezone.utc).isoformat(timespec='milliseconds'), - "foo": { - "type": "IgnoreThisDefinitivelyNotValidNGSITypeMessage", - "value": "BaR", - }, - } - translator.insert([e]) - entities = translator.query() - check_notifications_record([e], entities) - translator.clean() - -def test_accept_unknown_ngsi_type(translator): - """ - test to validate issue #129 - automatic casting to NGSI data type - https://github.com/smartsdk/ngsi-timeseries-api/issues/129 - """ - e = { - "type": "SoMeWeIrDtYpE", - "id": "sOmEwEiRdId", - TIME_INDEX_NAME: datetime.now(timezone.utc).isoformat(timespec='milliseconds'), - "address": { - "type": "PostalAddress", - "value": { - "streetAddress": "18 Avenue Félix Faure", - "postalCode": "06000", - "addressLocality": "Nice", - "addressCountry": "France" - }, - }, - } - translator.insert([e]) - entities = translator.query() - check_notifications_record([e], entities) - translator.clean() - -def test_accept_special_chars(translator): - """ - test to validate issue #128 - attributes names and entity type containing '-' are not accepted by crateDB - https://github.com/smartsdk/ngsi-timeseries-api/issues/128 - """ - e = { - "type": "SoMe-WeIrD-tYpE", - "id": "sOmE:wEiRd.Id", - TIME_INDEX_NAME: datetime.now(timezone.utc).isoformat(timespec='milliseconds'), - "address": { - "type": "Address-Type", - "value": { - "streetAddress": "18 Avenue Félix Faure", - "postalCode": "06000", - "addressLocality": "Nice", - "addressCountry": "France" - }, - }, - } - translator.insert([e]) - entities = translator.query() - check_notifications_record([e], entities) - translator.clean() - -def test_missing_type_defaults_to_string(translator): - e = { - "type": "SoMeWeIrDtYpE", - "id": "sOmEwEiRdId", - TIME_INDEX_NAME: datetime.now(timezone.utc).isoformat(timespec='milliseconds'), - "foo": { - "value": "BaR", - }, - } - translator.insert([e]) - entities = translator.query() - assert len(entities) == 1 - - # Response will include the type - e["foo"]["type"] = NGSI_TEXT - check_notifications_record([e], entities) - translator.clean() - - -def test_capitals(translator): - entity_type = "SoMeWeIrDtYpE" - e1 = { - "type": entity_type, - "id": "sOmEwEiRdId", - TIME_INDEX_NAME: datetime.now(timezone.utc).isoformat(timespec='milliseconds'), - "Foo": { - "type": "Text", - "value": "FoO", - }, - "bAr": { - "type": "Text", - "value": "bAr", - }, - } - translator.insert([e1]) - entities = translator.query() - assert len(entities) == 1 - check_notifications_record([e1], entities) - - # If a new attribute comes later, I want it translated as well. 
- e2 = e1.copy() - e2['id'] = 'SOmEwEiRdId2' - e2['NewAttr'] = {"type": "Text", "value": "NewAttrValue!"} - e2[TIME_INDEX_NAME] = datetime.now(timezone.utc).isoformat(timespec='milliseconds') - - translator.insert([e2]) - entities = translator.query() - assert len(entities) == 2 - - assert entities[0]['id'] == e2['id'] - assert entities[0]['NewAttr']['values'] == [e2['NewAttr']['value']] - - # Note that old entity gets None for the new attribute - assert entities[1]['id'] == e1['id'] - assert entities[1]['NewAttr']['values'] == [None] - translator.clean() - - -@pytest.mark.filterwarnings("ignore") -def test_no_time_index(translator): - """ - The Reporter is responsible for injecting the 'time_index' attribute to the - entity. If for some reason there's no such index, the translator will add - one with current_time. - """ - e = { - 'id': 'entityId1', - 'type': 'type1', - 'foo': {'type': 'Text', 'value': "SomeText"} - } - translator.insert([e]) - records = translator.query() - assert len(records) == 1 - assert len(records[0]['index']) == 1 - translator.clean() - - -def test_long_json(translator): - # Github issue 44 - big_entity = { - 'id': 'entityId1', - 'type': 'type1', - TIME_INDEX_NAME: datetime.now(timezone.utc).isoformat(timespec='milliseconds'), - 'foo': { - 'type': 'Text', - 'value': "SomeTextThatWillGetLong" * 2000 - } - } - translator.insert([big_entity]) - - r = translator.query() - assert len(r) == 1 - check_notifications_record([big_entity], r) - translator.clean() - - def test_geo_point(translator): # Github issue #35: Support geo:point entity = { @@ -472,77 +80,28 @@ def test_geo_point_null_values(translator): translator.clean() -def test_structured_value_to_array(translator): - entity = { - 'id': '8906', - 'type': 'AirQualityObserved', - TIME_INDEX_NAME: datetime.now(timezone.utc).isoformat(timespec='milliseconds'), - 'aqi': {'type': 'Number', 'value': 43}, - 'city': {'type': 'Text', 'value': 'Antwerpen'}, - 'h': {'type': 'Number', 'value': 93}, - 'location': { - 'type': 'geo:point', - 'value': '51.2056589, 4.4180728', - }, - 'measurand': { - 'type': 'StructuredValue', - 'value': ['pm25, 43, ugm3, PM25', 'pm10, 30, ugm3, PM10', - 'p, 1012, hPa, Pressure'] - }, - 'p': {'type': 'Number', 'value': 1012}, - 'pm10': {'type': 'Number', 'value': 30}, - 'pm25': {'type': 'Number', 'value': 43}, - 't': {'type': 'Number', 'value': 8.33} - } - translator.insert([entity]) - - r = translator.query() - check_notifications_record([entity], r) - translator.clean() - - -def test_ISO8601(translator): +def test_default_replication(translator): """ - ISO8601 should be a valid type, equivalent to DateTime. 
- """ - e = { - "type": "MyType", - "id": "MyId", - TIME_INDEX_NAME: datetime.now(timezone.utc).isoformat(timespec='milliseconds'), - "iso_attr": { - "type": "ISO8601", - "value": "2018-03-20T13:26:38.722Z", - }, - } - translator.insert([e]) - - loaded = translator.query() - assert len(loaded) > 0 - check_notifications_record([e], loaded) - translator.clean() - - -################################################################################ -# FIWARE DATA MODELS -################################################################################ + By default there should be 2-all replicas -def test_air_quality_observed(translator, air_quality_observed): - # Add TIME_INDEX as Reporter would - now = datetime.now(timezone.utc).isoformat(timespec='milliseconds') - air_quality_observed[TIME_INDEX_NAME] = now - - translator.insert([air_quality_observed]) - loaded = translator.query() - check_notifications_record([air_quality_observed], loaded) - translator.clean() + https://crate.io/docs/crate/reference/en/latest/general/ddl/replication.html + """ + entities = create_random_entities(1, 2, 10) + entity = entities[0] + e_type = entity['type'] + translator.insert(entities) -def test_traffic_flow_observed(translator, traffic_flow_observed): - # Add TIME_INDEX as Reporter would - now = datetime.now(timezone.utc).isoformat(timespec='milliseconds') - traffic_flow_observed[TIME_INDEX_NAME] = now + et = '{}{}'.format(TYPE_PREFIX, e_type.lower()) + # same as in translator._et2tn but without double quotes + op = "select number_of_replicas from information_schema.tables where " \ + "table_name = '{}'" + translator.cursor.execute(op.format(et)) + res = translator.cursor.fetchall() + assert res[0] == ['2-all'] - translator.insert([traffic_flow_observed]) - loaded = translator.query() - check_notifications_record([traffic_flow_observed], loaded) + # Metadata table should also be replicated + translator.cursor.execute(op.format(METADATA_TABLE_NAME)) + res = translator.cursor.fetchall() + assert res[0] == ['2-all'] translator.clean() diff --git a/src/translators/tests/test_crate_original_data.py b/src/translators/tests/test_crate_original_data.py deleted file mode 100644 index 6f0db0f4..00000000 --- a/src/translators/tests/test_crate_original_data.py +++ /dev/null @@ -1,48 +0,0 @@ -from crate import client - -from translators.crate import CrateTranslatorInstance -from utils.cfgreader import * - -from .original_data_scenarios import * - - -@pytest.fixture(scope='module') -def with_crate(): - r = EnvReader(log=logging.getLogger(__name__).info) - host = r.read(StrVar('CRATE_HOST', 'crate')) - port = r.read(IntVar('CRATE_PORT', 4200)) - - conn = client.connect([f"{host}:{port}"], error_trace=True) - cursor = conn.cursor() - - yield OriginalDataScenarios(CrateTranslatorInstance, cursor, - delay_query_by=1) - - cursor.close() - conn.close() - - -def test_changed_attr_type_scenario(with_crate): - with_crate.run_changed_attr_type_scenario() - - -def test_inconsistent_attr_type_in_batch_scenario(with_crate): - with_crate.run_inconsistent_attr_type_in_batch_scenario() - - -def test_data_loss_scenario(with_crate): - with_crate.run_data_loss_scenario() - - -def test_success_scenario(with_crate): - with_crate.run_success_scenario() - - -def test_success_scenario_with_keep_raw_on(with_crate): - with_crate.run_success_scenario_with_keep_raw_on() - - -def test_query_failed_entities_scenario(with_crate): - with_crate.run_query_failed_entities_scenario( - fetch_batch_id_clause=f"{ORIGINAL_ENTITY_COL}['failedBatchID']" - ) diff 
--git a/src/translators/tests/test_crate_replication.py b/src/translators/tests/test_crate_replication.py deleted file mode 100644 index 3138697d..00000000 --- a/src/translators/tests/test_crate_replication.py +++ /dev/null @@ -1,30 +0,0 @@ -from translators.sql_translator import METADATA_TABLE_NAME, TYPE_PREFIX -from conftest import crate_translator as translator -from utils.common import create_random_entities - - -def test_default_replication(translator): - """ - By default there should be 2-all replicas - - https://crate.io/docs/crate/reference/en/latest/general/ddl/replication.html - """ - entities = create_random_entities(1, 2, 10) - entity = entities[0] - e_type = entity['type'] - - translator.insert(entities) - - et = '{}{}'.format(TYPE_PREFIX, e_type.lower()) - # same as in translator._et2tn but without double quotes - op = "select number_of_replicas from information_schema.tables where " \ - "table_name = '{}'" - translator.cursor.execute(op.format(et)) - res = translator.cursor.fetchall() - assert res[0] == ['2-all'] - - # Metadata table should also be replicated - translator.cursor.execute(op.format(METADATA_TABLE_NAME)) - res = translator.cursor.fetchall() - assert res[0] == ['2-all'] - translator.clean() diff --git a/src/translators/tests/test_crate_delete.py b/src/translators/tests/test_delete.py similarity index 86% rename from src/translators/tests/test_crate_delete.py rename to src/translators/tests/test_delete.py index 9175af48..7e235bbc 100644 --- a/src/translators/tests/test_crate_delete.py +++ b/src/translators/tests/test_delete.py @@ -1,8 +1,20 @@ +# To test a single translator use the -k parameter followed by either +# timescale or crate. +# See https://docs.pytest.org/en/stable/example/parametrize.html + from datetime import datetime -from conftest import crate_translator as translator +from conftest import crate_translator, timescale_translator from utils.common import create_random_entities, TIME_INDEX_NAME +import pytest + + +translators = [ + pytest.lazy_fixture('crate_translator'), + pytest.lazy_fixture('timescale_translator') +] +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) def test_delete_entity_defaults(translator): num_types = 2 num_ids_per_type = 2 @@ -31,6 +43,7 @@ def test_delete_entity_defaults(translator): translator.clean() +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) def test_delete_entity_customs(translator): entities = create_random_entities(num_types=1, num_ids_per_type=2, @@ -67,6 +80,7 @@ def test_delete_entity_customs(translator): translator.clean() +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) def test_delete_entity_with_tenancy(translator): entities = create_random_entities(num_types=2, num_ids_per_type=2, @@ -98,6 +112,7 @@ def test_delete_entity_with_tenancy(translator): translator.clean(fs) +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) def test_delete_entities_defaults(translator): entities = create_random_entities(num_types=3, num_ids_per_type=2, @@ -114,6 +129,7 @@ def test_delete_entities_defaults(translator): translator.clean() +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) def test_delete_entities_customs(translator): entities = create_random_entities(num_types=4, num_ids_per_type=1, @@ -135,6 +151,7 @@ def test_delete_entities_customs(translator): translator.clean() +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) def 
test_delete_entities_with_tenancy(translator): fs = 'fs' fsp = 'fsp' diff --git a/src/translators/tests/test_health.py b/src/translators/tests/test_health.py new file mode 100644 index 00000000..a8fe8e8e --- /dev/null +++ b/src/translators/tests/test_health.py @@ -0,0 +1,18 @@ +# To test a single translator use the -k parameter followed by either +# timescale or crate. +# See https://docs.pytest.org/en/stable/example/parametrize.html + +from conftest import crate_translator, timescale_translator + +import pytest + +translators = [ + pytest.lazy_fixture('crate_translator'), + pytest.lazy_fixture('timescale_translator') +] + + +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) +def test_health(translator): + health = translator.get_health() + assert health['status'] == 'pass' diff --git a/src/translators/tests/test_insert.py b/src/translators/tests/test_insert.py new file mode 100644 index 00000000..ce73174d --- /dev/null +++ b/src/translators/tests/test_insert.py @@ -0,0 +1,530 @@ +# To test a single translator use the -k parameter followed by either +# timescale or crate. +# See https://docs.pytest.org/en/stable/example/parametrize.html + +from exceptions.exceptions import AmbiguousNGSIIdError +from translators.base_translator import BaseTranslator +from translators.sql_translator import NGSI_TEXT +from utils.common import * +from datetime import datetime, timezone + +from src.utils.common import create_random_entities +from conftest import crate_translator, timescale_translator, entity +import pytest + + +translators = [ + pytest.lazy_fixture('crate_translator'), + pytest.lazy_fixture('timescale_translator') +] + + +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) +def test_insert(translator): + entities = create_random_entities(1, 2, 3, use_time=True, use_geo=True) + result = translator.insert(entities) + assert result.rowcount > 0 + translator.clean() + + +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) +def test_insert_entity(translator, entity): + now = datetime.now(timezone.utc) + now_iso = now.isoformat(timespec='milliseconds') + entity[BaseTranslator.TIME_INDEX_NAME] = now_iso + + result = translator.insert([entity]) + assert result.rowcount != 0 + + loaded_entities = translator.query() + assert len(loaded_entities) == 1 + + check_notifications_record([entity], loaded_entities) + translator.clean() + + +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) +def test_insert_same_entity_with_different_attrs( translator, sameEntityWithDifferentAttrs ): + """ + Test that the CrateTranslator can insert entity updates that are of the same type but have different attributes. + """ + # Add time index to the updates. Use the dateModified meta data attribute of temperature. 
+ for entity in sameEntityWithDifferentAttrs: + entity[BaseTranslator.TIME_INDEX_NAME] = entity['temperature']['metadata']['dateModified']['value'] + + result = translator.insert( sameEntityWithDifferentAttrs ) + assert result.rowcount != 0 + + loaded_entities = translator.query() + assert len(loaded_entities) == 1 + + check_notifications_record( sameEntityWithDifferentAttrs, loaded_entities) + translator.clean() + + +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) +def test_insert_multiple_types(translator): + args = { + 'num_types': 3, + 'num_ids_per_type': 2, + 'num_updates': 1, + 'use_time': True, + 'use_geo': True + } + entities = create_random_entities(**args) + result = translator.insert(entities) + assert result.rowcount > 0 + + # Again to check metadata handling works fine + entities = create_random_entities(**args) + result = translator.insert(entities) + assert result.rowcount > 0 + translator.clean() + + +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) +def test_query_all_before_insert(translator): + # Query all + loaded_entities = translator.query() + assert len(loaded_entities) == 0 + + # Query Some + loaded_entities = translator.query(entity_type="Lamp", + fiware_service="openiot", + fiware_servicepath="/") + assert len(loaded_entities) == 0 + + # Query one + loaded_entities = translator.query(entity_id="Lamp:001", + fiware_service="openiot", + fiware_servicepath="/") + assert len(loaded_entities) == 0 + translator.clean() + + +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) +def test_query_all(translator): + num_types = 2 + num_ids = 2 + num_updates = 2 + args = { + 'num_types': num_types, + 'num_ids_per_type': num_ids, + 'num_updates': num_updates, + 'use_time': True, + 'use_geo': True + } + entities = create_random_entities(**args) + result = translator.insert(entities) + assert result.rowcount > 0 + + loaded_entities = translator.query() + assert len(loaded_entities) == 2 * 2 + + for i in ['0-0', '0-1', '1-0', '1-1']: + notifications = [e for e in entities if e['id'] == i] + records = [e for e in loaded_entities if e['id'] == i] + check_notifications_record(notifications, records) + translator.clean() + + +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) +def test_limit_0(translator): + entities = create_random_entities(num_updates=2) + result = translator.insert(entities) + assert result.rowcount > 0 + + loaded_entities = translator.query(last_n=0) + assert loaded_entities == [] + + loaded_entities = translator.query(limit=0) + assert loaded_entities == [] + translator.clean() + + +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) +def test_limit_overrides_lastN(translator): + entities = create_random_entities(num_updates=7) + result = translator.insert(entities) + assert result.rowcount > 0 + + loaded_entities = translator.query(last_n=5, limit=3) + assert len(loaded_entities[0]['index']) == 3 + translator.clean() + + +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) +def test_lastN_ordering(translator): + entities = create_random_entities(num_updates=5) + result = translator.insert(entities) + assert result.rowcount > 0 + + loaded_entities = translator.query(last_n=3) + index = loaded_entities[0]['index'] + assert len(index) == 3 + assert index[-1] > index[0] + translator.clean() + + +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) +def 
test_attrs_by_entity_id(translator): + # First insert some data + num_updates = 10 + entities = create_random_entities(num_types=2, + num_ids_per_type=2, + num_updates=num_updates, + use_time=True, + use_geo=True) + translator.insert(entities) + + # Now query by entity id + entity_id = '0-1' + loaded_entities = translator.query(entity_type='0', entity_id=entity_id) + notifications = [e for e in entities + if e['type'] == '0' and e['id'] == '0-1'] + check_notifications_record(notifications, loaded_entities) + + # entity_type should be optional + entity_id = '1-1' + loaded_entities = translator.query(entity_id=entity_id) + notifications = [e for e in entities + if e['type'] == '1' and e['id'] == '1-1'] + check_notifications_record(notifications, loaded_entities) + + # nonexistent id should return no data + loaded_entities = translator.query(entity_id='some_nonexistent_id') + assert len(loaded_entities) == 0 + translator.clean() + + +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) +def test_attrs_by_id_ambiguity(translator): + entities = create_random_entities(num_types=2, + num_ids_per_type=1, + num_updates=3) + for e in entities: + e['id'] = 'repeated_id' + + translator.insert(entities) + + # OK if specifying type + loaded_entities = translator.query(entity_type='0', entity_id='repeated_id') + assert len(loaded_entities[0]['index']) == 3 + assert len(loaded_entities) == 1 + + # NOT OK otherwise + with pytest.raises(AmbiguousNGSIIdError): + translator.query(entity_id='repeated_id') + translator.clean() + +# TODO: This query is only for CRATE not for TIMESCALE +WITHIN_EAST_HEMISPHERE = "within(attr_geo, " \ + "'POLYGON ((0 -90, 180 -90, 180 90, 0 90, 0 -90))')" + + +def within_east_hemisphere(e): + return e["attr_geo"]["values"][0]["coordinates"][0] > 0 + + +def beyond_mid_epoch(e): + mid_epoch = datetime(1970, 6, 28).isoformat(timespec='milliseconds') + return e["attr_time"]["values"][0] > mid_epoch + + +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) +@pytest.mark.parametrize("attr_name, clause, tester", [ + ("attr_bool", "= True", lambda e: e["attr_bool"]["values"][0]), + ("attr_str", "> 'M'", lambda e: e["attr_str"]["values"][0] > "M"), + ("attr_float", "< 0.5", lambda e: e["attr_float"]["values"][0] < 0.5), + ("attr_time", "> '1970-06-28T00:00'", beyond_mid_epoch) +## (WITHIN_EAST_HEMISPHERE, "", within_east_hemisphere) +]) +def test_query_per_attribute(translator, attr_name, clause, tester): + num_types = 1 + num_ids_per_type = 2 + num_updates = 10 + + entities = create_random_entities(num_types, num_ids_per_type, num_updates, + use_time=True, use_geo=True) + translator.insert(entities) + + where_clause = "where {} {}".format(attr_name, clause) + entities = translator.query(entity_type='0', where_clause=where_clause) + + total = num_types * num_ids_per_type * num_updates + + assert len(entities) > 0, "No entities where found " \ + "with the clause: {}{}".format(attr_name, clause) + assert len(entities) < total, "All entities matched the clause. 
" \ + "Not expected from an " \ + "uniform random distribution" + assert all(map(tester, entities)) + translator.clean() + + +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) +def test_unsupported_ngsi_type(translator): + e = { + "type": "SoMeWeIrDtYpE", + "id": "sOmEwEiRdId", + TIME_INDEX_NAME: datetime.now(timezone.utc).isoformat(timespec='milliseconds'), + "foo": { + "type": "IgnoreThisDefinitivelyNotValidNGSITypeMessage", + "value": "BaR", + }, + } + translator.insert([e]) + entities = translator.query() + check_notifications_record([e], entities) + translator.clean() + + +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) +def test_accept_unknown_ngsi_type(translator): + """ + test to validate issue #129 + automatic casting to NGSI data type + https://github.com/smartsdk/ngsi-timeseries-api/issues/129 + """ + e = { + "type": "SoMeWeIrDtYpE", + "id": "sOmEwEiRdId", + TIME_INDEX_NAME: datetime.now(timezone.utc).isoformat(timespec='milliseconds'), + "address": { + "type": "PostalAddress", + "value": { + "streetAddress": "18 Avenue Félix Faure", + "postalCode": "06000", + "addressLocality": "Nice", + "addressCountry": "France" + }, + }, + } + translator.insert([e]) + entities = translator.query() + check_notifications_record([e], entities) + translator.clean() + + +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) +def test_accept_special_chars(translator): + """ + test to validate issue #128 + attributes names and entity type containing '-' are not accepted by crateDB + https://github.com/smartsdk/ngsi-timeseries-api/issues/128 + """ + e = { + "type": "SoMe-WeIrD-tYpE", + "id": "sOmE:wEiRd.Id", + TIME_INDEX_NAME: datetime.now(timezone.utc).isoformat(timespec='milliseconds'), + "address": { + "type": "Address-Type", + "value": { + "streetAddress": "18 Avenue Félix Faure", + "postalCode": "06000", + "addressLocality": "Nice", + "addressCountry": "France" + }, + }, + } + translator.insert([e]) + entities = translator.query() + check_notifications_record([e], entities) + translator.clean() + + +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) +def test_missing_type_defaults_to_string(translator): + e = { + "type": "SoMeWeIrDtYpE", + "id": "sOmEwEiRdId", + TIME_INDEX_NAME: datetime.now(timezone.utc).isoformat(timespec='milliseconds'), + "foo": { + "value": "BaR", + }, + } + translator.insert([e]) + entities = translator.query() + assert len(entities) == 1 + + # Response will include the type + e["foo"]["type"] = NGSI_TEXT + check_notifications_record([e], entities) + translator.clean() + + +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) +def test_capitals(translator): + entity_type = "SoMeWeIrDtYpE" + e1 = { + "type": entity_type, + "id": "sOmEwEiRdId", + TIME_INDEX_NAME: datetime.now(timezone.utc).isoformat(timespec='milliseconds'), + "Foo": { + "type": "Text", + "value": "FoO", + }, + "bAr": { + "type": "Text", + "value": "bAr", + }, + } + translator.insert([e1]) + entities = translator.query() + assert len(entities) == 1 + check_notifications_record([e1], entities) + + # If a new attribute comes later, I want it translated as well. 
+ e2 = e1.copy() + e2['id'] = 'SOmEwEiRdId2' + e2['NewAttr'] = {"type": "Text", "value": "NewAttrValue!"} + e2[TIME_INDEX_NAME] = datetime.now(timezone.utc).isoformat(timespec='milliseconds') + + translator.insert([e2]) + entities = translator.query() + assert len(entities) == 2 + + assert entities[0]['id'] == e2['id'] + assert entities[0]['NewAttr']['values'] == [e2['NewAttr']['value']] + + # Note that old entity gets None for the new attribute + assert entities[1]['id'] == e1['id'] + assert entities[1]['NewAttr']['values'] == [None] + translator.clean() + + +@pytest.mark.filterwarnings("ignore") +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) +def test_no_time_index(translator): + """ + The Reporter is responsible for injecting the 'time_index' attribute to the + entity. If for some reason there's no such index, the translator will add + one with current_time. + """ + e = { + 'id': 'entityId1', + 'type': 'type1', + 'foo': {'type': 'Text', 'value': "SomeText"} + } + translator.insert([e]) + records = translator.query() + assert len(records) == 1 + assert len(records[0]['index']) == 1 + translator.clean() + + +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) +def test_long_json(translator): + # Github issue 44 + big_entity = { + 'id': 'entityId1', + 'type': 'type1', + TIME_INDEX_NAME: datetime.now(timezone.utc).isoformat(timespec='milliseconds'), + 'foo': { + 'type': 'Text', + 'value': "SomeTextThatWillGetLong" * 2000 + } + } + translator.insert([big_entity]) + + r = translator.query() + assert len(r) == 1 + check_notifications_record([big_entity], r) + translator.clean() + + +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) +def test_structured_value_to_array(translator): + entity = { + 'id': '8906', + 'type': 'AirQualityObserved', + TIME_INDEX_NAME: datetime.now(timezone.utc).isoformat(timespec='milliseconds'), + 'aqi': {'type': 'Number', 'value': 43}, + 'city': {'type': 'Text', 'value': 'Antwerpen'}, + 'h': {'type': 'Number', 'value': 93}, + 'location': { + 'type': 'geo:point', + 'value': '51.2056589, 4.4180728', + }, + 'measurand': { + 'type': 'StructuredValue', + 'value': ['pm25, 43, ugm3, PM25', 'pm10, 30, ugm3, PM10', + 'p, 1012, hPa, Pressure'] + }, + 'p': {'type': 'Number', 'value': 1012}, + 'pm10': {'type': 'Number', 'value': 30}, + 'pm25': {'type': 'Number', 'value': 43}, + 't': {'type': 'Number', 'value': 8.33} + } + translator.insert([entity]) + + r = translator.query() + check_notifications_record([entity], r) + translator.clean() + + +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) +def test_ISO8601(translator): + """ + ISO8601 should be a valid type, equivalent to DateTime. 
+ """ + e = { + "type": "MyType", + "id": "MyId", + TIME_INDEX_NAME: datetime.now(timezone.utc).isoformat(timespec='milliseconds'), + "iso_attr": { + "type": "ISO8601", + "value": "2018-03-20T13:26:38.722Z", + }, + } + translator.insert([e]) + + loaded = translator.query() + assert len(loaded) > 0 + check_notifications_record([e], loaded) + translator.clean() + + +################################################################################ +# FIWARE DATA MODELS +################################################################################ +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) +def test_air_quality_observed(translator, air_quality_observed): + # Add TIME_INDEX as Reporter would + now = datetime.now(timezone.utc).isoformat(timespec='milliseconds') + air_quality_observed[TIME_INDEX_NAME] = now + + translator.insert([air_quality_observed]) + loaded = translator.query() + check_notifications_record([air_quality_observed], loaded) + translator.clean() + + +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) +def test_traffic_flow_observed(translator, traffic_flow_observed): + # Add TIME_INDEX as Reporter would + now = datetime.now(timezone.utc).isoformat(timespec='milliseconds') + traffic_flow_observed[TIME_INDEX_NAME] = now + + translator.insert([traffic_flow_observed]) + loaded = translator.query() + check_notifications_record([traffic_flow_observed], loaded) + translator.clean() + + +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) +def test_ngsi_ld(translator, ngsi_ld): + # Add TIME_INDEX as Reporter would + now = datetime.now(timezone.utc).isoformat(timespec='milliseconds') + ngsi_ld[TIME_INDEX_NAME] = now + # Remove @context as Reporter would + ngsi_ld.pop('@context') + + translator.insert([ngsi_ld]) + loaded = translator.query() + + assert ngsi_ld['id'] == loaded[0]['id'] + assert ngsi_ld['refStreetlightModel']['object'] == loaded[0]['refStreetlightModel']['values'][0] + assert ngsi_ld['location']['value'] == loaded[0]['location']['values'][0] + + translator.clean() diff --git a/src/translators/tests/test_crate_multientities.py b/src/translators/tests/test_multientities.py similarity index 77% rename from src/translators/tests/test_crate_multientities.py rename to src/translators/tests/test_multientities.py index d604771c..72354afd 100644 --- a/src/translators/tests/test_crate_multientities.py +++ b/src/translators/tests/test_multientities.py @@ -1,5 +1,5 @@ """ -Test Crate queries that span across multiple entities (of the same type for +Test queries that span across multiple entities (of the same type for now). Query has historically accepted an entity_id parameter (the id of the entity). @@ -7,11 +7,24 @@ It has been refactored to be called entity_ids and be a list of ids. For now, having more than one element in this list requires the type to be specified and unique. + +To test a single translator use the -k parameter followed by either +timescale or crate. 
+See https://docs.pytest.org/en/stable/example/parametrize.html """ -from conftest import crate_translator as translator from utils.common import create_random_entities +from conftest import crate_translator, timescale_translator + +import pytest + + +translators = [ + pytest.lazy_fixture('crate_translator'), + pytest.lazy_fixture('timescale_translator') +] +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) def test_query_multiple_ids(translator): # First insert some data num_updates = 3 @@ -32,6 +45,7 @@ def test_query_multiple_ids(translator): translator.clean() +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) def test_query_multiple_ids_bak(translator): # Should not break old usage of one single entity_id num_updates = 3 @@ -46,6 +60,7 @@ def test_query_multiple_ids_bak(translator): translator.clean() +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) def test_query_multiple_ids_with_invalids(translator): # Nonexistent ids should be ignored num_updates = 3 diff --git a/src/translators/tests/test_crate_multitenancy.py b/src/translators/tests/test_multitenancy.py similarity index 83% rename from src/translators/tests/test_crate_multitenancy.py rename to src/translators/tests/test_multitenancy.py index 3d3ad5bb..3cc86cd6 100644 --- a/src/translators/tests/test_crate_multitenancy.py +++ b/src/translators/tests/test_multitenancy.py @@ -1,5 +1,5 @@ """ -The crate translator understands about FIWARE-Service and FIWARE-ServicePath. +The translator understands about FIWARE-Service and FIWARE-ServicePath. The FIWARE-Service is used as a crate db schema. By default, no schema is specified (Crate uses "doc" schema as default). @@ -10,10 +10,23 @@ The queries using FIWARE-ServicePath will work like... select * from entityX where path ~ '/path/here($|/.*)'; + + +To test a single translator use the -k parameter followed by either +timescale or crate. 
+See https://docs.pytest.org/en/stable/example/parametrize.html """ from datetime import datetime from utils.common import TIME_INDEX_NAME -from conftest import crate_translator as translator +from conftest import crate_translator, timescale_translator + +import pytest + + +translators = [ + pytest.lazy_fixture('crate_translator'), + pytest.lazy_fixture('timescale_translator') +] def entity(entity_id): @@ -28,7 +41,7 @@ def entity(entity_id): } return e - +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) def test_fiware_tenant(translator): # Insert WITH tenant e = entity("Room1") @@ -46,6 +59,7 @@ def test_fiware_tenant(translator): translator.clean(fs) +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) def test_fiware_tenant_services(translator): # Insert in tenant A e = entity("X") @@ -68,6 +82,7 @@ def test_fiware_tenant_services(translator): translator.clean("B") +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) def test_fiware_tenant_servicepath(translator): def insert_with_tenant(e, path): translator.insert([e], fiware_service="EU", fiware_servicepath=path) @@ -101,6 +116,7 @@ def insert_with_tenant(e, path): translator.clean("EU") +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) def test_fiware_empty_tenant_is_no_tenant(translator): # Insert with EMPTY tenant e = entity("Room1") @@ -122,6 +138,7 @@ def test_fiware_empty_tenant_is_no_tenant(translator): translator.clean() +@pytest.mark.parametrize("translator", translators, ids=["crate", "timescale"]) def test_fiware_tenant_reserved_word(translator): e = entity("Room1") fs = "default" diff --git a/src/translators/tests/test_original_data.py b/src/translators/tests/test_original_data.py new file mode 100644 index 00000000..437ce3e1 --- /dev/null +++ b/src/translators/tests/test_original_data.py @@ -0,0 +1,87 @@ +import pg8000 +from crate import client + +from translators.timescale import postgres_translator_instance, \ + PostgresConnectionData +from translators.crate import CrateTranslatorInstance +from utils.cfgreader import * + +from .original_data_scenarios import * + +# To test a single translator use the -k parameter followed by either +# timescale or crate. 
+# See https://docs.pytest.org/en/stable/example/parametrize.html + +@pytest.fixture(scope='module') +def with_timescale(): + pg8000.paramstyle = "qmark" + t = PostgresConnectionData() + t.read_env() + + pg_conn = pg8000.connect(host=t.host, port=t.port, + database=t.db_name, + user=t.db_user, password=t.db_pass) + pg_conn.autocommit = True + pg_cursor = pg_conn.cursor() + + yield OriginalDataScenarios(postgres_translator_instance, pg_cursor) + + pg_cursor.close() + pg_conn.close() + + +@pytest.fixture(scope='module') +def with_crate(): + r = EnvReader(log=logging.getLogger(__name__).info) + host = r.read(StrVar('CRATE_HOST', 'crate')) + port = r.read(IntVar('CRATE_PORT', 4200)) + + conn = client.connect([f"{host}:{port}"], error_trace=True) + cursor = conn.cursor() + + yield OriginalDataScenarios(CrateTranslatorInstance, cursor, + delay_query_by=1) + + cursor.close() + conn.close() + +translators = [ + pytest.lazy_fixture('with_timescale'), + pytest.lazy_fixture('with_crate') +] + + +@pytest.mark.parametrize("translator", translators, ids=["timescale", "crate"]) +def test_changed_attr_type_scenario(translator): + translator.run_changed_attr_type_scenario() + + +@pytest.mark.parametrize("translator", translators, ids=["timescale", "crate"]) +def test_inconsistent_attr_type_in_batch_scenario(translator): + translator.run_inconsistent_attr_type_in_batch_scenario() + + +@pytest.mark.parametrize("translator", translators, ids=["timescale", "crate"]) +def test_data_loss_scenario(translator): + translator.run_data_loss_scenario() + + +@pytest.mark.parametrize("translator", translators, ids=["timescale", "crate"]) +def test_success_scenario(translator): + translator.run_success_scenario() + + +@pytest.mark.parametrize("translator", translators, ids=["timescale", "crate"]) +def test_success_scenario_with_keep_raw_on(translator): + translator.run_success_scenario_with_keep_raw_on() + + +@pytest.mark.parametrize("translator", translators, ids=["timescale", "crate"]) +def test_query_failed_entities_scenario(translator): + clause = f"({ORIGINAL_ENTITY_COL} ->> 'failedBatchID')" + if translator.get_translator() == CrateTranslatorInstance: + clause = f"{ORIGINAL_ENTITY_COL}['failedBatchID']" + + translator.run_query_failed_entities_scenario( + fetch_batch_id_clause=clause + ) diff --git a/src/translators/tests/test_timescale_insert.py b/src/translators/tests/test_timescale_insert.py index 0d07cff8..d9f9ca4d 100644 --- a/src/translators/tests/test_timescale_insert.py +++ b/src/translators/tests/test_timescale_insert.py @@ -62,9 +62,7 @@ def gen_entity(entity_type): 'type': 'geo:json', 'value': { 'type': 'LineString', - 'coordinates': [[30, 10], [10, 30], [40, 40]], - 'crs': {'properties': {'name': 'EPSG4326'}, 'type': 'name'}, - 'meta': {'srid': 4326} + 'coordinates': [[30, 10], [10, 30], [40, 40]] } }, 'a_text': { @@ -93,8 +91,6 @@ def assert_inserted_entity_values(entity, row): assert row['a_datetime'] == datetime(2019, 7, 22, 11, 46, 45, 123000, tzinfo=timezone.utc) assert decode_wkb_hexstr(row['a_point']) == { - 'crs': {'properties': {'name': 'EPSG4326'}, 'type': 'name'}, - 'meta': {'srid': 4326}, 'type': 'Point', 'coordinates': [1.0, 2.0] # note how lat/lon get swapped } diff --git a/src/translators/tests/test_timescale_original_data.py b/src/translators/tests/test_timescale_original_data.py deleted file mode 100644 index b8311698..00000000 --- a/src/translators/tests/test_timescale_original_data.py +++ /dev/null @@ -1,50 +0,0 @@ -import pg8000 - -from translators.timescale import 
postgres_translator_instance, \ - PostgresConnectionData - -from .original_data_scenarios import * - - -@pytest.fixture(scope='module') -def with_timescale(): - pg8000.paramstyle = "qmark" - t = PostgresConnectionData() - t.read_env() - - pg_conn = pg8000.connect(host=t.host, port=t.port, - database=t.db_name, - user=t.db_user, password=t.db_pass) - pg_conn.autocommit = True - pg_cursor = pg_conn.cursor() - - yield OriginalDataScenarios(postgres_translator_instance, pg_cursor) - - pg_cursor.close() - pg_conn.close() - - -def test_changed_attr_type_scenario(with_timescale): - with_timescale.run_changed_attr_type_scenario() - - -def test_inconsistent_attr_type_in_batch_scenario(with_timescale): - with_timescale.run_inconsistent_attr_type_in_batch_scenario() - - -def test_data_loss_scenario(with_timescale): - with_timescale.run_data_loss_scenario() - - -def test_success_scenario(with_timescale): - with_timescale.run_success_scenario() - - -def test_success_scenario_with_keep_raw_on(with_timescale): - with_timescale.run_success_scenario_with_keep_raw_on() - - -def test_query_failed_entities_scenario(with_timescale): - with_timescale.run_query_failed_entities_scenario( - fetch_batch_id_clause=f"({ORIGINAL_ENTITY_COL} ->> 'failedBatchID')" - ) diff --git a/src/translators/timescale.py b/src/translators/timescale.py index 1e8851ce..73c7f7d9 100644 --- a/src/translators/timescale.py +++ b/src/translators/timescale.py @@ -79,6 +79,7 @@ def __init__(self, conn_data=PostgresConnectionData()): self.connection = None self.cursor = None self.logger = logging.getLogger(__name__) + self.dbCacheName = 'timescale' def setup(self): self.ccm = ConnectionManager() @@ -108,6 +109,25 @@ def sql_error_handler(self, exception): self.ccm.reset_connection('timescale') self.setup() + def get_health(self): + health = {} + + op = "SELECT * FROM information_schema.tables" + health['time'] = datetime.utcnow().isoformat(timespec='milliseconds') + try: + self.cursor.execute(op) + + except Exception as e: + msg = "{}".format(e) + logging.debug(msg) + health['status'] = 'fail' + health['output'] = msg + + else: + health['status'] = 'pass' + + return health + @staticmethod def _svc_to_schema_name(fiware_service): if fiware_service: @@ -168,7 +188,6 @@ def _preprocess_values(self, e, original_attrs, col_names, fiware_servicepath): try: attr = original_attrs[cn][0] attr_t = original_attrs[cn][1] - ngsi_value = e[attr]['value'] mapped_type = self._compute_type(e['id'], attr_t, e[attr]) if SlfGeometry.is_ngsi_slf_attr(e[attr]): @@ -177,14 +196,14 @@ def _preprocess_values(self, e, original_attrs, col_names, fiware_servicepath): ast, srid=4326) elif mapped_type == NGSI_TO_SQL[NGSI_GEOJSON]: mapped_value = geocoding.geojson.wktcodec.encode_as_wkt( - ngsi_value, srid=4326) + e[attr]['value'], srid=4326) elif mapped_type == NGSI_TO_SQL[NGSI_STRUCTURED_VALUE]: - mapped_value = pg8000.PGJsonb(ngsi_value) + mapped_value = pg8000.PGJsonb(e[attr]['value']) elif mapped_type == NGSI_TO_SQL[NGSI_TEXT] \ - and ngsi_value is not None: - mapped_value = str(ngsi_value) + and 'value' in e[attr] and e[attr]['value'] is not None: + mapped_value = str(e[attr]['value']) elif mapped_type == PG_JSON_ARRAY: - mapped_value = pg8000.PGJsonb(ngsi_value) + mapped_value = pg8000.PGJsonb(e[attr]['value']) elif 'type' in e[attr] and e[attr]['type'] == 'Property' \ and 'value' in e[attr] \ and isinstance(e[attr]['value'], dict) \ @@ -196,7 +215,7 @@ def _preprocess_values(self, e, original_attrs, col_names, fiware_servicepath): mapped_value = 
e[attr].get('value', None) or \
                               e[attr].get('object', None)
             else:
-                mapped_value = ngsi_value
+                mapped_value = e[attr]['value']
             values.append(mapped_value)
         except KeyError:
@@ -222,7 +241,7 @@ def _db_value_to_ngsi(self, db_value: Any, ngsi_type: str) -> Any:
             slf_geom = geocoding.slf.jsoncodec.decode(geo_json, ngsi_type)
             return slf_geom.to_ngsi_attribute()['value'] if slf_geom else None
-        if ngsi_type == NGSI_GEOJSON:
+        if ngsi_type == NGSI_GEOJSON or ngsi_type == NGSI_LD_GEOMETRY:
             return geocoding.geojson.wktcodec.decode_wkb_hexstr(db_value)
         return db_value
@@ -234,6 +253,8 @@ def _db_value_to_ngsi(self, db_value: Any, ngsi_type: str) -> Any:
     # 2. Basic types (int, float, boolean and text). They also get converted
     # back to their corresponding Python types.
+    # TODO: newer pg8000 releases remove PGJsonb;
+    # replace it with json.dumps() when upgrading.
     @staticmethod
     def _to_db_ngsi_structured_value(data: dict) -> pg8000.PGJsonb:
         return pg8000.PGJsonb(data)
diff --git a/src/utils/common.py b/src/utils/common.py
index e36b4b14..f431e827 100644
--- a/src/utils/common.py
+++ b/src/utils/common.py
@@ -122,8 +122,9 @@ def create_random_entities(num_types=1,
             add_attr(entity, "attr_time", v_iso)
         if use_geo:
-            long = random.uniform(-180, 180)
-            lat = random.uniform(-90, 90)
+            # PostGIS precision does not allow more than 16 decimals
+            long = round(random.uniform(-180, 180), 10)
+            lat = round(random.uniform(-90, 90), 10)
             point = {"type": "Point", "coordinates": [long, lat]}
             add_attr(entity, "attr_geo", point)
diff --git a/timescale-container/quantumleap-db-setup.py b/timescale-container/quantumleap-db-setup.py
index ecb36bbe..9c40faf1 100644
--- a/timescale-container/quantumleap-db-setup.py
+++ b/timescale-container/quantumleap-db-setup.py
@@ -436,20 +436,25 @@ def run(self):
 def run():
-    sleep(5) # Give Postgres enough time to start in the container.
-    try:
-        args = Args().get()
-        DbBootstrapCmd(args).run()
-        DbInitCmd(args).run()
-        DbLoadCmd(args).run()
-    except CalledProcessError as cpe:
-        # Rewrite error message to avoid leaking passwords into log files.
-        msg = 'Command `{0}` did not complete successfully. Exit status: {1}' \
-            .format(cpe.cmd[0], cpe.returncode)
-        print(msg, file=sys.stderr)
-        if cpe.output is not None:
-            print(str(cpe.output), file=sys.stderr)
-
+    count = 0
+    retry = True
+    while retry and count < 10:
+        try:
+            args = Args().get()
+            DbBootstrapCmd(args).run()
+            DbInitCmd(args).run()
+            DbLoadCmd(args).run()
+            retry = False
+        except CalledProcessError as cpe:
+            # Rewrite error message to avoid leaking passwords into log files.
+            msg = 'Command `{0}` did not complete successfully. Exit status: {1}' \
+                .format(cpe.cmd[0], cpe.returncode)
+            print(msg, file=sys.stderr)
+            if cpe.output is not None:
+                print(str(cpe.output), file=sys.stderr)
+            count = count + 1
+            sleep(5)
+    if retry:
+        sys.exit(64)
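
As a usage sketch of the -k selection mentioned in the comments added at the top of the new test files (file paths as they appear in this patch; adjust to your checkout): the parametrize ids are "crate" and "timescale", so a single backend can be exercised like this:

    # run only the CrateDB variants of the parametrized translator tests
    pytest src/translators/tests/test_insert.py -k crate

    # run only the Timescale variants
    pytest src/translators/tests/test_delete.py -k timescale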