diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000000..c62eb0b7cf --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,6 @@ +repos: +- repo: https://github.com/cognitedata/python-pre-commit-hooks + rev: e2944b2 + hooks: + - id: isort + - id: black diff --git a/Jenkinsfile b/Jenkinsfile index 6e9425e140..c04eb78b5e 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -49,6 +49,11 @@ podTemplate( stage('Install dependencies') { sh("pipenv sync --dev") } + stage('Check code style & remove typehints') { + sh("pipenv run black -l 120 --check .") + sh("pipenv run python3 type_hint_remover.py") + sh("pipenv run python3 -m black ./cognite -l 120") + } stage('Test and coverage report') { sh("pipenv run pytest --cov-report xml:coverage.xml --cov=cognite --junitxml=test-report.xml || true") junit(allowEmptyResults: true, testResults: '**/test-report.xml') @@ -59,8 +64,6 @@ podTemplate( step([$class: 'CoberturaPublisher', coberturaReportFile: 'coverage.xml']) } stage('Build') { - sh("pipenv run python3 code_parser.py --remove-type-hints --suppress-warning") - sh("pipenv run python3 -m black ./cognite -l 120") sh("python3 setup.py sdist") sh("python3 setup.py bdist_wheel") } diff --git a/Pipfile b/Pipfile index 8602813ca1..81f75d705b 100644 --- a/Pipfile +++ b/Pipfile @@ -20,7 +20,9 @@ twine = "*" pytest = "*" pylint = "*" astunparse = "*" -black = "*" +black = "==18.6b4" +strip-hints = "*" +pre-commit = "*" [requires] python_version = "3.6" diff --git a/Pipfile.lock b/Pipfile.lock index f8a052bc5f..c9e08174d4 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "11aff54daf822d354e6ec1c43fc80380f44eeacc037ac3e186393211f7877c76" + "sha256": "23c0ab8bb195cefa5868277cd76b07c2b1f7b0277ec68417ce49a91498aa6b10" }, "pipfile-spec": 6, "requires": { @@ -28,6 +28,7 @@ "sha256:0312ad34fcad8fac3704d441f7b317e50af620823353ec657a53e981f92920c0", "sha256:ec9ae8adaae229e4f8446952d204a3e4b5fdd2d099f9be3aaf556120135fb3ee" ], + "markers": "python_version != '3.0.*' and python_version != '3.1.*' and python_version >= '2.7' and python_version != '3.3.*' and python_version != '3.2.*'", "version": "==1.2.1" }, "attrs": { @@ -85,6 +86,7 @@ "sha256:3f349de3eb99145973fefb7dbe38554414e5c30abd0c8e4b970a7c9d09f3a1d8", "sha256:f3832918bc3c66617f92e35f5d70729187676313caa60c187eb0f28b8fe5e3b5" ], + "markers": "python_version != '3.0.*' and python_version >= '2.7' and python_version != '3.1.*' and python_version != '3.2.*' and python_version != '3.3.*'", "version": "==1.1.0" }, "jinja2": { @@ -125,6 +127,7 @@ "sha256:f82e347a72f955b7017a39708a3667f106e6ad4d10b25f237396a7115d8ed5fd", "sha256:fb7c206e01ad85ce57feeaaa0bf784b97fa3cad0d4a5737bc5295785f5c613a1" ], + "markers": "python_version >= '2.7' and python_version != '3.3.*' and python_version != '3.0.*' and python_version != '3.2.*' and python_version != '3.1.*'", "version": "==1.1.0" }, "more-itertools": { @@ -166,6 +169,7 @@ "sha256:ecf81720934a0e18526177e645cbd6a8a21bb0ddc887ff9738de07a1df5c6b61", "sha256:edfa6fba9157e0e3be0f40168eb142511012683ac3dc82420bee4a3f3981b30e" ], + "markers": "python_version != '3.0.*' and python_version != '3.3.*' and python_version != '3.1.*' and python_version != '3.2.*' and python_version >= '2.7'", "version": "==1.15.4" }, "packaging": { @@ -206,6 +210,7 @@ "sha256:447ba94990e8014ee25ec853339faf7b0fc8050cdc3289d4d71f7f410fb90095", "sha256:bde19360a8ec4dfd8a20dcb811780a30998101f078fc7ded6162f0076f50508f" ], + "markers": "python_version >= '2.7' and python_version != 
'3.2.*' and python_version != '3.3.*' and python_version != '3.1.*' and python_version != '3.0.*'", "version": "==0.8.0" }, "protobuf": { @@ -235,28 +240,30 @@ "sha256:bf92637198836372b520efcba9e020c330123be8ce527e535d185ed4b6f45694", "sha256:e76826342cefe3c3d5f7e8ee4316b80d1dd8a300781612ddbc765c17ba25a6c6" ], + "markers": "python_version >= '2.7' and python_version != '3.2.*' and python_version != '3.3.*' and python_version != '3.1.*' and python_version != '3.0.*'", "version": "==1.7.0" }, "pygments": { "hashes": [ - "sha256:78f3f434bcc5d6ee09020f92ba487f95ba50f1e3ef83ae96b9d5ffa1bab25c5d", - "sha256:dbae1046def0efb574852fab9e90209b23f556367b5a320c0bcb871c77c3e8cc" + "sha256:6301ecb0997a52d2d31385e62d0a4a4cf18d2f2da7054a5ddad5c366cd39cee7", + "sha256:82666aac15622bd7bb685a4ee7f6625dd716da3ef7473620c192c0168aae64fc" ], - "version": "==2.2.0" + "version": "==2.3.0" }, "pyparsing": { "hashes": [ "sha256:40856e74d4987de5d01761a22d1621ae1c7f8774585acae358aa5c5936c6c90b", "sha256:f353aab21fd474459d97b709e527b5571314ee5f067441dc9f88e33eecd96592" ], + "markers": "python_version != '3.0.*' and python_version != '3.2.*' and python_version >= '2.6' and python_version != '3.1.*'", "version": "==2.3.0" }, "pytest": { "hashes": [ - "sha256:488c842647bbeb350029da10325cb40af0a9c7a2fdda45aeb1dda75b60048ffb", - "sha256:c055690dfefa744992f563e8c3a654089a6aa5b8092dded9b6fafbd70b2e45a7" + "sha256:1d131cc532be0023ef8ae265e2a779938d0619bb6c2510f52987ffcba7fa1ee4", + "sha256:ca4761407f1acc85ffd1609f464ca20bb71a767803505bd4127d0e45c5a50e23" ], - "version": "==4.0.0" + "version": "==4.0.1" }, "pytest-mock": { "hashes": [ @@ -278,6 +285,7 @@ "sha256:30999d1d742ecf6645991a2ce9273188505e98b713ad63be06aabff47dd1b3c4", "sha256:8205cfe7061715de5cd1b37e3565d5b97d0ac13b30ff3ee612554abb6093d640" ], + "markers": "python_version != '3.0.*' and python_version >= '2.7' and python_version != '3.3.*' and python_version != '3.2.*' and python_version != '3.1.*'", "version": "==0.1.8" }, "pytz": { @@ -330,6 +338,7 @@ "sha256:68ca7ff70785cbe1e7bccc71a48b5b6d965d79ca50629606c7861a21b206d9dd", "sha256:9de47f375baf1ea07cdb3436ff39d7a9c76042c10a769c52353ec46e4e8fc3b9" ], + "markers": "python_version != '3.0.*' and python_version >= '2.7' and python_version != '3.1.*' and python_version != '3.2.*' and python_version != '3.3.*'", "version": "==1.1.0" }, "tabulate": { @@ -344,6 +353,7 @@ "sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39", "sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22" ], + "markers": "python_version != '3.0.*' and python_version != '3.3.*' and python_version != '3.1.*' and python_version != '3.2.*' and python_version < '4' and python_version >= '2.7'", "version": "==1.24.1" } }, @@ -355,12 +365,19 @@ ], "version": "==1.4.3" }, + "aspy.yaml": { + "hashes": [ + "sha256:04d26279513618f1024e1aba46471db870b3b33aef204c2d09bcf93bea9ba13f", + "sha256:0a77e23fafe7b242068ffc0252cee130d3e509040908fc678d9d1060e7494baa" + ], + "version": "==1.1.1" + }, "astroid": { "hashes": [ - "sha256:37f8e89d0e78a649edeb3751b408e96d103e76a1df19d79a0a3b559d0f4f7cd1", - "sha256:39870f07180e50c5a1c73a6de7b7cb487d6db649c0acd9917f154617e09f9e94" + "sha256:35b032003d6a863f5dcd7ec11abd5cd5893428beaa31ab164982403bcb311f22", + "sha256:6a5d668d7dc69110de01cdf7aeec69a679ef486862a0850cc0fd5571505b6b7e" ], - "version": "==2.1.0.dev0" + "version": "==2.1.0" }, "astunparse": { "hashes": [ @@ -375,6 +392,7 @@ "sha256:0312ad34fcad8fac3704d441f7b317e50af620823353ec657a53e981f92920c0", 
"sha256:ec9ae8adaae229e4f8446952d204a3e4b5fdd2d099f9be3aaf556120135fb3ee" ], + "markers": "python_version != '3.0.*' and python_version != '3.1.*' and python_version >= '2.7' and python_version != '3.3.*' and python_version != '3.2.*'", "version": "==1.2.1" }, "attrs": { @@ -386,11 +404,11 @@ }, "black": { "hashes": [ - "sha256:817243426042db1d36617910df579a54f1afd659adb96fc5032fcf4b36209739", - "sha256:e030a9a28f542debc08acceb273f228ac422798e5215ba2a791a6ddeaaca22a5" + "sha256:22158b89c1a6b4eb333a1e65e791a3f8b998cf3b11ae094adb2570f31f769a44", + "sha256:4b475bbd528acce094c503a3d2dbc2d05a4075f6d0ef7d9e7514518e14cc5191" ], "index": "pypi", - "version": "==18.9b0" + "version": "==18.6b4" }, "bleach": { "hashes": [ @@ -399,6 +417,13 @@ ], "version": "==3.0.2" }, + "cached-property": { + "hashes": [ + "sha256:3a026f1a54135677e7da5ce819b0c690f156f37976f3e30c5430740725203d7f", + "sha256:9217a59f14a5682da7c4b8829deadbfc194ac22e9908ccf7c8820234e80a1504" + ], + "version": "==1.5.1" + }, "certifi": { "hashes": [ "sha256:339dc09518b07e2fa7eda5450740925974815557727d6bd35d319c1524a04a4c", @@ -406,6 +431,13 @@ ], "version": "==2018.10.15" }, + "cfgv": { + "hashes": [ + "sha256:73f48a752bd7aab103c4b882d6596c6360b7aa63b34073dd2c35c7b4b8f93010", + "sha256:d1791caa9ff5c0c7bce80e7ecc1921752a2eb7c2463a08ed9b6c96b85a2f75aa" + ], + "version": "==1.1.0" + }, "chardet": { "hashes": [ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", @@ -418,39 +450,43 @@ "sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13", "sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7" ], + "markers": "python_version != '3.0.*' and python_version >= '2.7' and python_version != '3.3.*' and python_version != '3.2.*' and python_version != '3.1.*'", "version": "==7.0" }, "coverage": { "hashes": [ - "sha256:043d55226aec1d2baf4b2fcab5c204561ccf184a388096f41e396c1c092aff38", - "sha256:10bfd0b80b01d0684f968abbe1186bc19962e07b4b7601bb43b175b617cf689d", - "sha256:17e59864f19b3233032edb0566f26c25cc7f599503fb34d2645b5ce1fd6c2c3c", - "sha256:2105ee183c51fed27e2b6801029b3903f5c2774c78e3f53bd920ca468d0f5679", - "sha256:236505d15af6c7b7bfe2a9485db4b2bdea21d9239351483326184314418c79a8", - "sha256:237284425271db4f30d458b355decf388ab20b05278bdf8dc9a65de0973726c6", - "sha256:26d8eea4c840b73c61a1081d68bceb57b21a2d4f7afda6cac8ac38cb05226b00", - "sha256:39a3740f7721155f4269aedf67b211101c07bd2111b334dfd69b807156ab15d9", - "sha256:4bd0c42db8efc8a60965769796d43a5570906a870bc819f7388860aa72779d1b", - "sha256:4dcddadea47ac30b696956bd18365cd3a86724821656601151e263b86d34798f", - "sha256:51ea341289ac4456db946a25bd644f5635e5ae3793df262813cde875887d25c8", - "sha256:5415cafb082dad78935b3045c2e5d8907f436d15ad24c3fdb8e1839e084e4961", - "sha256:5631f1983074b33c35dbb84607f337b9d7e9808116d7f0f2cb7b9d6d4381d50e", - "sha256:5e9249bc361cd22565fd98590a53fd25a3dd666b74791ed7237fa99de938bbed", - "sha256:6a48746154f1331f28ef9e889c625b5b15a36cb86dd8021b4bdd1180a2186aa5", - "sha256:71d376dbac64855ed693bc1ca121794570fe603e8783cdfa304ec6825d4e768f", - "sha256:749ebd8a615337747592bd1523dfc4af7199b2bf6403b55f96c728668aeff91f", - "sha256:8ec528b585b95234e9c0c31dcd0a89152d8ed82b4567aa62dbcb3e9a0600deee", - "sha256:a1a9ccd879811437ca0307c914f136d6edb85bd0470e6d4966c6397927bcabd9", - "sha256:abd956c334752776230b779537d911a5a12fcb69d8fd3fe332ae63a140301ae6", - "sha256:ad18f836017f2e8881145795f483636564807aaed54223459915a0d4735300cf", - "sha256:b07ac0b1533298ddbc54c9bf3464664895f22899fec027b8d6c8d3ac59023283", - 
"sha256:d9385f1445e30e8e42b75a36a7899ea1fd0f5784233a626625d70f9b087de404", - "sha256:db2d1fcd32dbeeb914b2660af1838e9c178b75173f95fd221b1f9410b5d3ef1d", - "sha256:e1dec211147f1fd7cb7a0f9a96aeeca467a5af02d38911307b3b8c2324f9917e", - "sha256:e96dffc1fa57bb8c1c238f3d989341a97302492d09cb11f77df031112621c35c", - "sha256:ed4d97eb0ecdee29d0748acd84e6380729f78ce5ba0c7fe3401801634c25a1c5" - ], - "version": "==5.0a3" + "sha256:029c69deaeeeae1b15bc6c59f0ffa28aa8473721c614a23f2c2976dec245cd12", + "sha256:02abbbebc6e9d5abe13cd28b5e963dedb6ffb51c146c916d17b18f141acd9947", + "sha256:1bbfe5b82a3921d285e999c6d256c1e16b31c554c29da62d326f86c173d30337", + "sha256:210c02f923df33a8d0e461c86fdcbbb17228ff4f6d92609fc06370a98d283c2d", + "sha256:2d0807ba935f540d20b49d5bf1c0237b90ce81e133402feda906e540003f2f7a", + "sha256:35d7a013874a7c927ce997350d314144ffc5465faf787bb4e46e6c4f381ef562", + "sha256:3636f9d0dcb01aed4180ef2e57a4e34bb4cac3ecd203c2a23db8526d86ab2fb4", + "sha256:42f4be770af2455a75e4640f033a82c62f3fb0d7a074123266e143269d7010ef", + "sha256:48440b25ba6cda72d4c638f3a9efa827b5b87b489c96ab5f4ff597d976413156", + "sha256:4dac8dfd1acf6a3ac657475dfdc66c621f291b1b7422a939cc33c13ac5356473", + "sha256:4e8474771c69c2991d5eab65764289a7dd450bbea050bc0ebb42b678d8222b42", + "sha256:551f10ddfeff56a1325e5a34eff304c5892aa981fd810babb98bfee77ee2fb17", + "sha256:5b104982f1809c1577912519eb249f17d9d7e66304ad026666cb60a5ef73309c", + "sha256:5c62aef73dfc87bfcca32cee149a1a7a602bc74bac72223236b0023543511c88", + "sha256:633151f8d1ad9467b9f7e90854a7f46ed8f2919e8bc7d98d737833e8938fc081", + "sha256:772207b9e2d5bf3f9d283b88915723e4e92d9a62c83f44ec92b9bd0cd685541b", + "sha256:7d5e02f647cd727afc2659ec14d4d1cc0508c47e6cfb07aea33d7aa9ca94d288", + "sha256:a9798a4111abb0f94584000ba2a2c74841f2cfe5f9254709756367aabbae0541", + "sha256:b38ea741ab9e35bfa7015c93c93bbd6a1623428f97a67083fc8ebd366238b91f", + "sha256:b6a5478c904236543c0347db8a05fac6fc0bd574c870e7970faa88e1d9890044", + "sha256:c6248bfc1de36a3844685a2e10ba17c18119ba6252547f921062a323fb31bff1", + "sha256:c705ab445936457359b1424ef25ccc0098b0491b26064677c39f1d14a539f056", + "sha256:d95a363d663ceee647291131dbd213af258df24f41350246842481ec3709bd33", + "sha256:e27265eb80cdc5dab55a40ef6f890e04ecc618649ad3da5265f128b141f93f78", + "sha256:ebc276c9cb5d917bd2ae959f84ffc279acafa9c9b50b0fa436ebb70bbe2166ea", + "sha256:f4d229866d030863d0fe3bf297d6d11e6133ca15bbb41ed2534a8b9a3d6bd061", + "sha256:f95675bd88b51474d4fe5165f3266f419ce754ffadfb97f10323931fa9ac95e5", + "sha256:f95bc54fb6d61b9f9ff09c4ae8ff6a3f5edc937cda3ca36fc937302a7c152bf1", + "sha256:fd0f6be53de40683584e5331c341e65a679dbe5ec489a0697cec7c2ef1a48cda" + ], + "markers": "python_version != '3.1.*' and python_version != '3.2.*' and python_version != '3.3.*' and python_version < '4' and python_version >= '2.7' and python_version != '3.0.*'", + "version": "==5.0a4" }, "docutils": { "hashes": [ @@ -460,6 +496,13 @@ ], "version": "==0.14" }, + "identify": { + "hashes": [ + "sha256:5e956558a9a1e3b3891d7c6609fc9709657a11878af288ace484d1a46a93922b", + "sha256:623086059219cc7b86c77a3891f3700cb175d4ce02b8fb8802b047301d71e783" + ], + "version": "==1.1.7" + }, "idna": { "hashes": [ "sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e", @@ -467,12 +510,29 @@ ], "version": "==2.7" }, + "importlib-metadata": { + "hashes": [ + "sha256:36b02c84f9001adf65209fefdf951be8e9014a95eab9938c0779ad5670359b1c", + "sha256:60b6481a72908c93ccb707abeb926fb5a15319b9e6f0b76639a718837ee12de0" + ], + "markers": "python_version != '3.1' and python_version != '3.2' 
and python_version >= '2.7' and python_version != '3.0' and python_version != '3.3'", + "version": "==0.6" + }, + "importlib-resources": { + "hashes": [ + "sha256:6e2783b2538bd5a14678284a3962b0660c715e5a0f10243fd5e00a4b5974f50b", + "sha256:d3279fd0f6f847cced9f7acc19bd3e5df54d34f93a2e7bb5f238f81545787078" + ], + "markers": "python_version < '3.7'", + "version": "==1.0.2" + }, "isort": { "hashes": [ "sha256:1153601da39a25b14ddc54955dbbacbb6b2d19135386699e2ad58517953b34af", "sha256:b9c40e9750f3d77e6e4d441d8b0266cf555e7cdabdcff33c4fd06366ca761ef8", "sha256:ec9ef8f4a9bc6f71eec99e1806bfa2de401650d996c59330782b89a5555c1497" ], + "markers": "python_version >= '2.7' and python_version != '3.2.*' and python_version != '3.3.*' and python_version != '3.1.*' and python_version != '3.0.*'", "version": "==4.3.4" }, "lazy-object-proxy": { @@ -524,6 +584,12 @@ ], "version": "==4.3.0" }, + "nodeenv": { + "hashes": [ + "sha256:ad8259494cf1c9034539f6cced78a1da4840a4b157e23640bc4a0c0546b0cb7a" + ], + "version": "==1.3.3" + }, "pkginfo": { "hashes": [ "sha256:5878d542a4b3f237e359926384f1dde4e099c9f5525d236b1840cf704fa8d474", @@ -536,29 +602,46 @@ "sha256:447ba94990e8014ee25ec853339faf7b0fc8050cdc3289d4d71f7f410fb90095", "sha256:bde19360a8ec4dfd8a20dcb811780a30998101f078fc7ded6162f0076f50508f" ], + "markers": "python_version >= '2.7' and python_version != '3.2.*' and python_version != '3.3.*' and python_version != '3.1.*' and python_version != '3.0.*'", "version": "==0.8.0" }, + "pre-commit": { + "hashes": [ + "sha256:7542bd8ae1c58745175ea0a9295964ee82a10f7e18c4344f5e4c02bd85d02561", + "sha256:87f687da6a2651d5067cfec95b854b004e95b70143cbf2369604bb3acbce25ec" + ], + "index": "pypi", + "version": "==1.12.0" + }, "py": { "hashes": [ "sha256:bf92637198836372b520efcba9e020c330123be8ce527e535d185ed4b6f45694", "sha256:e76826342cefe3c3d5f7e8ee4316b80d1dd8a300781612ddbc765c17ba25a6c6" ], + "markers": "python_version >= '2.7' and python_version != '3.2.*' and python_version != '3.3.*' and python_version != '3.1.*' and python_version != '3.0.*'", "version": "==1.7.0" }, "pygments": { "hashes": [ - "sha256:78f3f434bcc5d6ee09020f92ba487f95ba50f1e3ef83ae96b9d5ffa1bab25c5d", - "sha256:dbae1046def0efb574852fab9e90209b23f556367b5a320c0bcb871c77c3e8cc" + "sha256:6301ecb0997a52d2d31385e62d0a4a4cf18d2f2da7054a5ddad5c366cd39cee7", + "sha256:82666aac15622bd7bb685a4ee7f6625dd716da3ef7473620c192c0168aae64fc" ], - "version": "==2.2.0" + "version": "==2.3.0" }, "pylint": { "hashes": [ - "sha256:1d6d3622c94b4887115fe5204982eee66fdd8a951cf98635ee5caee6ec98c3ec", - "sha256:31142f764d2a7cd41df5196f9933b12b7ee55e73ef12204b648ad7e556c119fb" + "sha256:8e645abc9572749f0256f05db86af81ea2e3d583086cc2b73d241a64ecf571b7", + "sha256:f70e1b78240ba7fea809ecc00fbfbc51615ab531ef3f76f0548072c732358453" ], "index": "pypi", - "version": "==2.1.1" + "version": "==2.2.1" + }, + "pytest": { + "hashes": [ + "sha256:1d131cc532be0023ef8ae265e2a779938d0619bb6c2510f52987ffcba7fa1ee4", + "sha256:ca4761407f1acc85ffd1609f464ca20bb71a767803505bd4127d0e45c5a50e23" + ], + "version": "==4.0.1" }, "pytest-cov": { "hashes": [ @@ -568,6 +651,16 @@ "index": "pypi", "version": "==2.6.0" }, + "pyyaml": { + "hashes": [ + "sha256:254bf6fda2b7c651837acb2c718e213df29d531eebf00edb54743d10bcb694eb", + "sha256:3108529b78577327d15eec243f0ff348a0640b0c3478d67ad7f5648f93bac3e2", + "sha256:3c17fb92c8ba2f525e4b5f7941d850e7a48c3a59b32d331e2502a3cdc6648e76", + "sha256:8d6d96001aa7f0a6a4a95e8143225b5d06e41b1131044913fecb8f85a125714b", + 
"sha256:c8a88edd93ee29ede719080b2be6cb2333dfee1dccba213b422a9c8e97f2967b" + ], + "version": "==4.2b4" + }, "readme-renderer": { "hashes": [ "sha256:bb16f55b259f27f75f640acf5e00cf897845a8b3e4731b5c1a436e4b8529202f", @@ -597,6 +690,13 @@ ], "version": "==1.11.0" }, + "strip-hints": { + "hashes": [ + "sha256:05026a8282c4649b67a74f41f9ad1fcb492e8912568b36a0f366309cf757e170" + ], + "index": "pypi", + "version": "==0.1.1" + }, "toml": { "hashes": [ "sha256:229f81c57791a41d65e399fc06bf0848bab550a9dfd5ed66df18ce5f05e73d5c", @@ -609,6 +709,7 @@ "sha256:3c4d4a5a41ef162dd61f1edb86b0e1c7859054ab656b2e7c7b77e7fbf6d9f392", "sha256:5b4d5549984503050883bc126280b386f5f4ca87e6c023c5d015655ad75bdebb" ], + "markers": "python_version != '3.1.*' and python_version >= '2.6' and python_version != '3.0.*'", "version": "==4.28.1" }, "twine": { @@ -653,8 +754,17 @@ "sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39", "sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22" ], + "markers": "python_version != '3.0.*' and python_version != '3.3.*' and python_version != '3.1.*' and python_version != '3.2.*' and python_version < '4' and python_version >= '2.7'", "version": "==1.24.1" }, + "virtualenv": { + "hashes": [ + "sha256:686176c23a538ecc56d27ed9d5217abd34644823d6391cbeb232f42bf722baad", + "sha256:f899fafcd92e1150f40c8215328be38ff24b519cd95357fa6e78e006c7638208" + ], + "markers": "python_version != '3.0.*' and python_version != '3.3.*' and python_version != '3.1.*' and python_version != '3.2.*' and python_version >= '2.7'", + "version": "==16.1.0" + }, "webencodings": { "hashes": [ "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", @@ -664,10 +774,11 @@ }, "wheel": { "hashes": [ - "sha256:196c9842d79262bb66fcf59faa4bd0deb27da911dbc7c6cdca931080eb1f0783", - "sha256:c93e2d711f5f9841e17f53b0e6c0ff85593f3b416b6eec7a9452041a59a42688" + "sha256:029703bf514e16c8271c3821806a1c171220cc5bdd325cbf4e7da1e056a01db6", + "sha256:1e53cdb3f808d5ccd0df57f964263752aa74ea7359526d3da6c02114ec1e1d44" ], - "version": "==0.32.2" + "markers": "python_version != '3.1.*' and python_version != '3.2.*' and python_version != '3.3.*' and python_version >= '2.7' and python_version != '3.0.*'", + "version": "==0.32.3" }, "wrapt": { "hashes": [ diff --git a/code_parser.py b/code_parser.py deleted file mode 100644 index 29437c35ce..0000000000 --- a/code_parser.py +++ /dev/null @@ -1,66 +0,0 @@ -'''This module is used to remove typehints from source code to ensure python 2.7 compatibility.''' -import argparse -import ast -import os -import re -import sys - -import astunparse - - -class TypeHintRemover(ast.NodeTransformer): - def visit_FunctionDef(self, node): - # remove the return type defintion - node.returns = None - # remove all argument annotations - if node.args.args: - for arg in node.args.args: - arg.annotation = None - return node - - def visit_Import(self, node): - node.names = [n for n in node.names if n.name != 'typing'] - return node if node.names else None - - def visit_ImportFrom(self, node): - return node if node.module != 'typing' else None - - -if __name__ == '__main__': - parser = argparse.ArgumentParser() - parser.add_argument('-th', '--remove-type-hints', action='store_true') - parser.add_argument('--suppress-warning', action='store_true') - - args = parser.parse_args() - - files = [os.path.abspath(os.path.join(os.path.dirname(__file__), 'cognite', file)) for file in - os.listdir('./cognite') if re.match('.+\.py$', file)] - - version_dirs = 
[os.path.abspath(os.path.join(os.path.dirname(__file__), 'cognite', file)) for file in - os.listdir('./cognite') if re.match('^v\d\d$', file)] - for dir in version_dirs: - version_files = [os.path.abspath(os.path.join(dir, file)) for file in os.listdir(dir) if - re.match('.+\.py$', file)] - files.extend(version_files) - - if not args.suppress_warning: - if input("This will alter the source code of your project and should not be run without knowing what you're " - "doing. Enter 'PEACOCK' to continue: ") != 'PEACOCK': - sys.exit(0) - - for file_path in files: - print(file_path) - - # parse the source code into an AST - with open(file_path, 'r') as f: - parsed_source = ast.parse(f.read()) - - if args.remove_type_hints: - print("*****Removing type hints") - # remove all type annotations, function return type definitions - # and import statements from 'typing' - transformed = TypeHintRemover().visit(parsed_source) - - with open(file_path, 'w') as f: - f.write(astunparse.unparse(transformed)) - print() diff --git a/cognite/__init__.py b/cognite/__init__.py index ebd8350d13..3e92a65762 100644 --- a/cognite/__init__.py +++ b/cognite/__init__.py @@ -18,4 +18,4 @@ # __all__ = ["v04", "v05", "v06", "preprocessing", "config", "data_transfer_service"] -__version__ = "0.11.18" +__version__ = "0.11.19" diff --git a/cognite/_utils.py b/cognite/_utils.py index bd50fbe421..e5e0e7267b 100644 --- a/cognite/_utils.py +++ b/cognite/_utils.py @@ -32,6 +32,7 @@ def serialize(obj): """JSON serializer for objects not serializable by default json code""" return obj.__dict__ + def _raise_API_error(res): x_request_id = res.headers.get("X-Request-Id") code = res.status_code diff --git a/cognite/auxiliary/_hosting/model/model/model.py b/cognite/auxiliary/_hosting/model/model/model.py index 165613f13f..3cf5bcd333 100644 --- a/cognite/auxiliary/_hosting/model/model/model.py +++ b/cognite/auxiliary/_hosting/model/model/model.py @@ -2,6 +2,7 @@ from cognite.data_transfer_service import DataTransferService + class Model: def __init__(self, model): self._model = model diff --git a/cognite/v04/raw.py b/cognite/v04/raw.py index 633d72269e..b06bc5d1fe 100644 --- a/cognite/v04/raw.py +++ b/cognite/v04/raw.py @@ -228,9 +228,7 @@ def create_rows( """ api_key, project = config.get_config_variables(api_key, project) - url = config.get_base_url() + "/api/0.4/projects/{}/raw/{}/{}/create".format( - project, database_name, table_name - ) + url = config.get_base_url() + "/api/0.4/projects/{}/raw/{}/{}/create".format(project, database_name, table_name) headers = {"api-key": api_key, "content-type": "*/*", "accept": "application/json"} if ensure_parent: @@ -270,9 +268,7 @@ def delete_rows( """ api_key, project = config.get_config_variables(api_key, project) - url = config.get_base_url() + "/api/0.4/projects/{}/raw/{}/{}/delete".format( - project, database_name, table_name - ) + url = config.get_base_url() + "/api/0.4/projects/{}/raw/{}/{}/delete".format(project, database_name, table_name) body = {"items": [{"key": "{}".format(row.key), "columns": row.columns} for row in rows]} headers = {"api-key": api_key, "content-type": "*/*", "accept": "application/json"} res = _utils.post_request(url=url, body=body, headers=headers, cookies=config.get_cookies()) diff --git a/cognite/v04/timeseries.py b/cognite/v04/timeseries.py index c985419c17..3826c6c64b 100644 --- a/cognite/v04/timeseries.py +++ b/cognite/v04/timeseries.py @@ -152,9 +152,7 @@ def _get_datapoints_helper(tag_id, aggregates=None, granularity=None, start=None list of datapoints: A list 
containing datapoint dicts. """ api_key, project = kwargs.get("api_key"), kwargs.get("project") - url = config.get_base_url() + "/api/0.4/projects/{}/timeseries/data/{}".format( - project, quote(tag_id, safe="") - ) + url = config.get_base_url() + "/api/0.4/projects/{}/timeseries/data/{}".format(project, quote(tag_id, safe="")) use_protobuf = kwargs.get("protobuf", True) and aggregates is None limit = _constants.LIMIT if aggregates is None else _constants.LIMIT_AGG @@ -274,9 +272,7 @@ def post_datapoints(tag_id, datapoints: List[Datapoint], **kwargs): An empty response. """ api_key, project = config.get_config_variables(kwargs.get("api_key"), kwargs.get("project")) - url = config.get_base_url() + "/api/0.4/projects/{}/timeseries/data/{}".format( - project, quote(tag_id, safe="") - ) + url = config.get_base_url() + "/api/0.4/projects/{}/timeseries/data/{}".format(project, quote(tag_id, safe="")) headers = {"api-key": api_key, "content-type": "application/json", "accept": "application/json"} @@ -305,9 +301,7 @@ def get_latest(tag_id, **kwargs): output formats. """ api_key, project = config.get_config_variables(kwargs.get("api_key"), kwargs.get("project")) - url = config.get_base_url() + "/api/0.4/projects/{}/timeseries/latest/{}".format( - project, quote(tag_id, safe="") - ) + url = config.get_base_url() + "/api/0.4/projects/{}/timeseries/latest/{}".format(project, quote(tag_id, safe="")) headers = {"api-key": api_key, "accept": "application/json"} res = _utils.get_request(url, headers=headers, cookies=config.get_cookies()) return LatestDatapointResponse(res.json()) diff --git a/cognite/v05/raw.py b/cognite/v05/raw.py index 859bcb88c0..db36f5324f 100644 --- a/cognite/v05/raw.py +++ b/cognite/v05/raw.py @@ -228,9 +228,7 @@ def create_rows( """ api_key, project = config.get_config_variables(api_key, project) - url = config.get_base_url() + "/api/0.5/projects/{}/raw/{}/{}/create".format( - project, database_name, table_name - ) + url = config.get_base_url() + "/api/0.5/projects/{}/raw/{}/{}/create".format(project, database_name, table_name) headers = {"api-key": api_key, "content-type": "*/*", "accept": "application/json"} if ensure_parent: @@ -270,9 +268,7 @@ def delete_rows( """ api_key, project = config.get_config_variables(api_key, project) - url = config.get_base_url() + "/api/0.5/projects/{}/raw/{}/{}/delete".format( - project, database_name, table_name - ) + url = config.get_base_url() + "/api/0.5/projects/{}/raw/{}/{}/delete".format(project, database_name, table_name) body = {"items": [{"key": "{}".format(row.key), "columns": row.columns} for row in rows]} headers = {"api-key": api_key, "content-type": "*/*", "accept": "application/json"} res = _utils.post_request(url=url, body=body, headers=headers, cookies=config.get_cookies()) diff --git a/cognite/v06/dto.py b/cognite/v06/dto.py index 6afdbea776..1e9a4127dc 100644 --- a/cognite/v06/dto.py +++ b/cognite/v06/dto.py @@ -3,9 +3,10 @@ This module contains data objects used to represent the data returned from the API. """ -import pandas as pd from typing import List +import pandas as pd + class Column: """Data transfer object for a column. @@ -18,20 +19,7 @@ class Column: metadata (dict): Custom, application specific metadata. String key -> String Value. 
""" - id: int - name: str - externalId: str - valueType: str - metadata: dict - - def __init__( - self, - id: int, - name: str, - external_id: str, - value_type: str, - metadata: dict - ): + def __init__(self, id: int, name: str, external_id: str, value_type: str, metadata: dict): self.id = id self.name = name self.externalId = external_id @@ -41,11 +29,11 @@ def __init__( @staticmethod def from_JSON(the_column: dict): return Column( - id=the_column['id'], - name=the_column['name'], - external_id=the_column.get('externalId', None), - value_type=the_column['valueType'], - metadata=the_column['metadata'] + id=the_column["id"], + name=the_column["name"], + external_id=the_column.get("externalId", None), + value_type=the_column["valueType"], + metadata=the_column["metadata"], ) @@ -62,23 +50,15 @@ class Sequence: metadata (dict): Custom, application specific metadata. String key -> String Value. """ - id: int - name: str - externalId: str - assetId: int - columns: List[Column] - description: str - metadata: dict - def __init__( - self, - id: int, - name: str, - external_id: str, - asset_id: int, - columns: List[Column], - description: str, - metadata: dict + self, + id: int, + name: str, + external_id: str, + asset_id: int, + columns: List[Column], + description: str, + metadata: dict, ): self.id = id self.name = name @@ -91,16 +71,13 @@ def __init__( @staticmethod def from_JSON(the_sequence: dict): return Sequence( - id=the_sequence['id'], - name=the_sequence['name'], - external_id=the_sequence.get('externalId', None), - asset_id=the_sequence.get('assetId', None), - columns=[ - Column.from_JSON(the_column) - for the_column in the_sequence['columns'] - ], - description=the_sequence['description'], - metadata=the_sequence['metadata'] + id=the_sequence["id"], + name=the_sequence["name"], + external_id=the_sequence.get("externalId", None), + asset_id=the_sequence.get("assetId", None), + columns=[Column.from_JSON(the_column) for the_column in the_sequence["columns"]], + description=the_sequence["description"], + metadata=the_sequence["metadata"], ) @@ -112,23 +89,13 @@ class RowValue: value (str): The actual value. """ - columnId: int - value: str # Can be either string, float, or boolean - - def __init__( - self, - column_id: int, - value: str - ): + def __init__(self, column_id: int, value: str): self.columnId = column_id self.value = value @staticmethod def from_JSON(the_row_value: dict): - return RowValue( - column_id=the_row_value['columnId'], - value=the_row_value['value'] - ) + return RowValue(column_id=the_row_value["columnId"], value=the_row_value["value"]) class Row: @@ -138,29 +105,20 @@ class Row: row_number (int): The row number for this row. values (list): The values in this row. """ - rowNumber: int - values: List[RowValue] - def __init__( - self, - row_number: int, - values: List[RowValue] - ): + def __init__(self, row_number: int, values: List[RowValue]): self.rowNumber = row_number self.values = values @staticmethod def from_JSON(the_row: dict): return Row( - row_number=the_row['rowNumber'], - values=[ - RowValue.from_JSON(the_row_value) - for the_row_value in the_row['values'] - ] + row_number=the_row["rowNumber"], + values=[RowValue.from_JSON(the_row_value) for the_row_value in the_row["values"]], ) def get_row_as_csv(self): - return ','.join([str(x.value) for x in self.values]) + return ",".join([str(x.value) for x in self.values]) class SequenceDataResponse: @@ -170,22 +128,12 @@ class SequenceDataResponse: rows (list): List of rows with the data. 
""" - rows: List[Row] - - def __init__( - self, - rows: List[Row] - ): + def __init__(self, rows: List[Row]): self.rows = rows @staticmethod def from_JSON(the_data: dict): - return SequenceDataResponse( - rows=[ - Row.from_JSON(the_row) - for the_row in the_data['rows'] - ] - ) + return SequenceDataResponse(rows=[Row.from_JSON(the_row) for the_row in the_data["rows"]]) @staticmethod def _row_has_value_for_column(row: Row, column_id: int): @@ -200,22 +148,16 @@ def to_pandas(self): # Create the empty dataframe column_ids = [value.columnId for value in self.rows[0].values] - my_df = pd.DataFrame( - columns=column_ids - ) + my_df = pd.DataFrame(columns=column_ids) # Fill the dataframe with values. We might not have data for every column, so we need to be careful for row in self.rows: data_this_row: List[float] = [] for column_id in column_ids: # Do we have a value for this column? if self._row_has_value_for_column(row, column_id): - data_this_row.append( - self._get_value_for_column(row, column_id) - ) + data_this_row.append(self._get_value_for_column(row, column_id)) else: - data_this_row.append( - 'null' - ) + data_this_row.append("null") my_df.loc[len(my_df)] = data_this_row return my_df @@ -234,18 +176,7 @@ class SequenceDataRequest: column_ids (List[int]): ids of the columns to get data for. """ - inclusiveFrom: int - inclusiveTo: int - limit: int = 100 - columnIds: List[int] = [] - - def __init__( - self, - inclusive_from: int, - inclusive_to: int, - limit: int = 100, - column_ids: List[int] = None - ): + def __init__(self, inclusive_from: int, inclusive_to: int, limit: int = 100, column_ids: List[int] = None): self.inclusiveFrom = inclusive_from self.inclusiveTo = inclusive_to self.limit = limit diff --git a/cognite/v06/sequences.py b/cognite/v06/sequences.py index bab089c755..bc133bd53c 100644 --- a/cognite/v06/sequences.py +++ b/cognite/v06/sequences.py @@ -12,10 +12,7 @@ from cognite.v06.dto import Sequence, SequenceDataRequest, SequenceDataResponse, Row -def post_sequences( - sequences: List[Sequence], - **kwargs -): +def post_sequences(sequences: List[Sequence], **kwargs): """Create a new time series. Args: @@ -46,15 +43,12 @@ def post_sequences( res = _utils.post_request(url, body=body, headers=headers) json_response = json.loads(res.text) - the_sequence: dict = json_response['data']['items'][0] + the_sequence: dict = json_response["data"]["items"][0] return Sequence.from_JSON(the_sequence) -def get_sequence_by_id( - id: int, - **kwargs -): +def get_sequence_by_id(id: int, **kwargs): """Returns a Sequence object containing the requested sequence. Args: @@ -75,15 +69,12 @@ def get_sequence_by_id( res = _utils.get_request(url=url, headers=headers, cookies=config.get_cookies()) json_response = json.loads(res.text) - the_sequence: dict = json_response['data']['items'][0] + the_sequence: dict = json_response["data"]["items"][0] return Sequence.from_JSON(the_sequence) -def get_sequence_by_external_id( - external_id: str, - **kwargs -): +def get_sequence_by_external_id(external_id: str, **kwargs): """Returns a Sequence object containing the requested sequence. 
Args: @@ -105,15 +96,12 @@ def get_sequence_by_external_id( res = _utils.get_request(url=url, params=params, headers=headers, cookies=config.get_cookies()) json_response = json.loads(res.text) - the_sequence: dict = json_response['data']['items'][0] + the_sequence: dict = json_response["data"]["items"][0] return Sequence.from_JSON(the_sequence) -def delete_sequence_by_id( - id: int, - **kwargs -): +def delete_sequence_by_id(id: int, **kwargs): """Deletes the sequence with the given id. Args: @@ -134,11 +122,7 @@ def delete_sequence_by_id( return res.json() -def post_data_to_sequence( - id: int, - rows: List[Row], - **kwargs -): +def post_data_to_sequence(id: int, rows: List[Row], **kwargs): """Posts data to a sequence. Args: @@ -155,13 +139,7 @@ def post_data_to_sequence( api_key, project = config.get_config_variables(kwargs.get("api_key"), kwargs.get("project")) url = config.get_base_url() + "/api/0.6/projects/{}/sequences/{}/postdata".format(project, id) - body = { - "items": [ - { - "rows": [row.__dict__ for row in rows] - } - ] - } + body = {"items": [{"rows": [row.__dict__ for row in rows]}]} headers = {"api-key": api_key, "content-type": "application/json", "accept": "application/json"} @@ -171,12 +149,7 @@ def post_data_to_sequence( def get_data_from_sequence( - id: int, - inclusive_from: int, - inclusive_to: int, - limit: int = 100, - column_ids: List[int] = None, - **kwargs + id: int, inclusive_from: int, inclusive_to: int, limit: int = 100, column_ids: List[int] = None, **kwargs ): """Gets data from the given sequence. @@ -200,21 +173,14 @@ def get_data_from_sequence( headers = {"api-key": api_key, "accept": "application/json", "Content-Type": "application/json"} sequenceDataRequest: SequenceDataRequest = SequenceDataRequest( - inclusive_from=inclusive_from, - inclusive_to=inclusive_to, - limit=limit, - column_ids=column_ids or [] + inclusive_from=inclusive_from, inclusive_to=inclusive_to, limit=limit, column_ids=column_ids or [] ) - body = { - "items": [ - sequenceDataRequest.__dict__ - ] - } + body = {"items": [sequenceDataRequest.__dict__]} res = _utils.post_request(url=url, body=body, headers=headers, cookies=config.get_cookies()) json_response = json.loads(res.text) - the_data: dict = json_response['data']['items'][0] + the_data: dict = json_response["data"]["items"][0] return SequenceDataResponse.from_JSON(the_data) diff --git a/examples/analytics/model_hosting_scheduled_predict/prod_rate/prod_rate/model.py b/examples/analytics/model_hosting_scheduled_predict/prod_rate/prod_rate/model.py index 8b90a3fdca..7cddc046e1 100644 --- a/examples/analytics/model_hosting_scheduled_predict/prod_rate/prod_rate/model.py +++ b/examples/analytics/model_hosting_scheduled_predict/prod_rate/prod_rate/model.py @@ -2,6 +2,7 @@ from sklearn.ensemble import RandomForestRegressor from cognite.data_transfer_service import DataTransferService + class Model: """ You need to have a class called Model in a file called model.py at the @@ -19,6 +20,7 @@ class that are ready for predictions. Which use the persisted state to do predictions. 
""" + @staticmethod def train(file_io, data_spec, api_key, project, **kwargs): """ @@ -41,7 +43,7 @@ def train(file_io, data_spec, api_key, project, **kwargs): X = df[["temp", "pressure", "rpm"]].values y = df["production_rate"].values - regressor = RandomForestRegressor(n_estimators=10, min_samples_split=100) # We'll mostly use default settings + regressor = RandomForestRegressor(n_estimators=10, min_samples_split=100) # We'll mostly use default settings regressor.fit(X, y) # Persist our regressor model @@ -61,7 +63,7 @@ def load(file_io): with file_io("regressor.pickle", "rb") as f: regressor = pickle.load(f) return Model(regressor) - + def predict(self, instance, api_key, project, **kwargs): """ instance: diff --git a/examples/analytics/model_hosting_scheduled_predict/prod_rate/setup.py b/examples/analytics/model_hosting_scheduled_predict/prod_rate/setup.py index 57dc57f530..c184c91785 100644 --- a/examples/analytics/model_hosting_scheduled_predict/prod_rate/setup.py +++ b/examples/analytics/model_hosting_scheduled_predict/prod_rate/setup.py @@ -13,5 +13,5 @@ description="A random forrest regressor used to find production rate for abc equipment", url="https://relevant.webpage", maintainer="Tutorial", - maintainer_email="Tutorial" + maintainer_email="Tutorial", ) diff --git a/examples/analytics/model_hosting_simple_train_predict/linreg/linreg/model.py b/examples/analytics/model_hosting_simple_train_predict/linreg/linreg/model.py index 62e9a2a470..5d96d73ed1 100644 --- a/examples/analytics/model_hosting_simple_train_predict/linreg/linreg/model.py +++ b/examples/analytics/model_hosting_simple_train_predict/linreg/linreg/model.py @@ -2,6 +2,7 @@ import numpy as np from cognite.data_transfer_service import DataTransferService + class Model: """ You need to have a class called Model in a file called model.py at the @@ -19,6 +20,7 @@ class that are ready for predictions. Which use the persisted state to do predictions. """ + @staticmethod def train(file_io, data_spec, api_key, project, **kwargs): """ @@ -43,10 +45,7 @@ def train(file_io, data_spec, api_key, project, **kwargs): X.insert(0, "f0", 1) # Least squares - coefficients = pd.DataFrame( - np.linalg.inv(X.T.dot(X)).dot(X.T).dot(y), - columns=["beta_hat"] - ) + coefficients = pd.DataFrame(np.linalg.inv(X.T.dot(X)).dot(X.T).dot(y), columns=["beta_hat"]) # Persist our result with file_io("coefficients.csv", "w") as f: @@ -65,7 +64,7 @@ def load(file_io): with file_io("coefficients.csv", "r") as f: coefficients = pd.read_csv(f) return Model(coefficients) - + def predict(self, instance, precision=2, **kwargs): """ instance: diff --git a/examples/analytics/model_hosting_simple_train_predict/linreg/setup.py b/examples/analytics/model_hosting_simple_train_predict/linreg/setup.py index c3e8f5f579..89de36d913 100644 --- a/examples/analytics/model_hosting_simple_train_predict/linreg/setup.py +++ b/examples/analytics/model_hosting_simple_train_predict/linreg/setup.py @@ -13,5 +13,5 @@ description="A simple linear regression model for a tutorial", url="https://relevant.webpage", maintainer="Tutorial", - maintainer_email="Tutorial" + maintainer_email="Tutorial", ) diff --git a/examples/basics/openindustrialdata.py b/examples/basics/openindustrialdata.py index e6dadff6fa..f58bbd2a50 100644 --- a/examples/basics/openindustrialdata.py +++ b/examples/basics/openindustrialdata.py @@ -5,12 +5,12 @@ from cognite.v05.timeseries import get_datapoints_frame # Set API key and project for current session. The project is Open Industrial Data. 
-configure_session(api_key=os.getenv('COGNITE_API_KEY'), project='publicdata') +configure_session(api_key=os.getenv("COGNITE_API_KEY"), project="publicdata") # Retrieve one year of daily aggregates for a time series -ts = 'VAL_23-PT-92512:X.Value' -dataframe = get_datapoints_frame([ts], start='52w-ago', aggregates=['avg','min', 'max'], granularity='1d', processes=1) +ts = "VAL_23-PT-92512:X.Value" +dataframe = get_datapoints_frame([ts], start="52w-ago", aggregates=["avg", "min", "max"], granularity="1d", processes=1) # Plot the dataframe -dataframe.plot(x='timestamp') +dataframe.plot(x="timestamp") plt.show() diff --git a/examples/sklearn/svm/model/svm.py b/examples/sklearn/svm/model/svm.py index 2d95b5a272..215389185c 100644 --- a/examples/sklearn/svm/model/svm.py +++ b/examples/sklearn/svm/model/svm.py @@ -9,12 +9,14 @@ configure_session(os.environ.get("COGNITE_API_KEY"), "akerbp") -tag_ids = ["SKAP_18ESV2113/BCH/10sSamp", - {"tagId": "SKAP_18PI2101/Y/10sSAMP", "aggregates": ["avg"]}, - {"tagId": "SKAP_18PI2117/Y/10sSAMP", "aggregates": ["avg"]}] +tag_ids = [ + "SKAP_18ESV2113/BCH/10sSamp", + {"tagId": "SKAP_18PI2101/Y/10sSAMP", "aggregates": ["avg"]}, + {"tagId": "SKAP_18PI2117/Y/10sSAMP", "aggregates": ["avg"]}, +] target_vars = ["SKAP_18PI2117/Y/10sSAMP"] -df = fill_nan(get_datapoints_frame(tag_ids, aggregates=['step'], granularity="1d", start='50w-ago')) +df = fill_nan(get_datapoints_frame(tag_ids, aggregates=["step"], granularity="1d", start="50w-ago")) y_labels = [label for label in list(df.columns) if any([label.startswith(var_name) for var_name in target_vars])] @@ -22,7 +24,7 @@ print() -X = df.drop(['timestamp'] + y_labels, axis=1).values +X = df.drop(["timestamp"] + y_labels, axis=1).values print(X.shape) y = df.drop(X_labels, axis=1).values.reshape(X.shape[0]) @@ -36,7 +38,7 @@ print(classifier.predict(X)) # Export the classifier to a file -joblib.dump(classifier, os.path.abspath(os.path.dirname(__file__)) + '/model.joblib') +joblib.dump(classifier, os.path.abspath(os.path.dirname(__file__)) + "/model.joblib") # Equivalently, you can use the pickle library to export the model similar to: # import pickle @@ -47,4 +49,3 @@ # The exact file name of of the exported model you upload to GCS is important! # Your model must be named model.joblib, model.pkl, or model.bst with respect to # the library you used to export it. 
- diff --git a/examples/sklearn/svm/setup.py b/examples/sklearn/svm/setup.py index 4724ebde21..8e4b3f8d48 100644 --- a/examples/sklearn/svm/setup.py +++ b/examples/sklearn/svm/setup.py @@ -1,7 +1,9 @@ from setuptools import setup, find_packages -setup(name='model', - version='0.1', - packages=find_packages(), - description='SVM sklearn GMLE CMHE', - package_data={'model': ['model.*', 'processing_requirements.txt']}) +setup( + name="model", + version="0.1", + packages=find_packages(), + description="SVM sklearn GMLE CMHE", + package_data={"model": ["model.*", "processing_requirements.txt"]}, +) diff --git a/setup.py b/setup.py index ce0b5603bd..cb6ef2d7d6 100644 --- a/setup.py +++ b/setup.py @@ -17,7 +17,7 @@ author_email="erlend.vollset@cognite.com", packages=packages, install_requires=["requests", "pandas", "protobuf", "cognite-logger>=0.3", "tabulate"], - python_requires=">=3.3", + python_requires=">=3.5", zip_safe=False, include_package_data=True, ) diff --git a/tests/test_preprocessing.py b/tests/test_preprocessing.py index f614993f4b..ec90c825ff 100644 --- a/tests/test_preprocessing.py +++ b/tests/test_preprocessing.py @@ -5,28 +5,28 @@ from cognite import preprocessing -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def dfs_no_nan(): df1 = pd.DataFrame([[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12]]) df2 = pd.DataFrame([[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12]]) df3 = pd.DataFrame([[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12]]) - df1['timestamp'] = df1.index * 2000 - df2['timestamp'] = df2.index * 5000 - df3['timestamp'] = df3.index * 10000 + df1["timestamp"] = df1.index * 2000 + df2["timestamp"] = df2.index * 5000 + df3["timestamp"] = df3.index * 10000 return df1, df2, df3 -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def df_with_zero_var_column(): return pd.DataFrame([[1, 2, 3], [1, 4, 5], [1, 6, 7]]) -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def df_with_nan(): return pd.DataFrame([[1, 2, 3, 4], [5, 6, None, 8], [None, 10, 11, 12]]) -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def df_with_leading_nan(): return pd.DataFrame([[None, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12]]) @@ -35,7 +35,7 @@ class TestNormalize: def test_zero_mean(self, dfs_no_nan): df = dfs_no_nan[0] norm_df = preprocessing.normalize(df) - assert (norm_df.drop('timestamp', axis=1).mean() == 0).all() + assert (norm_df.drop("timestamp", axis=1).mean() == 0).all() class TestMergeDataframes: @@ -63,7 +63,7 @@ def test_when_df_is_none(self, dfs_no_nan): class TestEvenIndex: def test_even_index(self, dfs_no_nan): df = dfs_no_nan[0] - df['timestamp'] = [1000, 3000, 8000] + df["timestamp"] = [1000, 3000, 8000] even_index_df = preprocessing.make_index_even(df) timestamps = even_index_df.timestamp.values even_deltas = np.diff(timestamps, 1) == 1000 @@ -81,11 +81,11 @@ def test_has_leading_nan(self, df_with_leading_nan): assert has_leading_nan, "Leading Nans removed" -class TestRemoveNanColumns(): +class TestRemoveNanColumns: @pytest.fixture def df_nan_col_removed(self, df_with_nan): df = df_with_nan - df['timestamp'] = df.index * 1000 + df["timestamp"] = df.index * 1000 col_removed_df, mask = preprocessing.remove_nan_columns(df) return col_removed_df, mask @@ -98,11 +98,11 @@ def test_correct_column_removed(self, df_nan_col_removed): assert 0 not in df_nan_col_removed[0].columns -class TestRemoveZeroVarColumns(): +class TestRemoveZeroVarColumns: @pytest.fixture def df_zero_var_removed(self, df_with_zero_var_column): df = df_with_zero_var_column 
- df['timestamp'] = df.index * 1000 + df["timestamp"] = df.index * 1000 col_removed_df, mask = preprocessing.remove_zero_variance_columns(df) return col_removed_df, mask @@ -115,11 +115,11 @@ def test_correct_column_removed(self, df_zero_var_removed): assert 0 not in df_zero_var_removed[0].columns -class TestPreprocess(): - @pytest.fixture(scope='class') +class TestPreprocess: + @pytest.fixture(scope="class") def df_nans_uneven(self): df = pd.DataFrame([[None, 2, 3, 4], [5, None, 7, 8], [9, 10, 11, 12]]) - df['timestamp'] = [1000, 3000, 8000] + df["timestamp"] = [1000, 3000, 8000] return df def test_preprocess(self, df_nans_uneven): @@ -139,4 +139,4 @@ def test_preprocess_remove_leading_nan(self, df_nans_uneven): def test_preprocess_center_and_scale(self, df_nans_uneven): pp_df, mask = preprocessing.preprocess(df_nans_uneven, center_and_scale=True, remove_leading_nan_rows=True) - assert (pp_df.drop('timestamp', axis=1).mean().round() == 0).all() + assert (pp_df.drop("timestamp", axis=1).mean().round() == 0).all() diff --git a/tests/v04/test_assets.py b/tests/v04/test_assets.py index 6532916d23..fae91a0ee5 100644 --- a/tests/v04/test_assets.py +++ b/tests/v04/test_assets.py @@ -3,21 +3,22 @@ from cognite.v04 import assets from cognite.v04.dto import Asset, AssetResponse -ASSET_NAME = 'test_asset' +ASSET_NAME = "test_asset" -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def get_asset_subtree_response(): return assets.get_asset_subtree(limit=1) -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def get_assets_response(): return assets.get_assets(limit=1) def test_get_assets_response_object(get_assets_response): from cognite.v04.dto import AssetResponse + assert isinstance(get_assets_response, AssetResponse) assert get_assets_response.next_cursor() is not None assert get_assets_response.previous_cursor() is None @@ -25,6 +26,7 @@ def test_get_assets_response_object(get_assets_response): def test_asset_subtree_object(get_asset_subtree_response): from cognite.v04.dto import AssetResponse + assert isinstance(get_asset_subtree_response, AssetResponse) assert get_asset_subtree_response.next_cursor() is not None assert get_asset_subtree_response.previous_cursor() is None @@ -36,11 +38,13 @@ def test_json(get_asset_subtree_response): def test_pandas(get_asset_subtree_response): import pandas as pd + assert isinstance(get_asset_subtree_response.to_pandas(), pd.DataFrame) def test_ndarray(get_asset_subtree_response): import numpy as np + assert isinstance(get_asset_subtree_response.to_ndarray(), np.ndarray) @@ -48,13 +52,13 @@ def test_post_assets(): a1 = Asset(name=ASSET_NAME) res = assets.post_assets([a1]) assert isinstance(res, AssetResponse) - assert res.to_json()[0]['name'] == ASSET_NAME - assert res.to_json()[0].get('id') != None + assert res.to_json()[0]["name"] == ASSET_NAME + assert res.to_json()[0].get("id") != None def test_delete_assets(): asset = assets.get_assets(ASSET_NAME, depth=0) - id = asset.to_json()[0]['id'] + id = asset.to_json()[0]["id"] res = assets.delete_assets([id]) assert res == {} assert len(assets.get_assets(ASSET_NAME, depth=0).to_json()) == 0 diff --git a/tests/v04/test_cloud_storage.py b/tests/v04/test_cloud_storage.py index 800d9e3d1e..b1ecd58bf0 100644 --- a/tests/v04/test_cloud_storage.py +++ b/tests/v04/test_cloud_storage.py @@ -8,22 +8,23 @@ def test_upload_file_metadata(): - response = cloud_storage.upload_file('test_file', source='sdk-tests', overwrite=True) - assert response.get('uploadURL') is not None - assert 
response.get('fileId') is not None
+    response = cloud_storage.upload_file("test_file", source="sdk-tests", overwrite=True)
+    assert response.get("uploadURL") is not None
+    assert response.get("fileId") is not None


 def test_upload_file(tmpdir):
-    file_path = os.path.join(tmpdir, 'test_file.txt')
-    tmpdir.join('test_file.txt').write("This is a test file.")
+    file_path = os.path.join(tmpdir, "test_file.txt")
+    tmpdir.join("test_file.txt").write("This is a test file.")
     with pytest.warns(UserWarning):
-        response = cloud_storage.upload_file('test_file', file_path, source='sdk-tests', overwrite=True)
-    assert response.get('uploadURL') is None
-    assert response.get('fileId') is not None
+        response = cloud_storage.upload_file("test_file", file_path, source="sdk-tests", overwrite=True)
+    assert response.get("uploadURL") is None
+    assert response.get("fileId") is not None


 def test_list_files():
     from cognite.v04.dto import FileListResponse
+
     response = cloud_storage.list_files(limit=3)
     assert isinstance(response, FileListResponse)
     assert isinstance(response.to_pandas(), pd.DataFrame)
@@ -33,19 +34,20 @@ def test_list_files():


 def test_list_files_empty():
-    response = cloud_storage.list_files(source='not_a_source')
+    response = cloud_storage.list_files(source="not_a_source")
     assert response.to_pandas().empty
     assert len(response.to_json()) == 0


-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def file_id():
-    res = cloud_storage.list_files(name='test_file', source='sdk-tests', limit=1)
-    return res.to_json()[0]['id']
+    res = cloud_storage.list_files(name="test_file", source="sdk-tests", limit=1)
+    return res.to_json()[0]["id"]


 def test_get_file_info(file_id):
     from cognite.v04.dto import FileInfoResponse
+
     response = cloud_storage.get_file_info(file_id)
     assert isinstance(response, FileInfoResponse)
     assert isinstance(response.to_json(), dict)
@@ -54,7 +56,7 @@ def test_get_file_info(file_id):
     assert response.id == file_id


-@pytest.mark.parametrize('get_contents', [True, False])
+@pytest.mark.parametrize("get_contents", [True, False])
 def test_download_files(file_id, get_contents):
     try:
         response = cloud_storage.download_file(file_id, get_contents)
@@ -68,4 +70,4 @@ def test_download_files(file_id, get_contents):

 def test_delete_file(file_id):
     response = cloud_storage.delete_files([file_id])
-    assert file_id in response['deleted'] or file_id in response['failed']
+    assert file_id in response["deleted"] or file_id in response["failed"]
diff --git a/tests/v04/test_raw.py b/tests/v04/test_raw.py
index 658b73e247..cefd1173dd 100644
--- a/tests/v04/test_raw.py
+++ b/tests/v04/test_raw.py
@@ -14,39 +14,39 @@
 ROW_COLUMNS = None


-@pytest.fixture(autouse=True, scope='class')
+@pytest.fixture(autouse=True, scope="class")
 def db_name():
     global DB_NAME
-    DB_NAME = 'test_db_{}'.format(randint(1, 2 ** 53 - 1))
+    DB_NAME = "test_db_{}".format(randint(1, 2 ** 53 - 1))


-@pytest.fixture(autouse=True, scope='class')
+@pytest.fixture(autouse=True, scope="class")
 def table_name():
     global TABLE_NAME
-    TABLE_NAME = 'test_table_{}'.format(randint(1, 2 ** 53 - 1))
+    TABLE_NAME = "test_table_{}".format(randint(1, 2 ** 53 - 1))


-@pytest.fixture(autouse=True, scope='class')
+@pytest.fixture(autouse=True, scope="class")
 def row_key():
     global ROW_KEY
-    ROW_KEY = 'test_key_{}'.format(randint(1, 2 ** 53 - 1))
+    ROW_KEY = "test_key_{}".format(randint(1, 2 ** 53 - 1))


-@pytest.fixture(autouse=True, scope='class')
+@pytest.fixture(autouse=True, scope="class")
 def row_columns():
     global ROW_COLUMNS
-    ROW_COLUMNS = {'col1': 'val1'}
+    ROW_COLUMNS = {"col1": "val1"}


 class TestDatabases:
-    @pytest.fixture(scope='class')
+    @pytest.fixture(scope="class")
     def databases(self):
         yield raw.get_databases()

     def test_create_databases(self):
         response = raw.create_databases([DB_NAME])
         assert isinstance(response, RawResponse)
-        assert response.to_json()[0]['dbName'] == DB_NAME
+        assert response.to_json()[0]["dbName"] == DB_NAME

     def test_databases_response_length(self, databases):
         assert len(databases.to_json()) > 0
@@ -65,20 +65,20 @@ def test_delete_databases(self):


 class TestTables:
-    @pytest.fixture(autouse=True, scope='class')
+    @pytest.fixture(autouse=True, scope="class")
     def create_database(self):
         raw.create_databases([DB_NAME])
         yield
         raw.delete_databases([DB_NAME], recursive=True)

-    @pytest.fixture(scope='class')
+    @pytest.fixture(scope="class")
     def tables(self):
         yield raw.get_tables(DB_NAME)

     def test_create_tables(self):
         response = raw.create_tables(DB_NAME, [TABLE_NAME])
         # assert isinstance(response, RawObject)
-        assert response.to_json()[0]['tableName'] == TABLE_NAME
+        assert response.to_json()[0]["tableName"] == TABLE_NAME

     def test_tables_response_length(self, tables):
         assert len(tables.to_json()) > 0
@@ -99,7 +99,7 @@ def test_delete_tables(self):


 class TestRows:
-    @pytest.fixture(autouse=True, scope='class')
+    @pytest.fixture(autouse=True, scope="class")
     def create_database(self):
         raw.create_databases([DB_NAME])
         raw.create_tables(DB_NAME, [TABLE_NAME])
diff --git a/tests/v05/test_dto.py b/tests/v05/test_dto.py
index 723bfa781f..338f05b3c3 100644
--- a/tests/v05/test_dto.py
+++ b/tests/v05/test_dto.py
@@ -5,21 +5,21 @@
 from cognite.v05.dto import EventResponse, EventListResponse, TimeSeriesResponse, FileInfoResponse


-@pytest.fixture(scope='module', params=['ts', 'file', 'event', 'eventlist'])
+@pytest.fixture(scope="module", params=["ts", "file", "event", "eventlist"])
 def get_response_obj(request):
-    TS_INTERNAL_REPR = {'data': {'items': [{'name': '0', 'metadata': {'md1': 'val1'}}]}}
-    EVENT_LIST_INTERNAL_REPR = {'data': {'items': [{'id': 0, 'metadata': {'md1': 'val1'}}]}}
-    EVENT_INTERNAL_REPR = {'data': {'items': [{'id': 0, 'metadata': {'md1': 'val1'}, 'assetIds': []}]}}
-    FILE_INFO_INTERNAL_REPR = {'data': {'items': [{'id': 0, 'metadata': {'md1': 'val1'}}]}}
+    TS_INTERNAL_REPR = {"data": {"items": [{"name": "0", "metadata": {"md1": "val1"}}]}}
+    EVENT_LIST_INTERNAL_REPR = {"data": {"items": [{"id": 0, "metadata": {"md1": "val1"}}]}}
+    EVENT_INTERNAL_REPR = {"data": {"items": [{"id": 0, "metadata": {"md1": "val1"}, "assetIds": []}]}}
+    FILE_INFO_INTERNAL_REPR = {"data": {"items": [{"id": 0, "metadata": {"md1": "val1"}}]}}
     response = None
-    if request.param == 'ts':
+    if request.param == "ts":
         response = TimeSeriesResponse(TS_INTERNAL_REPR)
-    elif request.param == 'file':
+    elif request.param == "file":
         response = FileInfoResponse(FILE_INFO_INTERNAL_REPR)
-    elif request.param == 'eventlist':
+    elif request.param == "eventlist":
         response = EventListResponse(EVENT_LIST_INTERNAL_REPR)
-    elif request.param == 'event':
+    elif request.param == "event":
         response = EventResponse(EVENT_INTERNAL_REPR)
     yield response
diff --git a/tests/v05/test_files.py b/tests/v05/test_files.py
index b760ff633c..6d49183a19 100644
--- a/tests/v05/test_files.py
+++ b/tests/v05/test_files.py
@@ -8,22 +8,23 @@


 def test_upload_file_metadata():
-    response = files.upload_file('test_file', source='sdk-tests', overwrite=True)
-    assert response.get('uploadURL') is not None
-    assert response.get('fileId') is not None
+    response = files.upload_file("test_file", source="sdk-tests", overwrite=True)
+    assert response.get("uploadURL") is not None
+    assert response.get("fileId") is not None


 def test_upload_file(tmpdir):
-    file_path = os.path.join(tmpdir, 'test_file.txt')
-    tmpdir.join('test_file.txt').write("This is a test file.")
+    file_path = os.path.join(tmpdir, "test_file.txt")
+    tmpdir.join("test_file.txt").write("This is a test file.")
     with pytest.warns(UserWarning):
-        response = files.upload_file('test_file', file_path, source='sdk-tests', overwrite=True)
-    assert response.get('uploadURL') is None
-    assert response.get('fileId') is not None
+        response = files.upload_file("test_file", file_path, source="sdk-tests", overwrite=True)
+    assert response.get("uploadURL") is None
+    assert response.get("fileId") is not None


 def test_list_files():
     from cognite.v05.dto import FileListResponse
+
     response = files.list_files(limit=3)
     assert isinstance(response, FileListResponse)
     assert isinstance(response.to_pandas(), pd.DataFrame)
@@ -33,19 +34,20 @@ def test_list_files():


 def test_list_files_empty():
-    response = files.list_files(source='not_a_source')
+    response = files.list_files(source="not_a_source")
     assert response.to_pandas().empty
     assert len(response.to_json()) == 0


-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def file_id():
-    res = files.list_files(name='test_file', source='sdk-tests', limit=1)
-    return res.to_json()[0]['id']
+    res = files.list_files(name="test_file", source="sdk-tests", limit=1)
+    return res.to_json()[0]["id"]


 def test_get_file_info(file_id):
     from cognite.v05.dto import FileInfoResponse
+
     response = files.get_file_info(file_id)
     assert isinstance(response, FileInfoResponse)
     assert isinstance(response.to_json(), dict)
@@ -54,7 +56,7 @@ def test_get_file_info(file_id):
     assert response.id == file_id


-@pytest.mark.parametrize('get_contents', [True, False])
+@pytest.mark.parametrize("get_contents", [True, False])
 def test_download_files(file_id, get_contents):
     try:
         response = files.download_file(file_id, get_contents)
@@ -68,4 +70,4 @@ def test_download_files(file_id, get_contents):

 def test_delete_file(file_id):
     response = files.delete_files([file_id])
-    assert file_id in response['deleted'] or file_id in response['failed']
+    assert file_id in response["deleted"] or file_id in response["failed"]
diff --git a/tests/v05/test_raw.py b/tests/v05/test_raw.py
index aa9657c91d..54deb8ba74 100644
--- a/tests/v05/test_raw.py
+++ b/tests/v05/test_raw.py
@@ -14,39 +14,39 @@
 ROW_COLUMNS = None


-@pytest.fixture(autouse=True, scope='class')
+@pytest.fixture(autouse=True, scope="class")
 def db_name():
     global DB_NAME
-    DB_NAME = 'test_db_{}'.format(randint(1, 2 ** 53 - 1))
+    DB_NAME = "test_db_{}".format(randint(1, 2 ** 53 - 1))


-@pytest.fixture(autouse=True, scope='class')
+@pytest.fixture(autouse=True, scope="class")
 def table_name():
     global TABLE_NAME
-    TABLE_NAME = 'test_table_{}'.format(randint(1, 2 ** 53 - 1))
+    TABLE_NAME = "test_table_{}".format(randint(1, 2 ** 53 - 1))


-@pytest.fixture(autouse=True, scope='class')
+@pytest.fixture(autouse=True, scope="class")
 def row_key():
     global ROW_KEY
-    ROW_KEY = 'test_key_{}'.format(randint(1, 2 ** 53 - 1))
+    ROW_KEY = "test_key_{}".format(randint(1, 2 ** 53 - 1))


-@pytest.fixture(autouse=True, scope='class')
+@pytest.fixture(autouse=True, scope="class")
 def row_columns():
     global ROW_COLUMNS
-    ROW_COLUMNS = {'col1': 'val1'}
+    ROW_COLUMNS = {"col1": "val1"}


 class TestDatabases:
-    @pytest.fixture(scope='class')
+    @pytest.fixture(scope="class")
     def databases(self):
         yield raw.get_databases()

     def test_create_databases(self):
         response = raw.create_databases([DB_NAME])
         assert isinstance(response, RawResponse)
-        assert response.to_json()[0]['dbName'] == DB_NAME
+        assert response.to_json()[0]["dbName"] == DB_NAME

     def test_databases_response_length(self, databases):
         assert len(databases.to_json()) > 0
@@ -65,20 +65,20 @@ def test_delete_databases(self):


 class TestTables:
-    @pytest.fixture(autouse=True, scope='class')
+    @pytest.fixture(autouse=True, scope="class")
     def create_database(self):
         raw.create_databases([DB_NAME])
         yield
         raw.delete_databases([DB_NAME], recursive=True)

-    @pytest.fixture(scope='class')
+    @pytest.fixture(scope="class")
     def tables(self):
         yield raw.get_tables(DB_NAME)

     def test_create_tables(self):
         response = raw.create_tables(DB_NAME, [TABLE_NAME])
         # assert isinstance(response, RawObject)
-        assert response.to_json()[0]['tableName'] == TABLE_NAME
+        assert response.to_json()[0]["tableName"] == TABLE_NAME

     def test_tables_response_length(self, tables):
         assert len(tables.to_json()) > 0
@@ -99,7 +99,7 @@ def test_delete_tables(self):


 class TestRows:
-    @pytest.fixture(autouse=True, scope='class')
+    @pytest.fixture(autouse=True, scope="class")
     def create_database(self):
         raw.create_databases([DB_NAME])
         raw.create_tables(DB_NAME, [TABLE_NAME])
diff --git a/tests/v06/test_sequences.py b/tests/v06/test_sequences.py
index ac2eaaad9a..7538948e88 100644
--- a/tests/v06/test_sequences.py
+++ b/tests/v06/test_sequences.py
@@ -14,11 +14,8 @@


 class TestSequences:
-
     @pytest.fixture(scope="class")
-    def sequence_that_isnt_created(
-        self
-    ):
+    def sequence_that_isnt_created(self):
         """Returns a Sequence that hasn't been created yet. (It does not have an ID)"""
         global SEQUENCE_EXTERNAL_ID
@@ -28,30 +25,15 @@ def sequence_that_isnt_created(
             external_id=SEQUENCE_EXTERNAL_ID,
             asset_id=None,
             columns=[
-                Column(
-                    id=None,
-                    name="test_column",
-                    external_id="external_id",
-                    value_type="STRING",
-                    metadata={}
-                ),
-                Column(
-                    id=None,
-                    name="test_column2",
-                    external_id="external_id2",
-                    value_type="STRING",
-                    metadata={}
-                )
+                Column(id=None, name="test_column", external_id="external_id", value_type="STRING", metadata={}),
+                Column(id=None, name="test_column2", external_id="external_id2", value_type="STRING", metadata={}),
             ],
             description="Test sequence",
-            metadata={}
+            metadata={},
         )

     @pytest.fixture(scope="class")
-    def sequence_that_is_created_retrieved_by_id(
-        self,
-        sequence_that_isnt_created
-    ):
+    def sequence_that_is_created_retrieved_by_id(self, sequence_that_isnt_created):
         """Returns the created sequence by using the cognite id"""
         global CREATED_SEQUENCE_ID
         if CREATED_SEQUENCE_ID:
@@ -64,10 +46,7 @@ def sequence_that_is_created_retrieved_by_id(
         return created_sequence

     @pytest.fixture(scope="class")
-    def sequence_that_is_created_retrieved_by_external_id(
-        self,
-        sequence_that_isnt_created
-    ):
+    def sequence_that_is_created_retrieved_by_external_id(self, sequence_that_isnt_created):
         """Returns the created sequence by using the external id"""
         global CREATED_SEQUENCE_ID, SEQUENCE_EXTERNAL_ID
         if CREATED_SEQUENCE_ID:
@@ -79,54 +58,37 @@ def sequence_that_is_created_retrieved_by_external_id(
         CREATED_SEQUENCE_ID = created_sequence.id
         return created_sequence

-    def test_get_sequence_by_id(
-        self,
-        sequence_that_is_created_retrieved_by_id,
-        sequence_that_isnt_created
-    ):
+    def test_get_sequence_by_id(self, sequence_that_is_created_retrieved_by_id, sequence_that_isnt_created):
         global CREATED_SEQUENCE_ID
         assert isinstance(sequence_that_is_created_retrieved_by_id, Sequence)
         assert sequence_that_is_created_retrieved_by_id.id == CREATED_SEQUENCE_ID
         assert sequence_that_is_created_retrieved_by_id.name == sequence_that_isnt_created.name

     def test_get_sequence_by_external_id(
-        self,
-        sequence_that_is_created_retrieved_by_external_id,
-        sequence_that_isnt_created
+        self, sequence_that_is_created_retrieved_by_external_id, sequence_that_isnt_created
     ):
         global CREATED_SEQUENCE_ID
         assert isinstance(sequence_that_is_created_retrieved_by_external_id, Sequence)
         assert sequence_that_is_created_retrieved_by_external_id.id == CREATED_SEQUENCE_ID
         assert sequence_that_is_created_retrieved_by_external_id.name == sequence_that_isnt_created.name

-    def test_post_data_to_sequence_and_get_data_from_sequence(
-        self,
-        sequence_that_is_created_retrieved_by_id
-    ):
+    def test_post_data_to_sequence_and_get_data_from_sequence(self, sequence_that_is_created_retrieved_by_id):
         # Prepare some data to post
         rows: List[Row] = [
             Row(
                 row_number=1,
                 values=[
-                    RowValue(
-                        column_id=sequence_that_is_created_retrieved_by_id.columns[0].id,
-                        value="42"
-                    ),
-                    RowValue(
-                        column_id=sequence_that_is_created_retrieved_by_id.columns[1].id,
-                        value="43"
-                    )
-                ]
+                    RowValue(column_id=sequence_that_is_created_retrieved_by_id.columns[0].id, value="42"),
+                    RowValue(column_id=sequence_that_is_created_retrieved_by_id.columns[1].id, value="43"),
+                ],
             )
         ]
         # Post data
-        res = sequences.post_data_to_sequence(
-            id=sequence_that_is_created_retrieved_by_id.id,
-            rows=rows
-        )
+        res = sequences.post_data_to_sequence(id=sequence_that_is_created_retrieved_by_id.id, rows=rows)
         assert res == {}
         # Sleep a little, to give the api a chance to process the data
         import time
+
         time.sleep(5)
         # Get the data
         sequenceDataResponse: SequenceDataResponse = sequences.get_data_from_sequence(
@@ -136,8 +98,8 @@ def test_post_data_to_sequence_and_get_data_from_sequence(
             limit=1,
             column_ids=[
                 sequence_that_is_created_retrieved_by_id.columns[0].id,
-                sequence_that_is_created_retrieved_by_id.columns[1].id
-            ]
+                sequence_that_is_created_retrieved_by_id.columns[1].id,
+            ],
         )
         # Verify that the data is the same
         assert rows[0].rowNumber == sequenceDataResponse.rows[0].rowNumber
diff --git a/type_hint_remover.py b/type_hint_remover.py
new file mode 100644
index 0000000000..53af9fd1a8
--- /dev/null
+++ b/type_hint_remover.py
@@ -0,0 +1,24 @@
+"""This module is used to remove typehints from source code to ensure python 3.5 compatibility."""
+import os
+import re
+
+import strip_hints
+
+SKIP_DIRECTORIES = ["auxiliary", "__pycache__"]
+PYTHON_FILE_PATTERN = r"^.+\.py$"
+
+
+def strip_hints_and_overwrite(file_path):
+    print("*****Removing type hints in {}".format(file_path))
+    transformed = strip_hints.strip_file_to_string(file_path, to_empty=True, only_assigns_and_defs=True)
+    with open(file_path, "w") as f:
+        f.write(transformed)
+
+
+if __name__ == "__main__":
+    for root, _, files in os.walk("cognite"):
+        if not any(dir_name in root for dir_name in SKIP_DIRECTORIES):
+            for file in files:
+                if re.match(PYTHON_FILE_PATTERN, file):
+                    file_path = "{}/{}".format(root, file)
+                    strip_hints_and_overwrite(file_path)
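
A minimal sketch of the strip_hints call that type_hint_remover.py relies on, assuming a hypothetical input file named example.py; the two keyword flags are copied from the script's own call, but the file name and the print-based workflow are illustrative only:

    # Sketch, not part of the patch: run the same transformation on a single file.
    # strip_file_to_string parses example.py and returns the source as a string
    # with type hints stripped, using the same flags type_hint_remover.py passes.
    import strip_hints

    transformed = strip_hints.strip_file_to_string("example.py", to_empty=True, only_assigns_and_defs=True)
    print(transformed)  # inspect the stripped source; the script instead writes it back over the file

Overwriting each file in place (what strip_hints_and_overwrite does) makes the transformation permanent in that checkout, which is the point: per the module docstring, the stripped sources are what keeps the shipped code Python 3.5 compatible, while the repository itself keeps its annotations.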