From 11a51eb2da1c9818b7280a717ba11fc8cb60b98e Mon Sep 17 00:00:00 2001 From: Clint Date: Wed, 13 Jul 2022 13:52:35 -0700 Subject: [PATCH 01/52] chore: Refactoring CI to use GitHub Actions --- .github/workflows/ci.yaml | 26 +++++++++++++++++++ .../image_build_push.yaml | 0 .../workflows => workflowstodelete}/wool.yml | 0 3 files changed, 26 insertions(+) create mode 100644 .github/workflows/ci.yaml rename {.github/workflows => workflowstodelete}/image_build_push.yaml (100%) rename {.github/workflows => workflowstodelete}/wool.yml (100%) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml new file mode 100644 index 000000000..475acc924 --- /dev/null +++ b/.github/workflows/ci.yaml @@ -0,0 +1,26 @@ +name: CI + +on: push + +concurrency: + group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}' + cancel-in-progress: true + + +jobs: + Security: + name: Secure and Lint + uses: uc-cdis/.github/.github/workflows/securitypipeline.yaml@master + secrets: + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_NOTIFY }} + SNYK_TOKEN: ${{ secrets.SRVC_SNYKGITACTION }} + + ci: + name: Build Image and Push + needs: Security + uses: uc-cdis/.github/.github/workflows/image_build_push.yaml@master + secrets: + ECR_AWS_ACCESS_KEY_ID: ${{ secrets.ECR_AWS_ACCESS_KEY_ID }} + ECR_AWS_SECRET_ACCESS_KEY: ${{ secrets.ECR_AWS_SECRET_ACCESS_KEY }} + QUAY_USERNAME: ${{ secrets.QUAY_USERNAME }} + QUAY_ROBOT_TOKEN: ${{ secrets.QUAY_ROBOT_TOKEN }} diff --git a/.github/workflows/image_build_push.yaml b/workflowstodelete/image_build_push.yaml similarity index 100% rename from .github/workflows/image_build_push.yaml rename to workflowstodelete/image_build_push.yaml diff --git a/.github/workflows/wool.yml b/workflowstodelete/wool.yml similarity index 100% rename from .github/workflows/wool.yml rename to workflowstodelete/wool.yml From ec35137343647c9556d5f5c37ab30f892df4536d Mon Sep 17 00:00:00 2001 From: Clint Date: Wed, 13 Jul 2022 14:12:36 -0700 Subject: [PATCH 02/52] chore: Refactoring CI to use GitHub Actions --- .github/workflows/ci.yaml | 47 ++++++++++++++++++++++++++++++++++----- 1 file changed, 42 insertions(+), 5 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 475acc924..c97b0cb01 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -9,11 +9,48 @@ concurrency: jobs: Security: - name: Secure and Lint - uses: uc-cdis/.github/.github/workflows/securitypipeline.yaml@master - secrets: - SLACK_WEBHOOK_URL: ${{ secrets.SLACK_NOTIFY }} - SNYK_TOKEN: ${{ secrets.SRVC_SNYKGITACTION }} + runs-on: 'ubuntu-latest' + # Superlint a simple combination of various linters, written in bash, to help validate source code. 
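# (editorial note, not part of the original patch: the nested jobs below call
# shared "reusable workflows"; in GitHub Actions a job invokes a reusable
# workflow with a job-level `uses:` and needs no `runs-on:`. Later commits in
# this series flatten this structure and pass secrets with `secrets: inherit`.)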
+ call-workflow-1-in-local-repo: + uses: uc-cdis/.github/.github/workflows/lint.yaml@master + # CodeQL is a simple Github native Static code analysis tool + call-workflow2-in-local-repo: + uses: uc-cdis/.github/.github/workflows/codeql-scan.yaml@master + # Secrets detection is calling the Yelp Secrets Detector we use in pre-commit to ensure proper coverage + secrets-detection: + runs-on: 'ubuntu-latest' + steps: + - name: secrets detect + uses: RobertFischer/detect-secrets-action@v2.0.0 + - name: Report Status + if: always() + uses: ravsamhq/notify-slack-action@v1 + with: + status: ${{ job.status }} + notify_when: 'failure' + channel: infosec-alerts + env: + SLACK_WEBHOOK_URL: ${{ secrets.ACTION_MONITORING_SLACK }} + + + # Snyk native integration doesn't support poetry.lock files - this runs a check if a poetry.lock is found then runs a CLI scan to add coverage. + file_existence: + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v1 + + - name: Check file existence + id: check_files + uses: andstor/file-existence-action@v1 + with: + files: "poetry.lock" + + - name: File exists + if: steps.check_files.outputs.files_exists == 'true' + # Only runs if all of the files exists + run: uc-cdis/.github/.github/actions/snykcli.yaml@master + ci: name: Build Image and Push From 7f3095b31a70079ce8cb8060f31c2c69a5b6691f Mon Sep 17 00:00:00 2001 From: Clint Date: Wed, 13 Jul 2022 14:43:18 -0700 Subject: [PATCH 03/52] chore: Refactoring CI to use GitHub Actions --- .github/workflows/ci.yaml | 47 +++++---------------------------------- 1 file changed, 5 insertions(+), 42 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index c97b0cb01..e4252e990 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -9,48 +9,11 @@ concurrency: jobs: Security: - runs-on: 'ubuntu-latest' - # Superlint a simple combination of various linters, written in bash, to help validate source code. - call-workflow-1-in-local-repo: - uses: uc-cdis/.github/.github/workflows/lint.yaml@master - # CodeQL is a simple Github native Static code analysis tool - call-workflow2-in-local-repo: - uses: uc-cdis/.github/.github/workflows/codeql-scan.yaml@master - # Secrets detection is calling the Yelp Secrets Detector we use in pre-commit to ensure proper coverage - secrets-detection: - runs-on: 'ubuntu-latest' - steps: - - name: secrets detect - uses: RobertFischer/detect-secrets-action@v2.0.0 - - name: Report Status - if: always() - uses: ravsamhq/notify-slack-action@v1 - with: - status: ${{ job.status }} - notify_when: 'failure' - channel: infosec-alerts - env: - SLACK_WEBHOOK_URL: ${{ secrets.ACTION_MONITORING_SLACK }} - - - # Snyk native integration doesn't support poetry.lock files - this runs a check if a poetry.lock is found then runs a CLI scan to add coverage. 
- file_existence: - runs-on: ubuntu-latest - steps: - - name: Checkout code - uses: actions/checkout@v1 - - - name: Check file existence - id: check_files - uses: andstor/file-existence-action@v1 - with: - files: "poetry.lock" - - - name: File exists - if: steps.check_files.outputs.files_exists == 'true' - # Only runs if all of the files exists - run: uc-cdis/.github/.github/actions/snykcli.yaml@master - + name: Secure and Lint + uses: uc-cdis/.github/.github/workflows/securitypipeline.yaml@sdo-13-ci-using-gh-actions + secrets: + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_NOTIFY }} + SNYK_TOKEN: ${{ secrets.SRVC_SNYKGITACTION }} ci: name: Build Image and Push From d802fb6958d23f638e65b5c6ccf03c9a8f28dca5 Mon Sep 17 00:00:00 2001 From: Clint Date: Wed, 13 Jul 2022 14:44:22 -0700 Subject: [PATCH 04/52] chore: Refactoring CI to use GitHub Actions --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index e4252e990..d05ecdd31 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -10,7 +10,7 @@ concurrency: jobs: Security: name: Secure and Lint - uses: uc-cdis/.github/.github/workflows/securitypipeline.yaml@sdo-13-ci-using-gh-actions + uses: uc-cdis/.github/.github/workflows/securitypipeline.yaml@SDO-13-ci-using-gh-actions secrets: SLACK_WEBHOOK_URL: ${{ secrets.SLACK_NOTIFY }} SNYK_TOKEN: ${{ secrets.SRVC_SNYKGITACTION }} From 554954fba890094aa647dbe604040f575c5e7f32 Mon Sep 17 00:00:00 2001 From: Clint Date: Thu, 14 Jul 2022 08:57:01 -0700 Subject: [PATCH 05/52] chore: Refactoring CI to use GitHub Actions --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index d05ecdd31..42b03f400 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -13,7 +13,7 @@ jobs: uses: uc-cdis/.github/.github/workflows/securitypipeline.yaml@SDO-13-ci-using-gh-actions secrets: SLACK_WEBHOOK_URL: ${{ secrets.SLACK_NOTIFY }} - SNYK_TOKEN: ${{ secrets.SRVC_SNYKGITACTION }} + SRVC_SNYKGITACTION: ${{ secrets.SRVC_SNYKGITACTION }} ci: name: Build Image and Push From 2a9e0951527a11af61d6d487f5f7bcfdf99ba09b Mon Sep 17 00:00:00 2001 From: Clint Date: Thu, 14 Jul 2022 12:09:27 -0700 Subject: [PATCH 06/52] chore: Refactoring CI to use GitHub Actions --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 42b03f400..12304fda5 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -13,7 +13,7 @@ jobs: uses: uc-cdis/.github/.github/workflows/securitypipeline.yaml@SDO-13-ci-using-gh-actions secrets: SLACK_WEBHOOK_URL: ${{ secrets.SLACK_NOTIFY }} - SRVC_SNYKGITACTION: ${{ secrets.SRVC_SNYKGITACTION }} + SYNK_TOKENN: ${{ secrets.SRVC_SNYKGITACTION }} ci: name: Build Image and Push From 730c502c473854dbec61fb8e5fa65886800620bc Mon Sep 17 00:00:00 2001 From: Clint Date: Thu, 14 Jul 2022 12:11:46 -0700 Subject: [PATCH 07/52] Empty-Commit From 85b0dd3ee9b18e3d88628b2abacd372b0c7c7225 Mon Sep 17 00:00:00 2001 From: Clint Date: Thu, 14 Jul 2022 12:12:59 -0700 Subject: [PATCH 08/52] chore: Refactoring CI to use GitHub Actions --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 12304fda5..5656935d2 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -13,7 +13,7 @@ jobs: uses: 
uc-cdis/.github/.github/workflows/securitypipeline.yaml@SDO-13-ci-using-gh-actions secrets: SLACK_WEBHOOK_URL: ${{ secrets.SLACK_NOTIFY }} - SYNK_TOKENN: ${{ secrets.SRVC_SNYKGITACTION }} + SYNK_TOKEN: ${{ secrets.SRVC_SNYKGITACTION }} ci: name: Build Image and Push From be5f36d3d78f4690bc7f6fa7004fe200b407a01d Mon Sep 17 00:00:00 2001 From: Clint Date: Fri, 15 Jul 2022 07:27:55 -0700 Subject: [PATCH 09/52] chore: Refactoring CI to use GitHub Actions --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 5656935d2..d05ecdd31 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -13,7 +13,7 @@ jobs: uses: uc-cdis/.github/.github/workflows/securitypipeline.yaml@SDO-13-ci-using-gh-actions secrets: SLACK_WEBHOOK_URL: ${{ secrets.SLACK_NOTIFY }} - SYNK_TOKEN: ${{ secrets.SRVC_SNYKGITACTION }} + SNYK_TOKEN: ${{ secrets.SRVC_SNYKGITACTION }} ci: name: Build Image and Push From 4e17ef543f4b56ecf05626dcf45f88ed7d7a71a8 Mon Sep 17 00:00:00 2001 From: Clint Date: Fri, 15 Jul 2022 07:28:46 -0700 Subject: [PATCH 10/52] chore: Refactoring CI to use GitHub Actions --- .github/workflows/ci.yaml | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index d05ecdd31..ba0c32d34 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -11,9 +11,7 @@ jobs: Security: name: Secure and Lint uses: uc-cdis/.github/.github/workflows/securitypipeline.yaml@SDO-13-ci-using-gh-actions - secrets: - SLACK_WEBHOOK_URL: ${{ secrets.SLACK_NOTIFY }} - SNYK_TOKEN: ${{ secrets.SRVC_SNYKGITACTION }} + ci: name: Build Image and Push From 67cd84d0dd348a3473e56b09c5fac14a7278dc82 Mon Sep 17 00:00:00 2001 From: Clint Date: Fri, 15 Jul 2022 07:54:36 -0700 Subject: [PATCH 11/52] chore: Refactoring CI to use GitHub Actions --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index ba0c32d34..c11d73bb8 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -11,7 +11,7 @@ jobs: Security: name: Secure and Lint uses: uc-cdis/.github/.github/workflows/securitypipeline.yaml@SDO-13-ci-using-gh-actions - + secrets: inherit ci: name: Build Image and Push From 9b8e96e814729bbea694167125c5b9c1d2ed1c71 Mon Sep 17 00:00:00 2001 From: Clint Date: Wed, 3 Aug 2022 13:54:07 -0700 Subject: [PATCH 12/52] chore: changing to use main branch for security pipelines --- .github/workflows/ci.yaml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index c11d73bb8..22934266f 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -1,6 +1,8 @@ name: CI on: push + pull_request: + types: [opened, reopened] concurrency: group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}' @@ -10,7 +12,7 @@ concurrency: jobs: Security: name: Secure and Lint - uses: uc-cdis/.github/.github/workflows/securitypipeline.yaml@SDO-13-ci-using-gh-actions + uses: uc-cdis/.github/.github/workflows/securitypipeline.yaml@master secrets: inherit ci: From 66a9ef960544e31c029c83c3cd13ceafe275b9c3 Mon Sep 17 00:00:00 2001 From: Clint Date: Wed, 3 Aug 2022 13:55:13 -0700 Subject: [PATCH 13/52] chore: changing to use main branch for security pipelines --- .github/workflows/ci.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff 
--git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 22934266f..206ed5c65 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -1,6 +1,7 @@ name: CI -on: push +on: + push: pull_request: types: [opened, reopened] From 61778929b84210eca1da1242fa6006c97b550185 Mon Sep 17 00:00:00 2001 From: Pauline Ribeyre <4224001+paulineribeyre@users.noreply.github.com> Date: Mon, 15 Aug 2022 14:37:19 -0500 Subject: [PATCH 14/52] PXP-10113 Client credentials grant (#1033) --- .pre-commit-config.yaml | 2 +- .secrets.baseline | 56 +++++++- Dockerfile | 2 +- README.md | 28 +++- bin/fence_create.py | 5 +- bin/old_migration_script.py | 2 +- fence/auth.py | 14 +- fence/blueprints/data/indexd.py | 84 +++++++----- fence/jwt/token.py | 44 ++++--- fence/models.py | 20 +++ fence/oidc/grants/__init__.py | 1 + fence/oidc/grants/client_credentials_grant.py | 1 + fence/oidc/jwt_generator.py | 67 ++++++++++ fence/oidc/oidc_server.py | 4 +- fence/oidc/server.py | 8 +- fence/resources/storage/cdis_jwt.py | 4 +- fence/resources/user/user_session.py | 2 +- fence/scripting/fence_create.py | 29 ++--- fence/sync/sync_users.py | 2 + fence/utils.py | 19 ++- ...7e1b843f82_optional_client_redirect_uri.py | 26 ++++ openapis/swagger.yaml | 25 ++-- tests/conftest.py | 70 +++++++++- tests/credentials/api_key/test_access.py | 2 - tests/credentials/google/test_credentials.py | 6 +- ...zure_blob_storage_indexed_file_location.py | 2 +- tests/data/test_data.py | 45 +++++++ tests/jwt/test_oversized_jwt.py | 2 +- tests/jwt/test_tokens.py | 2 +- tests/migrations/test_ea7e1b843f82.py | 123 ++++++++++++++++++ tests/rfc6749/test_oauth2.py | 76 +++++++++++ tests/scripting/test_fence-create.py | 83 +++++++++++- tests/test_datamodel.py | 1 + tests/utils/__init__.py | 25 ++++ tests/utils/oauth2/client.py | 60 ++++++--- 35 files changed, 801 insertions(+), 141 deletions(-) create mode 100644 fence/oidc/grants/client_credentials_grant.py create mode 100644 migrations/versions/ea7e1b843f82_optional_client_redirect_uri.py create mode 100644 tests/migrations/test_ea7e1b843f82.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 38ceb9fd6..554d3c29c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/psf/black - rev: 22.3.0 + rev: 22.6.0 hooks: - id: black - repo: git@github.com:Yelp/detect-secrets diff --git a/.secrets.baseline b/.secrets.baseline index 0d2fda218..7974f4d3f 100644 --- a/.secrets.baseline +++ b/.secrets.baseline @@ -189,7 +189,7 @@ "filename": "fence/utils.py", "hashed_secret": "8318df9ecda039deac9868adf1944a29a95c7114", "is_verified": false, - "line_number": 105 + "line_number": 110 } ], "migrations/versions/e4c7b0ab68d3_create_tables.py": [ @@ -201,20 +201,36 @@ "line_number": 22 } ], + "migrations/versions/ea7e1b843f82_optional_client_redirect_uri.py": [ + { + "type": "Hex High Entropy String", + "filename": "migrations/versions/ea7e1b843f82_optional_client_redirect_uri.py", + "hashed_secret": "bb2372fb50034d559d2920e7229fb5879cf1be72", + "is_verified": false, + "line_number": 13 + }, + { + "type": "Hex High Entropy String", + "filename": "migrations/versions/ea7e1b843f82_optional_client_redirect_uri.py", + "hashed_secret": "adb1fcd33b07abf9b6a064745759accea5cb341f", + "is_verified": false, + "line_number": 14 + } + ], "tests/conftest.py": [ { "type": "Private Key", "filename": "tests/conftest.py", "hashed_secret": "1348b145fa1a555461c1b790a2f66614781091e9", "is_verified": false, - "line_number": 1516 + "line_number": 
1574 }, { "type": "Base64 High Entropy String", "filename": "tests/conftest.py", "hashed_secret": "227dea087477346785aefd575f91dd13ab86c108", "is_verified": false, - "line_number": 1539 + "line_number": 1597 } ], "tests/credentials/google/test_credentials.py": [ @@ -238,6 +254,13 @@ "hashed_secret": "768b7fe00de4fd233c0c72375d12f87ce9670144", "is_verified": false, "line_number": 476 + }, + { + "type": "Secret Keyword", + "filename": "tests/credentials/google/test_credentials.py", + "hashed_secret": "22afbfecd4124e2eb0e2a79fafdf62b207a8f8c7", + "is_verified": false, + "line_number": 580 } ], "tests/data/test_indexed_file.py": [ @@ -267,6 +290,22 @@ "line_number": 48 } ], + "tests/migrations/test_ea7e1b843f82.py": [ + { + "type": "Hex High Entropy String", + "filename": "tests/migrations/test_ea7e1b843f82.py", + "hashed_secret": "adb1fcd33b07abf9b6a064745759accea5cb341f", + "is_verified": false, + "line_number": 27 + }, + { + "type": "Hex High Entropy String", + "filename": "tests/migrations/test_ea7e1b843f82.py", + "hashed_secret": "bb2372fb50034d559d2920e7229fb5879cf1be72", + "is_verified": false, + "line_number": 44 + } + ], "tests/ras/test_ras.py": [ { "type": "Hex High Entropy String", @@ -276,6 +315,15 @@ "line_number": 120 } ], + "tests/scripting/test_fence-create.py": [ + { + "type": "Secret Keyword", + "filename": "tests/scripting/test_fence-create.py", + "hashed_secret": "e5e9fa1ba31ecd1ae84f75caaa474f3a663f05f4", + "is_verified": false, + "line_number": 221 + } + ], "tests/test-fence-config.yaml": [ { "type": "Basic Auth Credentials", @@ -286,5 +334,5 @@ } ] }, - "generated_at": "2022-07-08T21:32:25Z" + "generated_at": "2022-08-12T15:25:33Z" } diff --git a/Dockerfile b/Dockerfile index 3b11a8a46..0f397ebd3 100644 --- a/Dockerfile +++ b/Dockerfile @@ -12,7 +12,7 @@ RUN apt-get update \ && apt-get -y install vim \ libmcrypt4 libmhash2 mcrypt \ && apt-get clean \ - && rm -rf /var/lib/apt/lists/ + && rm -rf /var/lib/apt/lists/ RUN mkdir -p /var/www/$appname \ && mkdir -p /var/www/.cache/Python-Eggs/ \ diff --git a/README.md b/README.md index c66b8c59d..56ec488b7 100644 --- a/README.md +++ b/README.md @@ -415,7 +415,7 @@ We use JSON Web Tokens (JWTs) as the format for all tokens of the following type } } }, - "iss": "https://bionimbus-pdc.opensciencedatacloud.org", + "iss": "https://commons.org", "jti": "3ae2910b-0294-43dc-af2a-03fd60082aef", "exp": 1516983302, "iat": 1516982102, @@ -454,7 +454,7 @@ We use JSON Web Tokens (JWTs) as the format for all tokens of the following type } } }, - "iss": "https://bionimbus-pdc.opensciencedatacloud.org", + "iss": "https://commons.org", "jti": "2e6ade06-5afb-4ce7-9ab5-e206225ce291", "exp": 1516983302, "iat": 1516982102 @@ -473,7 +473,7 @@ We use JSON Web Tokens (JWTs) as the format for all tokens of the following type "user", "test-client" ], - "iss": "https://bionimbus-pdc.opensciencedatacloud.org", + "iss": "https://commons.org", "jti": "c72e5573-39fa-4391-a445-191e370b7cc5", "exp": 1517010902, "iat": 1516982102 @@ -515,6 +515,28 @@ To specify allowed scopes, use the `allowed-scopes` argument: fence-create client-create ... --allowed-scopes openid user data ``` +#### Register an Oauth Client for a Client Credentials flow + +The OAuth2 Client Credentials flow is used for machine-to-machine communication and scenarios in which typical authentication schemes like username + password do not make sense. The system authenticates and authorizes the app rather than a user. 
See the [OAuth2 specification](https://www.rfc-editor.org/rfc/rfc6749#section-4.4) for more details. + +As a Gen3 commons administrator, if you want to create an OAuth client for a client credentials flow: + +```bash +fence-create client-create --client CLIENT_NAME --grant-types client_credentials +``` + +This command will return a client ID and client secret, which you can then use to obtain an access token: + +```bash +curl --request POST https://FENCE_URL/oauth2/token?grant_type=client_credentials -d scope="openid user" --user CLIENT_ID:CLIENT_SECRET +``` + +NOTE: In Gen3, you can grant specific access to a client the same way you would to a user. See the [user.yaml guide](https://github.com/uc-cdis/fence/blob/master/docs/user.yaml_guide.md) for more details. + +NOTE: Client credentials tokens are not linked to a user. They are not supported by all Gen3 endpoints. + +NOTE: The recommendation is to rotate these credentials at least once a year. Credentials expiration is not enforced at the moment but may be in the future. + #### Modify OAuth Client ```bash diff --git a/bin/fence_create.py b/bin/fence_create.py index 323c55205..dfe3e713b 100755 --- a/bin/fence_create.py +++ b/bin/fence_create.py @@ -69,11 +69,10 @@ def parse_arguments(): client_create = subparsers.add_parser("client-create") client_create.add_argument("--client", required=True) - client_create.add_argument("--urls", required=True, nargs="+") + client_create.add_argument("--urls", nargs="+") client_create.add_argument( "--username", help="user(can represent an organization) that owns the client", - required=True, ) client_create.add_argument( "--external", @@ -89,7 +88,7 @@ def parse_arguments(): ) client_create.add_argument( "--grant-types", - help="which OAuth2 grant types are enabled for this client", + help="which OAuth2 grant types are enabled for this client (default: authorization_code and refresh_token)", nargs="+", ) client_create.add_argument( diff --git a/bin/old_migration_script.py b/bin/old_migration_script.py index e89c7d8ce..6e1bb181a 100644 --- a/bin/old_migration_script.py +++ b/bin/old_migration_script.py @@ -4,7 +4,7 @@ pre-Alembic version to a post-Alembic version. DO NOT ADD NEW MIGRATIONS TO THIS SCRIPT. -Create a new Alembic version instead. +Create a new Alembic revision instead. 
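Returning to the client-credentials walkthrough in the README hunk above: the sketch below is an editorial illustration of the same token exchange the curl command performs, not part of the patch. The base URL and credentials are placeholders, the use of `requests` is an assumption, and only the `/oauth2/token` request shape and the decoded claims are taken from this PR.

```python
import jwt        # PyJWT 1.x, as used by this PR's tests
import requests   # assumption: any HTTP client works; requests shown for brevity

FENCE_URL = "https://example-commons.org/user"   # placeholder
CLIENT_ID = "CLIENT_ID"                          # printed by client-create
CLIENT_SECRET = "CLIENT_SECRET"                  # printed by client-create

# Same request as the curl command in the README section above.
resp = requests.post(
    f"{FENCE_URL}/oauth2/token",
    params={"grant_type": "client_credentials"},
    data={"scope": "openid user"},
    auth=(CLIENT_ID, CLIENT_SECRET),
)
resp.raise_for_status()
access_token = resp.json()["access_token"]

# Inspect the claims without verifying the signature (PyJWT 1.x style,
# mirroring the `jwt.decode(..., verify=False)` calls in the tests below).
claims = jwt.decode(access_token, verify=False)
assert claims["azp"] == CLIENT_ID    # the requesting client
assert claims["context"] == {}       # client tokens are not linked to a user
assert "sub" not in claims           # no user id on a client token
```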
""" diff --git a/fence/auth.py b/fence/auth.py index 516876561..fda82b402 100644 --- a/fence/auth.py +++ b/fence/auth.py @@ -234,12 +234,14 @@ def has_oauth(scope=None): ) except JWTError as e: raise Unauthorized("failed to validate token: {}".format(e)) - user_id = access_token_claims["sub"] - user = current_session.query(User).filter_by(id=int(user_id)).first() - if not user: - raise Unauthorized("no user found with id: {}".format(user_id)) - # set some application context for current user and client id - flask.g.user = user + if "sub" in access_token_claims: + user_id = access_token_claims["sub"] + user = current_session.query(User).filter_by(id=int(user_id)).first() + if not user: + raise Unauthorized("no user found with id: {}".format(user_id)) + # set some application context for current user + flask.g.user = user + # set some application context for current client id # client_id should be None if the field doesn't exist or is empty flask.g.client_id = access_token_claims.get("azp") or None flask.g.token = access_token_claims diff --git a/fence/blueprints/data/indexd.py b/fence/blueprints/data/indexd.py index 34e6342d5..91e477087 100755 --- a/fence/blueprints/data/indexd.py +++ b/fence/blueprints/data/indexd.py @@ -32,6 +32,7 @@ ) from fence.config import config from fence.errors import ( + Forbidden, InternalError, NotFound, NotSupported, @@ -119,12 +120,12 @@ def get_signed_url_for_file( "sub": user.id, } else: - user_info = _get_user_info_for_id_or_from_request( + auth_info = _get_auth_info_for_id_or_from_request( sub_type=int, db_session=db_session ) flask.g.audit_data = { - "username": user_info["username"], - "sub": user_info["user_id"], + "username": auth_info["username"], + "sub": auth_info["user_id"], } indexed_file = IndexedFile(file_id) @@ -643,7 +644,9 @@ def check_legacy_authorization(self, action): ) return self.index_document.get("uploader") == username - given_acls = set(filter_auth_ids(action, flask.g.user.project_access)) + given_acls = set() + if hasattr(flask.g, "user"): + given_acls = set(filter_auth_ids(action, flask.g.user.project_access)) return len(self.set_acls & given_acls) > 0 @login_required({"data"}) @@ -868,7 +871,7 @@ def assume_role(cls, bucket_cred, expires_in, aws_creds_config, boto=None): def bucket_name(self): """ Return: - Optional[str]: bucket name or None if not not in cofig + Optional[str]: bucket name or None if not in config """ s3_buckets = get_value( flask.current_app.config, @@ -992,7 +995,7 @@ def get_signed_url( self.parsed_url.netloc, credential ) - user_info = _get_user_info_for_id_or_from_request(user=authorized_user) + auth_info = _get_auth_info_for_id_or_from_request(user=authorized_user) url = generate_aws_presigned_url( http_url, @@ -1001,7 +1004,7 @@ def get_signed_url( "s3", region, expires_in, - user_info, + auth_info, ) return url @@ -1118,11 +1121,11 @@ def get_signed_url( ): resource_path = self.get_resource_path() - user_info = _get_user_info_for_id_or_from_request(user=authorized_user) + auth_info = _get_auth_info_for_id_or_from_request(user=authorized_user) if not force_signed_url: url = "https://storage.cloud.google.com/" + resource_path - elif _is_anonymous_user(user_info): + elif _is_anonymous_user(auth_info): url = self._generate_anonymous_google_storage_signed_url( ACTION_DICT["gs"][action], resource_path, int(expires_in) ) @@ -1131,8 +1134,8 @@ def get_signed_url( ACTION_DICT["gs"][action], resource_path, int(expires_in), - user_info.get("user_id"), - user_info.get("username"), + auth_info.get("user_id"), + 
auth_info.get("username"), r_pays_project=r_pays_project, ) @@ -1451,9 +1454,9 @@ def get_signed_url( container_name, blob_name = self._get_container_and_blob() - user_info = _get_user_info_for_id_or_from_request(user=authorized_user) - if _is_anonymous_user(user_info): - logger.info(f"Attempting to get a signed url an anonymous user") + auth_info = _get_auth_info_for_id_or_from_request(user=authorized_user) + if _is_anonymous_user(auth_info): + logger.info(f"Attempting to get a signed url for an anonymous user") # if it's public and we don't need to force the signed url, just return the raw # url @@ -1512,12 +1515,12 @@ def delete(self, container, blob): # pylint: disable=R0201 return ("Failed to delete data file.", status_code) -def _get_user_info_for_id_or_from_request( +def _get_auth_info_for_id_or_from_request( sub_type=str, user=None, username=None, db_session=None ): """ - Attempt to parse the request to get information about user. fallback to - populated information about an anonymous user. + Attempt to parse the request to get information about user and client. + Fallback to populated information about an anonymous user. By default, cast `sub` to str. Use `sub_type` to override this behavior. @@ -1528,6 +1531,15 @@ def _get_user_info_for_id_or_from_request( """ db_session = db_session or current_session + # set default "anonymous" user_id and username + # this is fine b/c it might be public data or a client token that is not + # linked to a user + final_user_id = None + if sub_type == str: + final_user_id = sub_type(ANONYMOUS_USER_ID) + final_username = ANONYMOUS_USERNAME + + token = "" try: if user: final_username = user.username @@ -1537,31 +1549,43 @@ def _get_user_info_for_id_or_from_request( final_username = result.username final_user_id = sub_type(result.id) else: - set_current_token( - validate_request(scope={"user"}, audience=config.get("BASE_URL")) - ) + token = validate_request(scope={"user"}, audience=config.get("BASE_URL")) + set_current_token(token) final_user_id = current_token["sub"] final_user_id = sub_type(final_user_id) final_username = current_token["context"]["user"]["name"] except Exception as exc: logger.info( - "could not determine user info from request. setting anonymous user information." + f"could not determine user auth info from request. setting anonymous user information. Details:\n{exc}" + ) + + client_id = "" + try: + if not token: + token = validate_request(scope=[], audience=config.get("BASE_URL")) + set_current_token(token) + client_id = current_token.get("azp") or "" + except Exception as exc: + logger.info( + f"could not determine client auth info from request. setting anonymous client information. 
Details:\n{exc}" ) - # this is fine b/c it might be public data, sign with anonymous username/id - final_user_id = None - if sub_type == str: - final_user_id = sub_type(ANONYMOUS_USER_ID) - final_username = ANONYMOUS_USERNAME - return {"user_id": final_user_id, "username": final_username} + if final_username == ANONYMOUS_USERNAME and client_id != "": + raise Forbidden("This endpoint does not support client credentials tokens") + + return { + "user_id": final_user_id, + "username": final_username, + "client_id": client_id, + } -def _is_anonymous_user(user_info): +def _is_anonymous_user(auth_info): """ Check if there's a current user authenticated or if request is anonymous """ - user_info = user_info or _get_user_info_for_id_or_from_request() - return str(user_info.get("user_id")) == ANONYMOUS_USER_ID + auth_info = auth_info or _get_auth_info_for_id_or_from_request() + return str(auth_info.get("user_id")) == ANONYMOUS_USER_ID def filter_auth_ids(action, list_auth_ids): diff --git a/fence/jwt/token.py b/fence/jwt/token.py index 2ac64b0be..d02a6ba47 100644 --- a/fence/jwt/token.py +++ b/fence/jwt/token.py @@ -307,7 +307,9 @@ def generate_signed_refresh_token( if client_id: claims["aud"].append(client_id) - logger.info("issuing JWT refresh token with id [{}] to [{}]".format(jti, sub)) + logger.info( + "issuing JWT refresh token with id [{}] to user sub [{}]".format(jti, sub) + ) logger.debug(f"issuing JWT refresh token: {claims}") token = jwt.encode(claims, private_key, headers=headers, algorithm="RS256") @@ -358,9 +360,9 @@ def generate_api_key(kid, private_key, user_id, expires_in, scopes, client_id): def generate_signed_access_token( kid, private_key, - user, expires_in, scopes, + user=None, iss=None, forced_exp_time=None, client_id=None, @@ -373,9 +375,9 @@ def generate_signed_access_token( Args: kid (str): key id of the keypair used to generate token private_key (str): RSA private key to sign and encode the JWT with - user (fence.models.User): User to generate ID token for expires_in (int): seconds until expiration scopes (List[str]): oauth scopes for user + user (fence.models.User): optional - User to generate ID token for Return: str: encoded JWT access token signed with ``private_key`` @@ -384,7 +386,6 @@ def generate_signed_access_token( iat, exp = issued_and_expiration_times(expires_in) # force exp time if provided exp = forced_exp_time or exp - sub = str(user.id) jti = str(uuid.uuid4()) if not iss: try: @@ -397,20 +398,13 @@ def generate_signed_access_token( claims = { "pur": "access", - "sub": sub, "iss": iss, "aud": [iss], "iat": iat, "exp": exp, "jti": jti, "scope": scopes, - "context": { - "user": { - "name": user.username, - "is_admin": user.is_admin, - "google": {"proxy_group": user.google_proxy_group_id}, - } - }, + "context": {}, "azp": client_id or "", } @@ -421,13 +415,27 @@ def generate_signed_access_token( if scopes: claims["aud"] += scopes - # only add google linkage information if provided - if linked_google_email: - claims["context"]["user"]["google"][ - "linked_google_account" - ] = linked_google_email + sub = None + if user: + sub = str(user.id) + claims["sub"] = sub + claims["context"]["user"] = { + "name": user.username, + "is_admin": user.is_admin, + "google": {"proxy_group": user.google_proxy_group_id}, + } + + # only add google linkage information if provided + if linked_google_email: + claims["context"]["user"]["google"][ + "linked_google_account" + ] = linked_google_email - logger.info("issuing JWT access token with id [{}] to [{}]".format(jti, sub)) + 
logger.info( + "issuing JWT access token with id [{}] to user sub [{}] and client id [{}]".format( + jti, sub, client_id + ) + ) logger.debug(f"issuing JWT access token {claims}") token = jwt.encode(claims, private_key, headers=headers, algorithm="RS256") diff --git a/fence/models.py b/fence/models.py index 1122e14cf..985db5f7d 100644 --- a/fence/models.py +++ b/fence/models.py @@ -214,6 +214,25 @@ def __init__(self, client_id, **kwargs): # assume it's already in correct format kwargs["grant_type"] = grant_types + supported_grant_types = [ + "authorization_code", + "refresh_token", + "implicit", + "client_credentials", + ] + assert all( + grant_type in supported_grant_types + for grant_type in kwargs["grant_type"].split("\n") + ), f"Grant types '{kwargs['grant_type']}' are not in supported types {supported_grant_types}" + + if "authorization_code" in kwargs["grant_type"].split("\n"): + assert kwargs.get("user") or kwargs.get( + "user_id" + ), "A username is required for the 'authorization_code' grant" + assert kwargs.get( + "redirect_uri" + ), "Redirect URL(s) are required for the 'authorization_code' grant" + super(Client, self).__init__(client_id=client_id, **kwargs) @property @@ -260,6 +279,7 @@ def check_client_secret(self, client_secret): def check_requested_scopes(self, scopes): if "openid" not in scopes: + logger.error(f"Invalid scopes: 'openid' not in requested scopes ({scopes})") return False return set(self.allowed_scopes).issuperset(scopes) diff --git a/fence/oidc/grants/__init__.py b/fence/oidc/grants/__init__.py index 9ca3cfbfb..f3740ba19 100644 --- a/fence/oidc/grants/__init__.py +++ b/fence/oidc/grants/__init__.py @@ -1,3 +1,4 @@ from fence.oidc.grants.implicit_grant import ImplicitGrant from fence.oidc.grants.oidc_code_grant import OpenIDCodeGrant from fence.oidc.grants.refresh_token_grant import RefreshTokenGrant +from fence.oidc.grants.client_credentials_grant import ClientCredentialsGrant diff --git a/fence/oidc/grants/client_credentials_grant.py b/fence/oidc/grants/client_credentials_grant.py new file mode 100644 index 000000000..4c130db69 --- /dev/null +++ b/fence/oidc/grants/client_credentials_grant.py @@ -0,0 +1 @@ +from authlib.oauth2.rfc6749.grants import ClientCredentialsGrant diff --git a/fence/oidc/jwt_generator.py b/fence/oidc/jwt_generator.py index 9541151e1..eb9073113 100644 --- a/fence/oidc/jwt_generator.py +++ b/fence/oidc/jwt_generator.py @@ -51,6 +51,8 @@ def generate_token(client, grant_type, **kwargs): return generate_token_response(client, grant_type, **kwargs) elif grant_type == "implicit": return generate_implicit_response(client, grant_type, **kwargs) + elif grant_type == "client_credentials": + return generate_client_response(client, **kwargs) def generate_implicit_response( @@ -63,6 +65,18 @@ def generate_implicit_response( nonce=None, **kwargs ): + """ + Generate the token response for the "implicit" grant. + + Return: + dict: token response + { + "token_type": "Bearer", + "id_token": "", + "access_token": "", + "expires_in": 1200, + } + """ # prevent those bothersome "not bound to session" errors if user not in current_session: user = current_session.query(User).filter_by(id=user.id).first() @@ -140,6 +154,20 @@ def generate_token_response( refresh_token_claims=None, **kwargs ): + """ + Generate the token response for the "authorization_code" and + "refresh_token" grants. 
+ + Return: + dict: token response + { + "token_type": "Bearer", + "id_token": "", + "access_token": "", + "refresh_token": "", + "expires_in": 1200, + } + """ # prevent those bothersome "not bound to session" errors if user not in current_session: user = current_session.query(User).filter_by(id=user.id).first() @@ -220,3 +248,42 @@ def generate_token_response( "refresh_token": refresh_token, "expires_in": expires_in, } + + +def generate_client_response(client, expires_in=None, scope=None, **kwargs): + """ + Generate the token response for the "client_credentials" grant. + + Args: + client (Client): OIDC client that initiated the request + expires_in (int): Optional (default: configurable + `ACCESS_TOKEN_EXPIRES_IN`) - token lifetime in seconds + scope (List[str]): list of requested scopes + + Return: + dict: token response + { + "token_type": "Bearer", + "access_token": "", + "expires_in": 1200, + } + """ + keypair = flask.current_app.keypairs[0] + expires_in = config["ACCESS_TOKEN_EXPIRES_IN"] or expires_in + + scope = scope or [] + if not isinstance(scope, list): + scope = scope.split(" ") + + access_token = generate_signed_access_token( + kid=keypair.kid, + private_key=keypair.private_key, + expires_in=expires_in, + scopes=scope, + client_id=client.client_id, + ).token + return { + "token_type": "Bearer", + "access_token": access_token, + "expires_in": expires_in, + } diff --git a/fence/oidc/oidc_server.py b/fence/oidc/oidc_server.py index d024d3896..391e42d92 100644 --- a/fence/oidc/oidc_server.py +++ b/fence/oidc/oidc_server.py @@ -41,8 +41,8 @@ class OIDCServer(AuthorizationServer): """ Implement the OIDC provider to attach to the flask app. - Specific OAuth grants (authorization code, refresh token) are added on to - a server instance using ``OIDCServer.register_grant_endpoint(grant)``. For + Specific OAuth grants (authorization code, refresh token, etc) are added + on to a server instance using ``OIDCServer.register_grant(grant)``. For usage, see ``fence/oidc/server.py``. 
""" diff --git a/fence/oidc/server.py b/fence/oidc/server.py index ac5929dba..846fd4224 100644 --- a/fence/oidc/server.py +++ b/fence/oidc/server.py @@ -8,7 +8,12 @@ from fence.oidc.client import authenticate_public_client, query_client from fence.oidc.endpoints import RevocationEndpoint -from fence.oidc.grants import OpenIDCodeGrant, ImplicitGrant, RefreshTokenGrant +from fence.oidc.grants import ( + OpenIDCodeGrant, + ImplicitGrant, + RefreshTokenGrant, + ClientCredentialsGrant, +) from fence.oidc.oidc_server import OIDCServer @@ -16,5 +21,6 @@ server.register_grant(OpenIDCodeGrant) server.register_grant(ImplicitGrant) server.register_grant(RefreshTokenGrant) +server.register_grant(ClientCredentialsGrant) server.register_endpoint(RevocationEndpoint) server.register_client_auth_method("none", authenticate_public_client) diff --git a/fence/resources/storage/cdis_jwt.py b/fence/resources/storage/cdis_jwt.py index 45c89cb47..e53eced85 100644 --- a/fence/resources/storage/cdis_jwt.py +++ b/fence/resources/storage/cdis_jwt.py @@ -16,7 +16,7 @@ def create_access_token(user, keypair, api_key, expires_in, scopes): except Exception as e: return flask.jsonify({"errors": str(e)}) return token.generate_signed_access_token( - keypair.kid, keypair.private_key, user, expires_in, scopes + keypair.kid, keypair.private_key, expires_in, scopes, user=user ).token @@ -70,5 +70,5 @@ def create_user_access_token(keypair, api_key, expires_in): except Exception as e: raise Unauthorized(str(e)) return token.generate_signed_access_token( - keypair.kid, keypair.private_key, user, expires_in, scopes + keypair.kid, keypair.private_key, expires_in, scopes, user=user ).token diff --git a/fence/resources/user/user_session.py b/fence/resources/user/user_session.py index 8a0a5731b..182465326 100644 --- a/fence/resources/user/user_session.py +++ b/fence/resources/user/user_session.py @@ -317,9 +317,9 @@ def _create_access_token_cookie(app, session, response, user): access_token = generate_signed_access_token( keypair.kid, keypair.private_key, - user, config.get("ACCESS_TOKEN_EXPIRES_IN"), scopes, + user=user, forced_exp_time=expiration, linked_google_email=linked_google_email, ).token diff --git a/fence/scripting/fence_create.py b/fence/scripting/fence_create.py index b9cca97bd..8f8b1f839 100644 --- a/fence/scripting/fence_create.py +++ b/fence/scripting/fence_create.py @@ -128,20 +128,19 @@ def modify_client_action( def create_client_action( DB, username=None, client=None, urls=None, auto_approve=False, **kwargs ): - try: - print( - "\nSave these credentials! Fence will not save the unhashed client secret." - ) - print("client id, client secret:") - # This should always be the last line of output and should remain in this format-- - # cloud-auto and gen3-qa use the output programmatically. - print( - create_client( - username, urls, DB, name=client, auto_approve=auto_approve, **kwargs - ) - ) - except Exception as e: - logger.error(str(e)) + print("\nSave these credentials! Fence will not save the unhashed client secret.") + res = create_client( + DB=DB, + username=username, + urls=urls, + name=client, + auto_approve=auto_approve, + **kwargs, + ) + print("client id, client secret:") + # This should always be the last line of output and should remain in this format-- + # cloud-auto and gen3-qa use the output programmatically. 
+ print(res) def delete_client_action(DB, client_name): @@ -1008,9 +1007,9 @@ def create_access_token(self): return generate_signed_access_token( self.kid, self.private_key, - user, self.expires_in, self.scopes, + user=user, iss=self.base_url, ) diff --git a/fence/sync/sync_users.py b/fence/sync/sync_users.py index 46150cb19..04a1b13ce 100644 --- a/fence/sync/sync_users.py +++ b/fence/sync/sync_users.py @@ -1481,6 +1481,7 @@ def _sync(self, sess): self.sync_from_local_yaml_file, encrypted=False, logger=self.logger ) except (EnvironmentError, AssertionError) as e: + # TODO return an error code so usersync doesn't fail silently self.logger.error(str(e)) self.logger.error("aborting early") return @@ -2333,6 +2334,7 @@ def sync_single_user_visas(self, user, ga4gh_visas, sess=None, expires=None): self.sync_from_local_yaml_file, encrypted=False, logger=self.logger ) except (EnvironmentError, AssertionError) as e: + # TODO return an error code so usersync doesn't fail silently self.logger.error(str(e)) self.logger.error("aborting early") return diff --git a/fence/utils.py b/fence/utils.py index e510a0f74..89be6b933 100644 --- a/fence/utils.py +++ b/fence/utils.py @@ -35,9 +35,9 @@ def json_res(data): def create_client( - username, - urls, DB, + username=None, + urls=[], name="", description="", auto_approve=False, @@ -51,7 +51,6 @@ def create_client( client_id = random_str(40) if arborist is not None: arborist.create_client(client_id, policies) - grant_types = grant_types driver = SQLAlchemyDriver(DB) client_secret = None hashed_secret = None @@ -61,6 +60,7 @@ def create_client( client_secret.encode("utf-8"), bcrypt.gensalt() ).decode("utf-8") auth_method = "client_secret_basic" if confidential else "none" + allowed_scopes = allowed_scopes or config["CLIENT_ALLOWED_SCOPES"] if not set(allowed_scopes).issubset(set(config["CLIENT_ALLOWED_SCOPES"])): raise ValueError( @@ -68,15 +68,19 @@ def create_client( config["CLIENT_ALLOWED_SCOPES"] ) ) + if "openid" not in allowed_scopes: allowed_scopes.append("openid") logger.warning('Adding required "openid" scope to list of allowed scopes.') + with driver.session as s: - user = query_for_user(session=s, username=username) + user = None + if username: + user = query_for_user(session=s, username=username) + if not user: + user = User(username=username, is_admin=is_admin) + s.add(user) - if not user: - user = User(username=username, is_admin=is_admin) - s.add(user) if s.query(Client).filter(Client.name == name).first(): if arborist is not None: arborist.delete_client(client_id) @@ -96,6 +100,7 @@ def create_client( ) s.add(client) s.commit() + return client_id, client_secret diff --git a/migrations/versions/ea7e1b843f82_optional_client_redirect_uri.py b/migrations/versions/ea7e1b843f82_optional_client_redirect_uri.py new file mode 100644 index 000000000..a9c4a9573 --- /dev/null +++ b/migrations/versions/ea7e1b843f82_optional_client_redirect_uri.py @@ -0,0 +1,26 @@ +"""Optional Client.redirect_uri + +Revision ID: ea7e1b843f82 +Revises: e4c7b0ab68d3 +Create Date: 2022-07-27 16:49:52.793557 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision = "ea7e1b843f82" +down_revision = "e4c7b0ab68d3" +branch_labels = None +depends_on = None + + +def upgrade(): + op.alter_column("client", "redirect_uri", nullable=True) + + +def downgrade(): + # replace null values with an empty string + op.execute("UPDATE client SET redirect_uri='' WHERE redirect_uri IS NULL") + op.alter_column("client", "redirect_uri", nullable=False) diff --git a/openapis/swagger.yaml b/openapis/swagger.yaml index 34164c508..8ac95a415 100644 --- a/openapis/swagger.yaml +++ b/openapis/swagger.yaml @@ -236,26 +236,33 @@ paths: properties: grant_type: description: >- - Value MUST be `"authorization_code"` (which is the - authorization flow used in this implementation) + Value MUST be one of the supported grant types: + `"authorization_code"` or `"client_credentials"` type: string code: description: >- - The authorization code returned from the OAuth2 - authorization request + (Required if `grant_type` is `"authorization_code"`; + unused otherwise) The authorization code returned from the + OAuth2 authorization request type: string redirect_uri: description: >- - Must be identical to the `"redirect_uri"` included in the - original authorization request + (Required if `grant_type` is `"authorization_code"`; + unused otherwise) Must be identical to the + `"redirect_uri"` included in the original authorization + request + type: string + scope: + description: >- + (Optional if `grant_type` is `"client_credentials"`; + unused otherwise) Requested authorization scope. If + provided, must include `openid`. `user` allows getting a + user's access information. type: string client_id: type: string required: - grant_type - - code - - redirect_uri - - client_id /oauth2/revoke: post: tags: diff --git a/tests/conftest.py b/tests/conftest.py index 92b6ce782..463302f59 100755 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -540,6 +540,7 @@ def drop_all(): connection.begin() for table in reversed(models.Base.metadata.sorted_tables): connection.execute(table.delete()) + connection.close() request.addfinalizer(drop_all) @@ -1307,6 +1308,7 @@ def oauth_client(app, db_session, oauth_user, get_all_shib_idps_patcher): client_secret.encode("utf-8"), bcrypt.gensalt() ).decode("utf-8") test_user = db_session.query(models.User).filter_by(id=oauth_user.user_id).first() + grant_types = ["authorization_code", "refresh_token"] db_session.add( models.Client( client_id=client_id, @@ -1317,11 +1319,16 @@ def oauth_client(app, db_session, oauth_user, get_all_shib_idps_patcher): description="", is_confidential=True, name="testclient", - grant_types=["authorization_code", "refresh_token"], + grant_types=grant_types, ) ) db_session.commit() - return Dict(client_id=client_id, client_secret=client_secret, url=url) + return Dict( + client_id=client_id, + client_secret=client_secret, + url=url, + grant_types=grant_types, + ) @pytest.fixture(scope="function") @@ -1341,6 +1348,7 @@ def oauth_client_B(app, request, db_session): if not test_user: test_user = models.User(username="test", is_admin=False) db_session.add(test_user) + grant_types = ["authorization_code", "refresh_token"] db_session.add( models.Client( client_id=client_id, @@ -1351,12 +1359,17 @@ def oauth_client_B(app, request, db_session): description="", is_confidential=True, name="testclientb", - grant_types=["authorization_code", "refresh_token"], + grant_types=grant_types, ) ) db_session.commit() - return Dict(client_id=client_id, client_secret=client_secret, url=url) + return Dict( + client_id=client_id, + client_secret=client_secret, + 
url=url, + grant_types=grant_types, + ) @pytest.fixture(scope="function") @@ -1367,6 +1380,7 @@ def oauth_client_public(app, db_session, oauth_user): url = "https://oauth-test-client-public.net" client_id = "test-client-public" test_user = db_session.query(models.User).filter_by(id=oauth_user.user_id).first() + grant_types = ["authorization_code", "refresh_token"] db_session.add( models.Client( client_id=client_id, @@ -1376,11 +1390,46 @@ def oauth_client_public(app, db_session, oauth_user): description="", is_confidential=False, name="testclient-public", - grant_types=["authorization_code", "refresh_token"], + grant_types=grant_types, ) ) db_session.commit() - return Dict(client_id=client_id, url=url) + return Dict(client_id=client_id, url=url, grant_types=grant_types) + + +@pytest.fixture(scope="function") +def oauth_client_with_client_credentials(db_session, get_all_shib_idps_patcher): + """ + Create a confidential OAuth2 client and add it to the database along with a + test user for the client. + """ + url = "https://oauth-test-client-with-client-credentials.net" + client_id = "test-client-with-client-credentials" + client_secret = fence.utils.random_str(50) + hashed_secret = bcrypt.hashpw( + client_secret.encode("utf-8"), bcrypt.gensalt() + ).decode("utf-8") + grant_types = ["client_credentials"] + scopes = ["openid", "user", "data"] + db_session.add( + models.Client( + client_id=client_id, + client_secret=hashed_secret, + allowed_scopes=scopes, + description="", + is_confidential=True, + name="testclient-with-client-credentials", + grant_types=grant_types, + ) + ) + db_session.commit() + return Dict( + client_id=client_id, + client_secret=client_secret, + url=url, + grant_types=grant_types, + scopes=scopes, + ) @pytest.fixture(scope="function") @@ -1398,6 +1447,15 @@ def oauth_test_client_public(client, oauth_client_public): return OAuth2TestClient(client, oauth_client_public, confidential=False) +@pytest.fixture(scope="function") +def oauth_test_client_with_client_credentials( + client, oauth_client_with_client_credentials +): + return OAuth2TestClient( + client, oauth_client_with_client_credentials, confidential=True + ) + + @pytest.fixture(scope="session") def microsoft_oauth2_client(): settings = MagicMock() diff --git a/tests/credentials/api_key/test_access.py b/tests/credentials/api_key/test_access.py index 02cd86861..d52079081 100644 --- a/tests/credentials/api_key/test_access.py +++ b/tests/credentials/api_key/test_access.py @@ -2,8 +2,6 @@ Test using an API key to generate an access token. 
""" -import json - from tests.utils.api_key import get_api_key diff --git a/tests/credentials/google/test_credentials.py b/tests/credentials/google/test_credentials.py index 10970a9d1..a355ac8f8 100644 --- a/tests/credentials/google/test_credentials.py +++ b/tests/credentials/google/test_credentials.py @@ -576,7 +576,11 @@ def test_google_attempt_delete_unowned_access_token( # create a service account for A DIFFERENT CLIENT client_entry = Client( - client_id="NOT_THIS_GUY", client_secret="a0987u23on192y", name="NOT_THIS_GUY" + client_id="NOT_THIS_GUY", + client_secret="a0987u23on192y", + name="NOT_THIS_GUY", + user=User(username="client_user"), + redirect_uris=["localhost"], ) service_account = GoogleServiceAccount( google_unique_id="123456789", diff --git a/tests/data/test_azure_blob_storage_indexed_file_location.py b/tests/data/test_azure_blob_storage_indexed_file_location.py index ee3599b8a..1b663d7df 100755 --- a/tests/data/test_azure_blob_storage_indexed_file_location.py +++ b/tests/data/test_azure_blob_storage_indexed_file_location.py @@ -70,7 +70,7 @@ def test_get_signed_url( return_value=storage_account_matches, ): with patch( - "fence.blueprints.data.indexd._get_user_info_for_id_or_from_request", + "fence.blueprints.data.indexd._get_auth_info_for_id_or_from_request", return_value={"user_id": user_id}, ): azure_blob_storage_indexed_file_location = ( diff --git a/tests/data/test_data.py b/tests/data/test_data.py index f54f74b6d..85575bf8f 100755 --- a/tests/data/test_data.py +++ b/tests/data/test_data.py @@ -18,6 +18,7 @@ from fence.config import config +from fence.blueprints.data.indexd import ANONYMOUS_USER_ID, ANONYMOUS_USERNAME from tests import utils @@ -1822,3 +1823,47 @@ def test_delete_files(app, client, auth_client, encoded_creds_jwt, user_client): assert status == 400 fence.auth.config["MOCK_AUTH"] = False + + +def test_download_s3_file_with_client_token( + client, + indexd_client_accepting_record, + kid, + rsa_private_key, + mock_arborist_requests, +): + """ + Test that an access token that does not include a `sub` or `context.user. + name` (such as a token issued from the `client_credentials` flow) cannot be + used to download data from S3. 
+ """ + indexd_record = { + **INDEXD_RECORD_WITH_PUBLIC_AUTHZ_POPULATED, + "did": "guid_for:test_download_file_with_client_token", + "authz": ["/test/resource/path"], + "urls": ["s3://bucket1/key"], + } + indexd_client_accepting_record(indexd_record) + mock_arborist_requests({"arborist/auth/request": {"POST": ({"auth": True}, 200)}}) + client_credentials_token = utils.client_authorized_download_context_claims() + headers = { + "Authorization": "Bearer " + + jwt.encode( + client_credentials_token, + key=rsa_private_key, + headers={"kid": kid}, + algorithm="RS256", + ).decode("utf-8") + } + + response = client.get("/data/download/1", headers=headers) + assert response.status_code == 403 + + # Enable the block below if we start allowing downloads with client tokens + # signed_url = response.json.get("url") + # assert signed_url + # # check signing query parameters + # query_params = urllib.parse.parse_qs(signed_url) + # assert query_params.get("user_id") == [ANONYMOUS_USER_ID] + # assert query_params.get("username") == [ANONYMOUS_USERNAME] + # assert query_params.get("client_id") == [client_credentials_token["azp"]] diff --git a/tests/jwt/test_oversized_jwt.py b/tests/jwt/test_oversized_jwt.py index 08ce2d313..130b9b8f8 100644 --- a/tests/jwt/test_oversized_jwt.py +++ b/tests/jwt/test_oversized_jwt.py @@ -28,7 +28,7 @@ def test_oversized_access_token(app, rsa_private_key, test_user_a): _, exp = iat_and_exp() with pytest.raises(JWTSizeError): generate_signed_access_token( - oversized_junk(), rsa_private_key, test_user_a, exp, ["openid", "user"] + oversized_junk(), rsa_private_key, exp, ["openid", "user"], user=test_user_a ) diff --git a/tests/jwt/test_tokens.py b/tests/jwt/test_tokens.py index 99caafe05..e675d1e8d 100644 --- a/tests/jwt/test_tokens.py +++ b/tests/jwt/test_tokens.py @@ -21,9 +21,9 @@ def test_passport_access_token(app, kid, rsa_private_key, test_user_a): jwt_token = generate_signed_access_token( kid, rsa_private_key, - test_user_a, exp, ["openid", "user", "ga4gh_passport_v1"], + user=test_user_a, client_id="client_a", ) payload = jwt.decode(jwt_token.token, verify=False) diff --git a/tests/migrations/test_ea7e1b843f82.py b/tests/migrations/test_ea7e1b843f82.py new file mode 100644 index 000000000..8898f4caf --- /dev/null +++ b/tests/migrations/test_ea7e1b843f82.py @@ -0,0 +1,123 @@ +""" +"Optional Client.redirect_uri" migration +""" + +from alembic.config import main as alembic_main +import pytest +from sqlalchemy.exc import IntegrityError + +from fence.models import Client +from fence.utils import random_str + + +@pytest.fixture(scope="function", autouse=True) +def post_test_clean_up(app): + yield + + # clean up the client table + with app.db.session as db_session: + db_session.query(Client).delete() + + # go back to the latest state of the DB + alembic_main(["--raiseerr", "upgrade", "head"]) + + +def test_upgrade(app): + # state before migration + alembic_main(["--raiseerr", "downgrade", "e4c7b0ab68d3"]) + + # before the migration, it should not be possible to create a client + # without a redirect_uri + with app.db.session as db_session: + with pytest.raises(IntegrityError): + db_session.add( + Client( + client_id="client_without_redirect_uri", + name="client_without_redirect_uri_name", + grant_types="client_credentials", + ) + ) + db_session.commit() + db_session.rollback() + + # run the upgrade migration + alembic_main(["--raiseerr", "upgrade", "ea7e1b843f82"]) + + # now it should be possible + with app.db.session as db_session: + db_session.add( + Client( + 
client_id="client_without_redirect_uri", + name="client_without_redirect_uri_name", + grant_types="client_credentials", + ) + ) + db_session.commit() + query_result = db_session.query(Client).all() + + # make sure the client was created + assert len(query_result) == 1, query_result + assert query_result[0].client_id == "client_without_redirect_uri" + assert query_result[0].redirect_uri == None + + +def test_downgrade(app): + # state after migration + alembic_main(["--raiseerr", "downgrade", "ea7e1b843f82"]) + + with app.db.session as db_session: + # it should possible to create a client without a redirect_uri + db_session.add( + Client( + client_id="client_without_redirect_uri", + name="client_without_redirect_uri_name", + grant_types="client_credentials", + ) + ) + # also create a client with a redirect_uri + db_session.add( + Client( + client_id="client_with_redirect_uri", + name="client_with_redirect_uri_name", + grant_types="client_credentials", + redirect_uri="http://localhost/redirect", + ) + ) + query_result = db_session.query(Client).all() + + # make sure the clients were created + assert len(query_result) == 2, query_result + + client_without_redirect_uri = [ + c for c in query_result if c.client_id == "client_without_redirect_uri" + ] + assert len(client_without_redirect_uri) == 1 + assert client_without_redirect_uri[0].redirect_uri == None + + client_with_redirect_uri = [ + c for c in query_result if c.client_id == "client_with_redirect_uri" + ] + assert len(client_with_redirect_uri) == 1 + assert client_with_redirect_uri[0].redirect_uri == "http://localhost/redirect" + + # run the downgrade migration + alembic_main(["--raiseerr", "downgrade", "e4c7b0ab68d3"]) + + with app.db.session as db_session: + query_result = db_session.query(Client).all() + assert len(query_result) == 2, query_result + + # make sure the client without redirect was migrated to have an empty + # string as redirect_uri instead of null + client_without_redirect_uri = [ + c for c in query_result if c.client_id == "client_without_redirect_uri" + ] + assert len(client_without_redirect_uri) == 1 + assert client_without_redirect_uri[0].redirect_uri == "" + + # make sure the client with redirect is unchanged + client_with_redirect_uri = [ + c for c in query_result if c.client_id == "client_with_redirect_uri" + ] + assert len(client_with_redirect_uri) == 1 + assert client_with_redirect_uri[0].redirect_uri == "http://localhost/redirect" diff --git a/tests/rfc6749/test_oauth2.py b/tests/rfc6749/test_oauth2.py index a8001b8bd..cc1c3d3f3 100644 --- a/tests/rfc6749/test_oauth2.py +++ b/tests/rfc6749/test_oauth2.py @@ -2,6 +2,7 @@ Test the endpoints in the ``/oauth2`` blueprint. 
""" +import jwt import pytest from fence.jwt.token import SCOPE_DESCRIPTION @@ -43,6 +44,22 @@ def test_oauth2_token_post(oauth_test_client): oauth_test_client.authorize(data=data) oauth_test_client.token() + response = oauth_test_client.token_response.response + assert response.status_code == 200, response.json + response = response.json + assert "id_token" in response + assert "access_token" in response + assert "refresh_token" in response + assert "expires_in" in response + assert response.get("token_type") == "Bearer" + + payload = jwt.decode(response["access_token"], verify=False) + assert payload.get("iss") == "http://localhost/user" + assert payload.get("azp") == oauth_test_client.client_id + assert "context" in payload + assert payload.get("context", {}).get("user", {}).get("name") == "test" + assert payload.get("scope") == ["openid", "user"] + def test_oauth2_token_post_public_client(oauth_test_client_public): """Test ``POST /oauth2/token`` for public client.""" @@ -86,3 +103,62 @@ def test_oauth2_token_post_revoke(oauth_test_client): oauth_test_client.refresh(refresh_token, do_asserts=False) response = oauth_test_client.refresh_response.response assert response.status_code == 400 + + +def test_oauth2_with_client_credentials( + oauth_client_with_client_credentials, oauth_test_client_with_client_credentials +): + """ + Test that a client with the client_credentials grant can exchange its + client ID and secret for an access token + """ + # hit /oauth2/token + oauth_test_client_with_client_credentials.token( + scope=" ".join(oauth_client_with_client_credentials.scopes) + ) + + response = oauth_test_client_with_client_credentials.token_response.response + assert response.status_code == 200, response.json + response = response.json + assert "access_token" in response + assert "expires_in" in response + assert response.get("token_type") == "Bearer" + + payload = jwt.decode(response["access_token"], verify=False) + assert payload.get("iss") == "http://localhost/user" + assert payload.get("azp") == oauth_test_client_with_client_credentials.client_id + assert payload.get("context") == {} # no user linked to this token + assert payload.get("scope") == oauth_client_with_client_credentials.scopes + + +def test_oauth2_with_client_credentials_bad_scope( + oauth_test_client_with_client_credentials, +): + """ + Test that a client with the client_credentials grant cannot exchange its + client ID and secret for an access token when requesting a scope it does + not have + """ + # hit /oauth2/token + oauth_test_client_with_client_credentials.token( + scope="openid unknown-scope", do_asserts=False + ) + + response = oauth_test_client_with_client_credentials.token_response.response + assert response.status_code == 400, response.json + assert response.json.get("error") == "invalid_scope" + + +def test_oauth2_without_client_credentials(oauth_test_client): + """ + Test that a client without the client_credentials grant cannot exchange its + client ID and secret for an access token + """ + oauth_test_client.authorize(data={"confirm": "yes"}) + + oauth_test_client.grant_types = ["client_credentials"] + oauth_test_client.token(do_asserts=False) # hit /oauth2/token + + response = oauth_test_client.token_response.response + assert response.status_code == 400, response.json + assert response.json.get("error") == "unauthorized_client" diff --git a/tests/scripting/test_fence-create.py b/tests/scripting/test_fence-create.py index 2cd5911a1..192d0447c 100644 --- a/tests/scripting/test_fence-create.py +++ 
b/tests/scripting/test_fence-create.py
@@ -159,10 +159,54 @@ def to_test():
         client_after = db_session.query(Client).filter_by(name=client_name).all()
         assert len(client_after) == 0
 
+    with pytest.raises(ValueError):
+        create_client_action_wrapper(
+            to_test,
+            client_name=client_name,
+            allowed_scopes=["openid", "user", "data", "invalid_scope"],
+        )
+
+
+def test_create_client_without_user_and_url(db_session):
+    """
+    Test that a client with the authorization_code grant cannot be created
+    without providing a username or redirect URLs.
+    """
+    client_name = "client_without_user_and_url"
+    grant_types = ["authorization_code", "client_credentials"]
+
+    def to_test():
+        client_after = db_session.query(Client).filter_by(name=client_name).all()
+        assert len(client_after) == 0
+
+    with pytest.raises(AssertionError):
+        create_client_action_wrapper(
+            to_test,
+            client_name=client_name,
+            username=None,
+            urls=None,
+            grant_types=grant_types,
+        )
+
+
+def test_create_client_with_client_credentials(db_session):
+    """
+    Test that a client with the client_credentials grant can be created
+    without providing a username or redirect URLs.
+    """
+    client_name = "client_with_client_credentials"
+    grant_types = ["client_credentials"]
+
+    def to_test():
+        saved_client = db_session.query(Client).filter_by(name=client_name).first()
+        assert saved_client.grant_types == grant_types
+
     create_client_action_wrapper(
         to_test,
         client_name=client_name,
-        allowed_scopes=["openid", "user", "data", "invalid_scope"],
+        username=None,
+        urls=None,
+        grant_types=grant_types,
     )
 
 
@@ -172,7 +216,13 @@ def test_client_delete(app, db_session, cloud_manager, test_user_a):
     service accounts and the client themself.
     """
     client_name = "test123"
-    client = Client(client_id=client_name, client_secret="secret", name=client_name)
+    client = Client(
+        client_id=client_name,
+        client_secret="secret",
+        name=client_name,
+        user=User(username="client_user"),
+        redirect_uris="localhost",
+    )
     db_session.add(client)
     db_session.commit()
@@ -207,7 +257,13 @@ def test_client_delete_error(app, db_session, cloud_manager, test_user_a):
     we don't remove it from the db.
""" client_name = "test123" - client = Client(client_id=client_name, client_secret="secret", name=client_name) + client = Client( + client_id=client_name, + client_secret="secret", + name=client_name, + user=User(username="client_user"), + redirect_uris=["localhost"], + ) db_session.add(client) db_session.commit() @@ -1261,7 +1317,13 @@ def test_delete_expired_service_account_keys_both_user_and_client( def test_list_client_action(db_session, capsys): client_name = "test123" - client = Client(client_id=client_name, client_secret="secret", name=client_name) + client = Client( + client_id=client_name, + client_secret="secret", + name=client_name, + user=User(username="client_user"), + redirect_uris=["localhost"], + ) db_session.add(client) db_session.commit() list_client_action(db_session) @@ -1274,7 +1336,13 @@ def test_list_client_action(db_session, capsys): def test_modify_client_action(db_session): client_id = "testid" client_name = "test123" - client = Client(client_id=client_id, client_secret="secret", name=client_name) + client = Client( + client_id=client_id, + client_secret="secret", + name=client_name, + user=User(username="client_user"), + redirect_uris=["localhost"], + ) db_session.add(client) db_session.commit() modify_client_action( @@ -1387,6 +1455,8 @@ def test_modify_client_action_modify_allowed_scopes(db_session): client_secret="secret", # pragma: allowlist secret name=client_name, _allowed_scopes="openid user data", + user=User(username="client_user"), + redirect_uris=["localhost"], ) db_session.add(client) db_session.commit() @@ -1415,6 +1485,8 @@ def test_modify_client_action_modify_allowed_scopes_append_true(db_session): client_secret="secret", # pragma: allowlist secret name=client_name, _allowed_scopes="openid user data", + user=User(username="client_user"), + redirect_uris=["localhost"], ) db_session.add(client) db_session.commit() @@ -1444,6 +1516,7 @@ def test_modify_client_action_modify_append_url(db_session): client_secret="secret", # pragma: allowlist secret name=client_name, _allowed_scopes="openid user data", + user=User(username="client_user"), redirect_uris="abcd", ) db_session.add(client) diff --git a/tests/test_datamodel.py b/tests/test_datamodel.py index 3284ced70..b07232af2 100644 --- a/tests/test_datamodel.py +++ b/tests/test_datamodel.py @@ -20,6 +20,7 @@ def test_user_delete_cascade(db_session): user=user, client_id=random_str(40), client_secret=random_str(60), + redirect_uris=["localhost"], ) db_session.add(user) db_session.add(client) diff --git a/tests/utils/__init__.py b/tests/utils/__init__.py index adebec098..ba3f72370 100644 --- a/tests/utils/__init__.py +++ b/tests/utils/__init__.py @@ -457,6 +457,31 @@ def authorized_upload_context_claims(user_name, user_id): } +def client_authorized_download_context_claims(): + """ + Return a claims dictionary to put in a JWT. These claims do not contain + a `sub` or `context.user.name`, to mimic a token issued from the + `client_credentials` flow. + + Return: + dict: dictionary of claims + """ + iss = config["BASE_URL"] + jti = new_jti() + iat, exp = iat_and_exp() + return { + "aud": [iss], + "iss": iss, + "iat": iat, + "exp": exp, + "jti": jti, + "azp": "test_client_id", + "pur": "access", + "scope": ["access", "data", "user", "openid"], + "context": {}, + } + + class FakeFlaskRequest(object): """ Make a fake ``flask.request`` to patch in tests. 
diff --git a/tests/utils/oauth2/client.py b/tests/utils/oauth2/client.py index 8e88ac17e..d8744fda5 100644 --- a/tests/utils/oauth2/client.py +++ b/tests/utils/oauth2/client.py @@ -50,8 +50,9 @@ class TokenResponse(object): id_token (dict) """ - def __init__(self, response): + def __init__(self, response, grant_types): self.response = response + self.grant_types = grant_types try: self.access_token = response.json.get("access_token") self.refresh_token = response.json.get("refresh_token") @@ -64,8 +65,13 @@ def __init__(self, response): def do_asserts(self): assert self.response.status_code == 200, self.response.json assert "access_token" in self.response.json - assert "refresh_token" in self.response.json - assert "id_token" in self.response.json + + if any( + g in self.grant_types + for g in ["authorization_code", "refresh_token", "implicit"] + ): + assert "refresh_token" in self.response.json + assert "id_token" in self.response.json class OAuth2TestClient(object): @@ -127,6 +133,7 @@ def __init__(self, flask_client, oauth_client, confidential=True): self._client = flask_client self.client_id = oauth_client.client_id self.url = oauth_client.url + self.grant_types = oauth_client.grant_types if confidential: self.client_secret = oauth_client.client_secret self._auth_header = tests.utils.oauth2.create_basic_header( @@ -209,11 +216,13 @@ def authorize(self, method="POST", data=None, do_asserts=True, include_auth=True elif method == "POST": assert response.status_code == 200, response # Check that the redirect does go to the correct URL. - assert self.authorize_response.location.startswith(self.url) + assert self.authorize_response.location.startswith( + self.url + ), f"Expected location '{self.authorize_response.location}' to start with '{self.url}'" return self.authorize_response - def token(self, code=None, data=None, do_asserts=True, include_auth=True): + def token(self, code=None, data=None, do_asserts=True, include_auth=True, scope=""): """ Make a request to the token endpoint to get a set of tokens. 
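
The hunk below reworks this method so the grant type is inferred from the
client's `grant_types` instead of always assuming `authorization_code`. A
hedged usage sketch, with the fixture names borrowed from the tests earlier
in this series rather than defined here:

    # authorization_code flow: authorize first, then exchange the code
    oauth_test_client.authorize(data={"confirm": "yes"})
    oauth_test_client.token()

    # client_credentials flow: no authorize step; a scope string may be passed
    oauth_test_client_with_client_credentials.token(scope="openid user")
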
@@ -223,24 +232,35 @@ def token(self, code=None, data=None, do_asserts=True, include_auth=True): ``self.authorize_response.code`` data (Optional[dict]): parameters to include in request do_asserts (bool): whether to call asserts on token response + include_auth (bool) + scope (str): space-separated list of requested scopes """ - if not code and not self.authorize_response: - raise ValueError("no code provided") - code = code or self.authorize_response.code data = data or {} - default_data = { - "client_id": self.client_id, - "code": code, - "grant_type": "authorization_code", - "redirect_uri": self.url, - } - default_data.update(data) - data = default_data + default_data = {"client_id": self.client_id} if self.client_secret and include_auth: - data["client_secret"] = self.client_secret + default_data["client_secret"] = self.client_secret + + if any( + g in self.grant_types + for g in ["authorization_code", "refresh_token", "implicit"] + ): + if not code and not self.authorize_response: + raise ValueError("no code provided") + code = code or self.authorize_response.code + default_data["grant_type"] = "authorization_code" + default_data["code"] = code + default_data["redirect_uri"] = self.url + elif "client_credentials" in self.grant_types: + default_data["grant_type"] = "client_credentials" + if scope: + default_data["scope"] = scope + + default_data.update(data) headers = self._auth_header if include_auth else {} - response = self._client.post(self.PATH_TOKEN, headers=headers, data=data) - self.token_response = TokenResponse(response) + response = self._client.post( + self.PATH_TOKEN, headers=headers, data=default_data + ) + self.token_response = TokenResponse(response, self.grant_types) if do_asserts: self.token_response.do_asserts() return self.token_response @@ -272,7 +292,7 @@ def refresh(self, refresh_token=None, do_asserts=True, data=None): response = self._client.post( self.PATH_REFRESH, headers=self._auth_header, data=data ) - self.refresh_response = TokenResponse(response) + self.refresh_response = TokenResponse(response, self.grant_types) if do_asserts: self.refresh_response.do_asserts() return self.refresh_response From 95c4bbf76813d0ae2baf4a8207b9923142723f60 Mon Sep 17 00:00:00 2001 From: BinamB Date: Wed, 17 Aug 2022 13:08:41 -0500 Subject: [PATCH 15/52] Sem ver release --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 790e965d3..cdb69fcb6 100755 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "fence" -version = "6.0.1" +version = "6.1.0" description = "Gen3 AuthN/AuthZ OIDC Service" authors = ["CTDS UChicago "] license = "Apache-2.0" From 10b7cd23437587421e6445c999a3554c915f1101 Mon Sep 17 00:00:00 2001 From: Clint Date: Tue, 23 Aug 2022 11:15:26 -0700 Subject: [PATCH 16/52] Chore: Testing modified pipeline --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 206ed5c65..5a5fcf68b 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -13,7 +13,7 @@ concurrency: jobs: Security: name: Secure and Lint - uses: uc-cdis/.github/.github/workflows/securitypipeline.yaml@master + uses: uc-cdis/.github/.github/workflows/securitypipeline.yaml@SDO13_pipeline_fix secrets: inherit ci: From 8d7844f18b7035aaf124f1ac7480fc3208d3fff5 Mon Sep 17 00:00:00 2001 From: Clint Date: Tue, 23 Aug 2022 11:20:32 -0700 Subject: [PATCH 17/52] Empty-Commit From 
ce0d95b6b448301a94edb5c049a8b38fe17d6efe Mon Sep 17 00:00:00 2001 From: Clint Date: Tue, 23 Aug 2022 11:34:04 -0700 Subject: [PATCH 18/52] Chore: Testing modified pipeline --- .github/workflows/{ci.yaml => pipeline.yaml} | 2 +- workflowstodelete/image_build_push.yaml | 13 ------------- workflowstodelete/wool.yml | 17 ----------------- 3 files changed, 1 insertion(+), 31 deletions(-) rename .github/workflows/{ci.yaml => pipeline.yaml} (96%) delete mode 100644 workflowstodelete/image_build_push.yaml delete mode 100644 workflowstodelete/wool.yml diff --git a/.github/workflows/ci.yaml b/.github/workflows/pipeline.yaml similarity index 96% rename from .github/workflows/ci.yaml rename to .github/workflows/pipeline.yaml index 5a5fcf68b..14b62cd9f 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/pipeline.yaml @@ -12,7 +12,7 @@ concurrency: jobs: Security: - name: Secure and Lint + name: Security Pipeline uses: uc-cdis/.github/.github/workflows/securitypipeline.yaml@SDO13_pipeline_fix secrets: inherit diff --git a/workflowstodelete/image_build_push.yaml b/workflowstodelete/image_build_push.yaml deleted file mode 100644 index bea11d288..000000000 --- a/workflowstodelete/image_build_push.yaml +++ /dev/null @@ -1,13 +0,0 @@ -name: Build Image and Push to Quay - -on: push - -jobs: - ci: - name: Build Image and Push to Quay - uses: uc-cdis/.github/.github/workflows/image_build_push.yaml@master - secrets: - ECR_AWS_ACCESS_KEY_ID: ${{ secrets.ECR_AWS_ACCESS_KEY_ID }} - ECR_AWS_SECRET_ACCESS_KEY: ${{ secrets.ECR_AWS_SECRET_ACCESS_KEY }} - QUAY_USERNAME: ${{ secrets.QUAY_USERNAME }} - QUAY_ROBOT_TOKEN: ${{ secrets.QUAY_ROBOT_TOKEN }} diff --git a/workflowstodelete/wool.yml b/workflowstodelete/wool.yml deleted file mode 100644 index 59e96ae2f..000000000 --- a/workflowstodelete/wool.yml +++ /dev/null @@ -1,17 +0,0 @@ -on: - pull_request: - issue_comment: - types: [created, edited] - -name: Wool - -jobs: - runWool: - name: Run black - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@master - - - uses: uc-cdis/wool@master - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} From cc3e9d1601c7fe3d9175f54c9756fabda540ed0f Mon Sep 17 00:00:00 2001 From: cmlsn <100160785+cmlsn@users.noreply.github.com> Date: Thu, 25 Aug 2022 12:27:20 -0700 Subject: [PATCH 19/52] Update pipeline.yaml --- .github/workflows/pipeline.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pipeline.yaml b/.github/workflows/pipeline.yaml index 14b62cd9f..50e89cf00 100644 --- a/.github/workflows/pipeline.yaml +++ b/.github/workflows/pipeline.yaml @@ -13,7 +13,7 @@ concurrency: jobs: Security: name: Security Pipeline - uses: uc-cdis/.github/.github/workflows/securitypipeline.yaml@SDO13_pipeline_fix + uses: uc-cdis/.github/.github/workflows/securitypipeline.yaml@master secrets: inherit ci: From 6b1d42dfd4af05390d08bf4c649f3ddd1b154ea0 Mon Sep 17 00:00:00 2001 From: Alexander VT Date: Fri, 26 Aug 2022 11:55:40 -0500 Subject: [PATCH 20/52] fix(gs-cache): cleanup cache usage, don't try to save key_db_entry now that we already have expiration --- fence/blueprints/data/indexd.py | 34 ++++++++++++++------------------- tests/data/test_indexed_file.py | 4 +--- 2 files changed, 15 insertions(+), 23 deletions(-) diff --git a/fence/blueprints/data/indexd.py b/fence/blueprints/data/indexd.py index 91e477087..449976f6b 100755 --- a/fence/blueprints/data/indexd.py +++ b/fence/blueprints/data/indexd.py @@ -1105,7 +1105,7 @@ class GoogleStorageIndexedFileLocation(IndexedFileLocation): 
_assume_role_cache_gs is used for in mem caching of GCP role credentials
     """
 
-    # expected structore { proxy_group_id: (private_key, key_db_entry) }
+    # expected structure { proxy_group_id: (private_key, expires_at) }
     _assume_role_cache_gs = {}
 
     def get_resource_path(self):
@@ -1196,13 +1196,13 @@ def _generate_google_storage_signed_url(
         if proxy_group_id in self._assume_role_cache_gs:
             (
                 raw_private_key,
-                raw_key_db_entry,
                 expires_at,
-            ) = self._assume_role_cache_gs.get(proxy_group_id, (None, None, None))
-            if raw_key_db_entry and raw_key_db_entry.expires > expiration_time:
+            ) = self._assume_role_cache_gs.get(proxy_group_id, (None, None))
+
+            if expires_at and expires_at > expiration_time:
                 is_cached = True
                 private_key = raw_private_key
-                key_db_entry = raw_key_db_entry
+                expires_at = expires_at
             else:
                 del self._assume_role_cache_gs[proxy_group_id]
@@ -1214,23 +1214,17 @@ def _generate_google_storage_signed_url(
             .first()
         )
         if cache and cache.expires_at > expiration_time:
-            rv = (
-                json.loads(cache.gcp_private_key),
-                json.loads(cache.gcp_key_db_entry),
-                cache.expires_at,
-            )
-            self._assume_role_cache_gs[proxy_group_id] = rv
-            (
+            private_key = json.loads(cache.gcp_private_key)
+            expires_at = (cache.expires_at,)
+            self._assume_role_cache_gs[proxy_group_id] = (
                 private_key,
-                key_db_entry,
                 expires_at,
-            ) = self._assume_role_cache_gs.get(
-                proxy_group_id, (None, None, None)
             )
             is_cached = True
 
-        # check again to see if we cached the creds if not we need to
-        if is_cached == False:
+        # check again to see if we got cached creds from the database,
+        # if not we need to actually get the creds and then cache them
+        if not is_cached:
             private_key, key_db_entry = get_or_create_primary_service_account_key(
                 user_id=user_id, username=username, proxy_group_id=proxy_group_id
             )
@@ -1251,18 +1245,18 @@ def _generate_google_storage_signed_url(
             )
             self._assume_role_cache_gs[proxy_group_id] = (
                 private_key,
-                key_db_entry,
                 key_db_entry.expires,
             )
 
             db_entry = {}
             db_entry["gcp_proxy_group_id"] = proxy_group_id
             db_entry["gcp_private_key"] = json.dumps(str(private_key))
-            db_entry["gcp_key_db_entry"] = str(key_db_entry)
             db_entry["expires_at"] = key_db_entry.expires
 
             if hasattr(flask.current_app, "db"):  # we don't have db in startup
                 with flask.current_app.db.session as session:
+                    # we don't need to populate gcp_key_db_entry anymore, it was for
+                    # expiration, but now we have a specific field for that.
session.execute( """\ INSERT INTO gcp_assume_role_cache ( @@ -1274,7 +1268,7 @@ def _generate_google_storage_signed_url( :expires_at, :gcp_proxy_group_id, :gcp_private_key, - :gcp_key_db_entry + NULL ) ON CONFLICT (gcp_proxy_group_id) DO UPDATE SET expires_at = EXCLUDED.expires_at, gcp_proxy_group_id = EXCLUDED.gcp_proxy_group_id, diff --git a/tests/data/test_indexed_file.py b/tests/data/test_indexed_file.py index d280a3b6e..64129aa9a 100755 --- a/tests/data/test_indexed_file.py +++ b/tests/data/test_indexed_file.py @@ -441,9 +441,7 @@ def test_internal_get_gs_signed_url_cache_new_key_if_old_key_expired( ): indexed_file = IndexedFile(file_id="some id") google_object = GoogleStorageIndexedFileLocation("gs://some/location") - keydbentry = UserGoogleAccountToProxyGroup() - keydbentry.expires = 10 - google_object._assume_role_cache_gs = {"1": ("key", keydbentry, 10)} + google_object._assume_role_cache_gs = {"1": ("key", 10)} assert google_object._assume_role_cache_gs before_cache = db_session.query(AssumeRoleCacheGCP).first() From bd3be06f628af04806153a971c1a8d4e3bb7776e Mon Sep 17 00:00:00 2001 From: BinamB Date: Tue, 30 Aug 2022 10:59:17 -0500 Subject: [PATCH 21/52] remove expires at tuple --- fence/blueprints/data/indexd.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/fence/blueprints/data/indexd.py b/fence/blueprints/data/indexd.py index 449976f6b..a2e69f5a0 100755 --- a/fence/blueprints/data/indexd.py +++ b/fence/blueprints/data/indexd.py @@ -1215,7 +1215,7 @@ def _generate_google_storage_signed_url( ) if cache and cache.expires_at > expiration_time: private_key = json.loads(cache.gcp_private_key) - expires_at = (cache.expires_at,) + expires_at = cache.expires_at self._assume_role_cache_gs[proxy_group_id] = ( private_key, expires_at, From 734f827f0e4d4a993229c6328838a4f5559d9a99 Mon Sep 17 00:00:00 2001 From: BinamB Date: Tue, 30 Aug 2022 15:16:34 -0500 Subject: [PATCH 22/52] json formatting --- fence/blueprints/data/indexd.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/fence/blueprints/data/indexd.py b/fence/blueprints/data/indexd.py index a2e69f5a0..01254906e 100755 --- a/fence/blueprints/data/indexd.py +++ b/fence/blueprints/data/indexd.py @@ -1201,7 +1201,7 @@ def _generate_google_storage_signed_url( if expires_at and expires_at > expiration_time: is_cached = True - private_key = raw_private_key + private_key = json.loads(raw_private_key.replace("'", '"')) expires_at = expires_at else: del self._assume_role_cache_gs[proxy_group_id] From 2358fd36010d4d0970fea72ff121da60325fc6c3 Mon Sep 17 00:00:00 2001 From: BinamB Date: Tue, 30 Aug 2022 17:18:12 -0500 Subject: [PATCH 23/52] Fix json --- fence/blueprints/data/indexd.py | 2 +- poetry.lock | 273 ++++++++++++++++++++++++++------ 2 files changed, 225 insertions(+), 50 deletions(-) diff --git a/fence/blueprints/data/indexd.py b/fence/blueprints/data/indexd.py index 01254906e..3e6b29080 100755 --- a/fence/blueprints/data/indexd.py +++ b/fence/blueprints/data/indexd.py @@ -1201,7 +1201,7 @@ def _generate_google_storage_signed_url( if expires_at and expires_at > expiration_time: is_cached = True - private_key = json.loads(raw_private_key.replace("'", '"')) + private_key = json.loads(str(raw_private_key).replace("'", '"')) expires_at = expires_at else: del self._assume_role_cache_gs[proxy_group_id] diff --git a/poetry.lock b/poetry.lock index a6eba3293..6fad57faa 100644 --- a/poetry.lock +++ b/poetry.lock @@ -52,17 +52,17 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name 
= "attrs" -version = "21.4.0" +version = "22.1.0" description = "Classes Without Boilerplate" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.5" [package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "cloudpickle"] [[package]] name = "authlib" @@ -111,7 +111,7 @@ typing-extensions = ">=4.0.1" [[package]] name = "azure-storage-blob" -version = "12.13.0" +version = "12.13.1" description = "Microsoft Azure Blob Storage Client Library for Python" category = "main" optional = false @@ -232,25 +232,15 @@ version = "2.0.1" description = "This package includes several utility Python tools for the Gen3 stack." category = "main" optional = false -python-versions = "^3.6" -develop = false +python-versions = ">=3.6,<4.0" [package.dependencies] -cdiserrors = "^1.0.0" +cdiserrors = ">=1.0.0,<2.0.0" cryptography = ">=3.2" Flask = "*" PyJWT = "*" requests = "*" -[package.extras] -profiling = [] - -[package.source] -type = "git" -url = "https://github.com/uc-cdis/cdis-python-utils" -reference = "feat/deps" -resolved_reference = "9b0291b76ae88f9dc059d8f17552d6c55b817eaf" - [[package]] name = "cdisutilstest" version = "0.2.4" @@ -943,7 +933,7 @@ test = ["unittest2 (>=1.1.0)"] [[package]] name = "more-itertools" -version = "8.13.0" +version = "8.14.0" description = "More routines for operating on iterables, beyond itertools" category = "dev" optional = false @@ -1293,7 +1283,7 @@ pycrypto = ["pycrypto (>=2.6.0,<2.7.0)"] [[package]] name = "pytz" -version = "2022.1" +version = "2022.2.1" description = "World timezone definitions, modern and historical" category = "main" optional = false @@ -1384,7 +1374,7 @@ idna2008 = ["idna"] [[package]] name = "rsa" -version = "4.8" +version = "4.9" description = "Pure-Python RSA implementation" category = "main" optional = false @@ -1560,7 +1550,7 @@ testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytes [metadata] lock-version = "1.1" python-versions = "^3.6" -content-hash = "5c5e16608b2add61ab7c21c1f94fcb327eebf877c1ae4bae13bc4baf4dc659f3" +content-hash = "a60ba7adf2fc7f93b9505da6a8129b0fd4cd59822b835d483646e41759868158" [metadata.files] addict = [ @@ -1577,16 +1567,19 @@ async-generator = [ {file = "async_generator-1.10.tar.gz", hash = "sha256:6ebb3d106c12920aaae42ccb6f787ef5eefdcdd166ea3d628fa8476abe712144"}, ] atomicwrites = [] -attrs = [ - {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = 
"sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, - {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, -] +attrs = [] authlib = [ {file = "Authlib-0.11-py2.py3-none-any.whl", hash = "sha256:3a226f231e962a16dd5f6fcf0c113235805ba206e294717a64fa8e04ae3ad9c4"}, {file = "Authlib-0.11.tar.gz", hash = "sha256:9741db6de2950a0a5cefbdb72ec7ab12f7e9fd530ff47219f1530e79183cbaaf"}, ] -authutils = [] -azure-core = [] +authutils = [ + {file = "authutils-6.1.0-py3-none-any.whl", hash = "sha256:682dba636694c36fb35af1d9ff576bb8436337c3899f0ef434cda5918d661db9"}, + {file = "authutils-6.1.0.tar.gz", hash = "sha256:7263af0b2ce3a0db19236fd123b34f795d07e07111b7bd18a51808568ddfdc2e"}, +] +azure-core = [ + {file = "azure-core-1.24.2.zip", hash = "sha256:0f3a20d245659bf81fb3670070a5410c8d4a43298d5a981e62dce393000a9084"}, + {file = "azure_core-1.24.2-py3-none-any.whl", hash = "sha256:a76856fa83efe1925a4fd917dc179c7daa15917dd71da2774833fa82a96f3dfa"}, +] azure-storage-blob = [] backoff = [ {file = "backoff-1.11.1-py2.py3-none-any.whl", hash = "sha256:61928f8fa48d52e4faa81875eecf308eccfb1016b018bb6bd21e05b5d90a96c5"}, @@ -1638,8 +1631,76 @@ cdislogging = [ ] cdispyutils = [] cdisutilstest = [] -certifi = [] -cffi = [] +certifi = [ + {file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"}, + {file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"}, +] +cffi = [ + {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, + {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, + {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, + {file = 
"cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, + {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, + {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, + {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, + {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, + {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, + {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, + {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, + {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, + {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, + {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, + {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, + {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, + {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, + {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, + {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, + {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, +] charset-normalizer = [ {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, @@ -1652,7 +1713,10 @@ codacy-coverage = [ {file = "codacy-coverage-1.3.11.tar.gz", hash = "sha256:b94651934745c638a980ad8d67494077e60f71e19e29aad1c275b66e0a070cbc"}, {file = "codacy_coverage-1.3.11-py2.py3-none-any.whl", hash = "sha256:d8a1ce56b0dd156d6b1de14fa6217d32ec86097902f08a17ff2f95ba27264474"}, ] -colorama = [] +colorama = [ + {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, + {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, +] contextvars = [ {file = "contextvars-2.4.tar.gz", hash = "sha256:f38c908aaa59c14335eeea12abea5f443646216c4e29380d7bf34d2018e2c39e"}, ] @@ -1784,7 +1848,10 @@ google-auth-httplib2 = [ {file = "google-auth-httplib2-0.1.0.tar.gz", hash = "sha256:a07c39fd632becacd3f07718dfd6021bf396978f03ad3ce4321d060015cc30ac"}, {file = "google_auth_httplib2-0.1.0-py2.py3-none-any.whl", hash = "sha256:31e49c36c6b5643b57e82617cb3e021e3e1d2df9da63af67252c02fa9c1f4a10"}, ] -google-cloud-core = [] +google-cloud-core = [ + {file = "google-cloud-core-2.3.1.tar.gz", hash = "sha256:34334359cb04187bdc80ddcf613e462dfd7a3aabbc3fe4d118517ab4b9303d53"}, + {file = "google_cloud_core-2.3.1-py2.py3-none-any.whl", hash = "sha256:113ba4f492467d5bd442c8d724c1a25ad7384045c3178369038840ecdd19346c"}, +] google-cloud-storage = [ {file = "google-cloud-storage-1.44.0.tar.gz", hash = "sha256:29edbfeedd157d853049302bf5d104055c6f0cb7ef283537da3ce3f730073001"}, {file = "google_cloud_storage-1.44.0-py2.py3-none-any.whl", hash = "sha256:cd4a223e9c18d771721a85c98a9c01b97d257edddff833ba63b7b1f0b9b4d6e9"}, @@ -1838,7 +1905,10 @@ google-resumable-media = [ {file = "google-resumable-media-2.3.3.tar.gz", hash = "sha256:27c52620bd364d1c8116eaac4ea2afcbfb81ae9139fb3199652fcac1724bfb6c"}, {file = "google_resumable_media-2.3.3-py2.py3-none-any.whl", hash = "sha256:5b52774ea7a829a8cdaa8bd2d4c3d4bc660c91b30857ab2668d0eb830f4ea8c5"}, ] -googleapis-common-protos = [] +googleapis-common-protos = [ + {file = "googleapis-common-protos-1.56.3.tar.gz", hash = "sha256:6f1369b58ed6cf3a4b7054a44ebe8d03b29c309257583a2bbdc064cd1e4a1442"}, + {file = "googleapis_common_protos-1.56.3-py2.py3-none-any.whl", hash = "sha256:87955d7b3a73e6e803f2572a33179de23989ebba725e05ea42f24838b792e461"}, +] h11 = [ {file = "h11-0.12.0-py3-none-any.whl", hash = "sha256:36a3cb8c0a032f56e2da7084577878a035d3b61d104230d4bd49c0c6b555a9c6"}, {file = "h11-0.12.0.tar.gz", hash = 
"sha256:47222cb6067e4a307d535814917cd98fd0a57b6788ce715755fa2b6c28b56042"}, @@ -1936,20 +2006,73 @@ markdown = [ {file = "Markdown-3.3.7-py3-none-any.whl", hash = "sha256:f5da449a6e1c989a4cea2631aa8ee67caa5a2ef855d551c88f9e309f4634c621"}, {file = "Markdown-3.3.7.tar.gz", hash = "sha256:cbb516f16218e643d8e0a95b309f77eb118cb138d39a4f27851e6a63581db874"}, ] -markupsafe = [] +markupsafe = [ + {file = "MarkupSafe-1.1.1-cp27-cp27m-macosx_10_6_intel.whl", hash = "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161"}, + {file = "MarkupSafe-1.1.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7"}, + {file = "MarkupSafe-1.1.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183"}, + {file = "MarkupSafe-1.1.1-cp27-cp27m-win32.whl", hash = "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b"}, + {file = "MarkupSafe-1.1.1-cp27-cp27m-win_amd64.whl", hash = "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e"}, + {file = "MarkupSafe-1.1.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f"}, + {file = "MarkupSafe-1.1.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1"}, + {file = "MarkupSafe-1.1.1-cp34-cp34m-macosx_10_6_intel.whl", hash = "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5"}, + {file = "MarkupSafe-1.1.1-cp34-cp34m-manylinux1_i686.whl", hash = "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1"}, + {file = "MarkupSafe-1.1.1-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735"}, + {file = "MarkupSafe-1.1.1-cp34-cp34m-win32.whl", hash = "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21"}, + {file = "MarkupSafe-1.1.1-cp34-cp34m-win_amd64.whl", hash = "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235"}, + {file = "MarkupSafe-1.1.1-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b"}, + {file = "MarkupSafe-1.1.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f"}, + {file = "MarkupSafe-1.1.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905"}, + {file = "MarkupSafe-1.1.1-cp35-cp35m-win32.whl", hash = "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1"}, + {file = "MarkupSafe-1.1.1-cp35-cp35m-win_amd64.whl", hash = "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d"}, + {file = "MarkupSafe-1.1.1-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff"}, + {file = "MarkupSafe-1.1.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d53bc011414228441014aa71dbec320c66468c1030aae3a6e29778a3382d96e5"}, + {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473"}, + {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e"}, + {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:3b8a6499709d29c2e2399569d96719a1b21dcd94410a586a18526b143ec8470f"}, + {file = 
"MarkupSafe-1.1.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:84dee80c15f1b560d55bcfe6d47b27d070b4681c699c572af2e3c7cc90a3b8e0"}, + {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:b1dba4527182c95a0db8b6060cc98ac49b9e2f5e64320e2b56e47cb2831978c7"}, + {file = "MarkupSafe-1.1.1-cp36-cp36m-win32.whl", hash = "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66"}, + {file = "MarkupSafe-1.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5"}, + {file = "MarkupSafe-1.1.1-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d"}, + {file = "MarkupSafe-1.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:bf5aa3cbcfdf57fa2ee9cd1822c862ef23037f5c832ad09cfea57fa846dec193"}, + {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e"}, + {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6"}, + {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:6fffc775d90dcc9aed1b89219549b329a9250d918fd0b8fa8d93d154918422e1"}, + {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:a6a744282b7718a2a62d2ed9d993cad6f5f585605ad352c11de459f4108df0a1"}, + {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:195d7d2c4fbb0ee8139a6cf67194f3973a6b3042d742ebe0a9ed36d8b6f0c07f"}, + {file = "MarkupSafe-1.1.1-cp37-cp37m-win32.whl", hash = "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2"}, + {file = "MarkupSafe-1.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c"}, + {file = "MarkupSafe-1.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15"}, + {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2"}, + {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42"}, + {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:acf08ac40292838b3cbbb06cfe9b2cb9ec78fce8baca31ddb87aaac2e2dc3bc2"}, + {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:d9be0ba6c527163cbed5e0857c451fcd092ce83947944d6c14bc95441203f032"}, + {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:caabedc8323f1e93231b52fc32bdcde6db817623d33e100708d9a68e1f53b26b"}, + {file = "MarkupSafe-1.1.1-cp38-cp38-win32.whl", hash = "sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b"}, + {file = "MarkupSafe-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be"}, + {file = "MarkupSafe-1.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d73a845f227b0bfe8a7455ee623525ee656a9e2e749e4742706d80a6065d5e2c"}, + {file = "MarkupSafe-1.1.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:98bae9582248d6cf62321dcb52aaf5d9adf0bad3b40582925ef7c7f0ed85fceb"}, + {file = "MarkupSafe-1.1.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:2beec1e0de6924ea551859edb9e7679da6e4870d32cb766240ce17e0a0ba2014"}, + {file = "MarkupSafe-1.1.1-cp39-cp39-manylinux2010_i686.whl", hash = 
"sha256:7fed13866cf14bba33e7176717346713881f56d9d2bcebab207f7a036f41b850"}, + {file = "MarkupSafe-1.1.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:6f1e273a344928347c1290119b493a1f0303c52f5a5eae5f16d74f48c15d4a85"}, + {file = "MarkupSafe-1.1.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:feb7b34d6325451ef96bc0e36e1a6c0c1c64bc1fbec4b854f4529e51887b1621"}, + {file = "MarkupSafe-1.1.1-cp39-cp39-win32.whl", hash = "sha256:22c178a091fc6630d0d045bdb5992d2dfe14e3259760e713c490da5323866c39"}, + {file = "MarkupSafe-1.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7d644ddb4dbd407d31ffb699f1d140bc35478da613b441c582aeb7c43838dd8"}, + {file = "MarkupSafe-1.1.1.tar.gz", hash = "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b"}, +] mock = [ {file = "mock-2.0.0-py2.py3-none-any.whl", hash = "sha256:5ce3c71c5545b472da17b72268978914d0252980348636840bd34a00b5cc96c1"}, {file = "mock-2.0.0.tar.gz", hash = "sha256:b158b6df76edd239b8208d481dc46b6afd45a846b7812ff0ce58971cf5bc8bba"}, ] -more-itertools = [ - {file = "more-itertools-8.13.0.tar.gz", hash = "sha256:a42901a0a5b169d925f6f217cd5a190e32ef54360905b9c39ee7db5313bfec0f"}, - {file = "more_itertools-8.13.0-py3-none-any.whl", hash = "sha256:c5122bffc5f104d37c1626b8615b511f3427aa5389b94d61e5ef8236bfbc3ddb"}, -] +more-itertools = [] moto = [ {file = "moto-1.3.15-py2.py3-none-any.whl", hash = "sha256:3be7e1f406ef7e9c222dbcbfd8cefa2cb1062200e26deae49b5df446e17be3df"}, {file = "moto-1.3.15.tar.gz", hash = "sha256:fd98f7b219084ba8aadad263849c4dbe8be73979e035d8dc5c86e11a86f11b7f"}, ] -msrest = [] +msrest = [ + {file = "msrest-0.7.1-py3-none-any.whl", hash = "sha256:21120a810e1233e5e6cc7fe40b474eeb4ec6f757a15d7cf86702c369f9567c32"}, + {file = "msrest-0.7.1.zip", hash = "sha256:6e7661f46f3afd88b75667b7187a92829924446c7ea1d169be8c4bb7eeb788b9"}, +] oauth2client = [ {file = "oauth2client-3.0.0.tar.gz", hash = "sha256:5b5b056ec6f2304e7920b632885bd157fa71d1a7f3ddd00a43b1541a8d1a2460"}, ] @@ -1981,7 +2104,34 @@ prometheus-flask-exporter = [ {file = "prometheus_flask_exporter-0.18.7-py3-none-any.whl", hash = "sha256:38bc68db295d0f895ad0fb319b1bfd200ae273b33397ce497c9b96dceb708ce9"}, {file = "prometheus_flask_exporter-0.18.7.tar.gz", hash = "sha256:f1f6f23535479d41587a100a24a60cb9199c34986e95f6691496807ee5017e59"}, ] -protobuf = [] +protobuf = [ + {file = "protobuf-3.19.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f51d5a9f137f7a2cec2d326a74b6e3fc79d635d69ffe1b036d39fc7d75430d37"}, + {file = "protobuf-3.19.4-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:09297b7972da685ce269ec52af761743714996b4381c085205914c41fcab59fb"}, + {file = "protobuf-3.19.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:072fbc78d705d3edc7ccac58a62c4c8e0cec856987da7df8aca86e647be4e35c"}, + {file = "protobuf-3.19.4-cp310-cp310-win32.whl", hash = "sha256:7bb03bc2873a2842e5ebb4801f5c7ff1bfbdf426f85d0172f7644fcda0671ae0"}, + {file = "protobuf-3.19.4-cp310-cp310-win_amd64.whl", hash = "sha256:f358aa33e03b7a84e0d91270a4d4d8f5df6921abe99a377828839e8ed0c04e07"}, + {file = "protobuf-3.19.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:1c91ef4110fdd2c590effb5dca8fdbdcb3bf563eece99287019c4204f53d81a4"}, + {file = "protobuf-3.19.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c438268eebb8cf039552897d78f402d734a404f1360592fef55297285f7f953f"}, + {file = "protobuf-3.19.4-cp36-cp36m-win32.whl", hash = "sha256:835a9c949dc193953c319603b2961c5c8f4327957fe23d914ca80d982665e8ee"}, + {file = 
"protobuf-3.19.4-cp36-cp36m-win_amd64.whl", hash = "sha256:4276cdec4447bd5015453e41bdc0c0c1234eda08420b7c9a18b8d647add51e4b"}, + {file = "protobuf-3.19.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6cbc312be5e71869d9d5ea25147cdf652a6781cf4d906497ca7690b7b9b5df13"}, + {file = "protobuf-3.19.4-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:54a1473077f3b616779ce31f477351a45b4fef8c9fd7892d6d87e287a38df368"}, + {file = "protobuf-3.19.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:435bb78b37fc386f9275a7035fe4fb1364484e38980d0dd91bc834a02c5ec909"}, + {file = "protobuf-3.19.4-cp37-cp37m-win32.whl", hash = "sha256:16f519de1313f1b7139ad70772e7db515b1420d208cb16c6d7858ea989fc64a9"}, + {file = "protobuf-3.19.4-cp37-cp37m-win_amd64.whl", hash = "sha256:cdc076c03381f5c1d9bb1abdcc5503d9ca8b53cf0a9d31a9f6754ec9e6c8af0f"}, + {file = "protobuf-3.19.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:69da7d39e39942bd52848438462674c463e23963a1fdaa84d88df7fbd7e749b2"}, + {file = "protobuf-3.19.4-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:48ed3877fa43e22bcacc852ca76d4775741f9709dd9575881a373bd3e85e54b2"}, + {file = "protobuf-3.19.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd95d1dfb9c4f4563e6093a9aa19d9c186bf98fa54da5252531cc0d3a07977e7"}, + {file = "protobuf-3.19.4-cp38-cp38-win32.whl", hash = "sha256:b38057450a0c566cbd04890a40edf916db890f2818e8682221611d78dc32ae26"}, + {file = "protobuf-3.19.4-cp38-cp38-win_amd64.whl", hash = "sha256:7ca7da9c339ca8890d66958f5462beabd611eca6c958691a8fe6eccbd1eb0c6e"}, + {file = "protobuf-3.19.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:36cecbabbda242915529b8ff364f2263cd4de7c46bbe361418b5ed859677ba58"}, + {file = "protobuf-3.19.4-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:c1068287025f8ea025103e37d62ffd63fec8e9e636246b89c341aeda8a67c934"}, + {file = "protobuf-3.19.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96bd766831596d6014ca88d86dc8fe0fb2e428c0b02432fd9db3943202bf8c5e"}, + {file = "protobuf-3.19.4-cp39-cp39-win32.whl", hash = "sha256:84123274d982b9e248a143dadd1b9815049f4477dc783bf84efe6250eb4b836a"}, + {file = "protobuf-3.19.4-cp39-cp39-win_amd64.whl", hash = "sha256:3112b58aac3bac9c8be2b60a9daf6b558ca3f7681c130dcdd788ade7c9ffbdca"}, + {file = "protobuf-3.19.4-py2.py3-none-any.whl", hash = "sha256:8961c3a78ebfcd000920c9060a262f082f29838682b1f7201889300c1fbe0616"}, + {file = "protobuf-3.19.4.tar.gz", hash = "sha256:9df0c10adf3e83015ced42a9a7bd64e13d06c4cf45c340d2c63020ea04499d0a"}, +] psycopg2 = [ {file = "psycopg2-2.9.3-cp310-cp310-win32.whl", hash = "sha256:083707a696e5e1c330af2508d8fab36f9700b26621ccbcb538abe22e15485362"}, {file = "psycopg2-2.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:d3ca6421b942f60c008f81a3541e8faf6865a28d5a9b48544b0ee4f40cac7fca"}, @@ -2033,7 +2183,38 @@ pycparser = [ {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] -pycryptodome = [] +pycryptodome = [ + {file = "pycryptodome-3.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ff7ae90e36c1715a54446e7872b76102baa5c63aa980917f4aa45e8c78d1a3ec"}, + {file = "pycryptodome-3.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:2ffd8b31561455453ca9f62cb4c24e6b8d119d6d531087af5f14b64bee2c23e6"}, + {file = "pycryptodome-3.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = 
"sha256:2ea63d46157386c5053cfebcdd9bd8e0c8b7b0ac4a0507a027f5174929403884"}, + {file = "pycryptodome-3.15.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:7c9ed8aa31c146bef65d89a1b655f5f4eab5e1120f55fc297713c89c9e56ff0b"}, + {file = "pycryptodome-3.15.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:5099c9ca345b2f252f0c28e96904643153bae9258647585e5e6f649bb7a1844a"}, + {file = "pycryptodome-3.15.0-cp27-cp27m-manylinux2014_aarch64.whl", hash = "sha256:2ec709b0a58b539a4f9d33fb8508264c3678d7edb33a68b8906ba914f71e8c13"}, + {file = "pycryptodome-3.15.0-cp27-cp27m-win32.whl", hash = "sha256:fd2184aae6ee2a944aaa49113e6f5787cdc5e4db1eb8edb1aea914bd75f33a0c"}, + {file = "pycryptodome-3.15.0-cp27-cp27m-win_amd64.whl", hash = "sha256:7e3a8f6ee405b3bd1c4da371b93c31f7027944b2bcce0697022801db93120d83"}, + {file = "pycryptodome-3.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:b9c5b1a1977491533dfd31e01550ee36ae0249d78aae7f632590db833a5012b8"}, + {file = "pycryptodome-3.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:0926f7cc3735033061ef3cf27ed16faad6544b14666410727b31fea85a5b16eb"}, + {file = "pycryptodome-3.15.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:2aa55aae81f935a08d5a3c2042eb81741a43e044bd8a81ea7239448ad751f763"}, + {file = "pycryptodome-3.15.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:c3640deff4197fa064295aaac10ab49a0d55ef3d6a54ae1499c40d646655c89f"}, + {file = "pycryptodome-3.15.0-cp27-cp27mu-manylinux2014_aarch64.whl", hash = "sha256:045d75527241d17e6ef13636d845a12e54660aa82e823b3b3341bcf5af03fa79"}, + {file = "pycryptodome-3.15.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:9ee40e2168f1348ae476676a2e938ca80a2f57b14a249d8fe0d3cdf803e5a676"}, + {file = "pycryptodome-3.15.0-cp35-abi3-manylinux1_i686.whl", hash = "sha256:4c3ccad74eeb7b001f3538643c4225eac398c77d617ebb3e57571a897943c667"}, + {file = "pycryptodome-3.15.0-cp35-abi3-manylinux1_x86_64.whl", hash = "sha256:1b22bcd9ec55e9c74927f6b1f69843cb256fb5a465088ce62837f793d9ffea88"}, + {file = "pycryptodome-3.15.0-cp35-abi3-manylinux2010_i686.whl", hash = "sha256:57f565acd2f0cf6fb3e1ba553d0cb1f33405ec1f9c5ded9b9a0a5320f2c0bd3d"}, + {file = "pycryptodome-3.15.0-cp35-abi3-manylinux2010_x86_64.whl", hash = "sha256:4b52cb18b0ad46087caeb37a15e08040f3b4c2d444d58371b6f5d786d95534c2"}, + {file = "pycryptodome-3.15.0-cp35-abi3-manylinux2014_aarch64.whl", hash = "sha256:092a26e78b73f2530b8bd6b3898e7453ab2f36e42fd85097d705d6aba2ec3e5e"}, + {file = "pycryptodome-3.15.0-cp35-abi3-win32.whl", hash = "sha256:e244ab85c422260de91cda6379e8e986405b4f13dc97d2876497178707f87fc1"}, + {file = "pycryptodome-3.15.0-cp35-abi3-win_amd64.whl", hash = "sha256:c77126899c4b9c9827ddf50565e93955cb3996813c18900c16b2ea0474e130e9"}, + {file = "pycryptodome-3.15.0-pp27-pypy_73-macosx_10_9_x86_64.whl", hash = "sha256:9eaadc058106344a566dc51d3d3a758ab07f8edde013712bc8d22032a86b264f"}, + {file = "pycryptodome-3.15.0-pp27-pypy_73-manylinux1_x86_64.whl", hash = "sha256:ff287bcba9fbeb4f1cccc1f2e90a08d691480735a611ee83c80a7d74ad72b9d9"}, + {file = "pycryptodome-3.15.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:60b4faae330c3624cc5a546ba9cfd7b8273995a15de94ee4538130d74953ec2e"}, + {file = "pycryptodome-3.15.0-pp27-pypy_73-win32.whl", hash = "sha256:a8f06611e691c2ce45ca09bbf983e2ff2f8f4f87313609d80c125aff9fad6e7f"}, + {file = "pycryptodome-3.15.0-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b9cc96e274b253e47ad33ae1fccc36ea386f5251a823ccb50593a935db47fdd2"}, + {file = 
"pycryptodome-3.15.0-pp36-pypy36_pp73-manylinux1_x86_64.whl", hash = "sha256:ecaaef2d21b365d9c5ca8427ffc10cebed9d9102749fd502218c23cb9a05feb5"}, + {file = "pycryptodome-3.15.0-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:d2a39a66057ab191e5c27211a7daf8f0737f23acbf6b3562b25a62df65ffcb7b"}, + {file = "pycryptodome-3.15.0-pp36-pypy36_pp73-win32.whl", hash = "sha256:9c772c485b27967514d0df1458b56875f4b6d025566bf27399d0c239ff1b369f"}, + {file = "pycryptodome-3.15.0.tar.gz", hash = "sha256:9135dddad504592bcc18b0d2d95ce86c3a5ea87ec6447ef25cfedea12d6018b8"}, +] pyjwt = [ {file = "PyJWT-1.7.1-py2.py3-none-any.whl", hash = "sha256:5c6eca3c2940464d106b99ba83b00c6add741c9becaec087fb7ccdefea71350e"}, {file = "PyJWT-1.7.1.tar.gz", hash = "sha256:8d59a976fb773f3e6a39c85636357c4f0e242707394cadadd9814f5cbaa20e96"}, @@ -2074,10 +2255,7 @@ python-jose = [ {file = "python-jose-2.0.2.tar.gz", hash = "sha256:391f860dbe274223d73dd87de25e4117bf09e8fe5f93a417663b1f2d7b591165"}, {file = "python_jose-2.0.2-py2.py3-none-any.whl", hash = "sha256:3b35cdb0e55a88581ff6d3f12de753aa459e940b50fe7ca5aa25149bc94cb37b"}, ] -pytz = [ - {file = "pytz-2022.1-py2.py3-none-any.whl", hash = "sha256:e68985985296d9a66a881eb3193b0906246245294a881e7c8afe623866ac6a5c"}, - {file = "pytz-2022.1.tar.gz", hash = "sha256:1e760e2fe6a8163bc0b3d9a19c4f84342afa0a2affebfaa84b01b978a02ecaa7"}, -] +pytz = [] pyyaml = [ {file = "PyYAML-5.4.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922"}, {file = "PyYAML-5.4.1-cp27-cp27m-win32.whl", hash = "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393"}, @@ -2129,10 +2307,7 @@ rfc3986 = [ {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"}, {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"}, ] -rsa = [ - {file = "rsa-4.8-py3-none-any.whl", hash = "sha256:95c5d300c4e879ee69708c428ba566c59478fd653cc3a22243eeb8ed846950bb"}, - {file = "rsa-4.8.tar.gz", hash = "sha256:5c6bd9dc7a543b7fe4304a631f8a8a3b674e2bbfc49c2ae96200cdbe55df6b17"}, -] +rsa = [] s3transfer = [ {file = "s3transfer-0.2.1-py2.py3-none-any.whl", hash = "sha256:b780f2411b824cb541dbcd2c713d0cb61c7d1bcadae204cdddda2b35cef493ba"}, {file = "s3transfer-0.2.1.tar.gz", hash = "sha256:6efc926738a3cd576c2a79725fed9afde92378aa5c6a957e3af010cb019fac9d"}, From 250b8e02d7ce9d882a6024f1e206fac4d6f2c798 Mon Sep 17 00:00:00 2001 From: BinamB Date: Tue, 30 Aug 2022 18:01:13 -0500 Subject: [PATCH 24/52] fix json --- fence/blueprints/data/indexd.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/fence/blueprints/data/indexd.py b/fence/blueprints/data/indexd.py index 3e6b29080..df717e1c6 100755 --- a/fence/blueprints/data/indexd.py +++ b/fence/blueprints/data/indexd.py @@ -1214,7 +1214,7 @@ def _generate_google_storage_signed_url( .first() ) if cache and cache.expires_at > expiration_time: - private_key = json.loads(cache.gcp_private_key) + private_key = json.loads(str(cache.gcp_private_key).replace("'", '"')) expires_at = cache.expires_at self._assume_role_cache_gs[proxy_group_id] = ( private_key, From 4f7f72e3cd723fc0d055b83dd18fce61ba33ce31 Mon Sep 17 00:00:00 2001 From: BinamB Date: Wed, 31 Aug 2022 13:35:38 -0500 Subject: [PATCH 25/52] dumps --- fence/blueprints/data/indexd.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/fence/blueprints/data/indexd.py b/fence/blueprints/data/indexd.py 
index df717e1c6..08c3bdb52 100755 --- a/fence/blueprints/data/indexd.py +++ b/fence/blueprints/data/indexd.py @@ -1214,7 +1214,9 @@ def _generate_google_storage_signed_url( .first() ) if cache and cache.expires_at > expiration_time: - private_key = json.loads(str(cache.gcp_private_key).replace("'", '"')) + private_key = json.loads( + json.dumps(cache.gcp_private_key) + ) expires_at = cache.expires_at self._assume_role_cache_gs[proxy_group_id] = ( private_key, From 6206f8d894129a862b95ad06857a2d9c243c5469 Mon Sep 17 00:00:00 2001 From: Clint Date: Wed, 31 Aug 2022 12:50:11 -0700 Subject: [PATCH 26/52] Chore: Testing codecoverage.yaml --- .github/workflows/codecoverage.yaml | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) create mode 100644 .github/workflows/codecoverage.yaml diff --git a/.github/workflows/codecoverage.yaml b/.github/workflows/codecoverage.yaml new file mode 100644 index 000000000..9619f3c2e --- /dev/null +++ b/.github/workflows/codecoverage.yaml @@ -0,0 +1,24 @@ +name: pytest-coverage-commentator +on: + pull_request: + branches: + - '*' +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Set up Python 3.8 + uses: actions/setup-python@v2 + with: + python-version: 3.8 + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install flake8 pytest pytest-cov + if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + - name: Build coverage file + run: | + pytest --cache-clear --cov=app test/ > pytest-coverage.txt + - name: Comment coverage + uses: coroo/pytest-coverage-commentator@v1.0.2 From b313de593b35ea152e3d0963deeefa3cd34710bc Mon Sep 17 00:00:00 2001 From: Clint Date: Wed, 31 Aug 2022 12:55:51 -0700 Subject: [PATCH 27/52] Chore: Testing codecoverage.yaml --- .github/workflows/codecoverage.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/codecoverage.yaml b/.github/workflows/codecoverage.yaml index 9619f3c2e..088d2ddd3 100644 --- a/.github/workflows/codecoverage.yaml +++ b/.github/workflows/codecoverage.yaml @@ -15,10 +15,10 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - pip install flake8 pytest pytest-cov - if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + pip install poetry coverage flake8 pytest pytest-cov + if [ -f poetry.lock ]; then pip install -r poetry.lock; fi - name: Build coverage file run: | - pytest --cache-clear --cov=app test/ > pytest-coverage.txt + poetry run coverage report -m pytest --cache-clear --cov=app test/ > pytest-coverage.txt - name: Comment coverage uses: coroo/pytest-coverage-commentator@v1.0.2 From c8617e830baa7beb5be6604f3d217b7f65201e6d Mon Sep 17 00:00:00 2001 From: Clint Date: Wed, 31 Aug 2022 12:58:31 -0700 Subject: [PATCH 28/52] Chore: Testing codecoverage.yaml --- .github/workflows/codecoverage.yaml | 66 +++++++++++++++++++++-------- 1 file changed, 49 insertions(+), 17 deletions(-) diff --git a/.github/workflows/codecoverage.yaml b/.github/workflows/codecoverage.yaml index 088d2ddd3..09d6355bd 100644 --- a/.github/workflows/codecoverage.yaml +++ b/.github/workflows/codecoverage.yaml @@ -1,24 +1,56 @@ -name: pytest-coverage-commentator -on: - pull_request: - branches: - - '*' +name: test + +on: push + jobs: - build: + test: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 - - name: Set up Python 3.8 + #---------------------------------------------- + # check-out repo and set-up python + 
#---------------------------------------------- + - name: Check out repository + uses: actions/checkout@v2 + - name: Set up python + id: setup-python uses: actions/setup-python@v2 with: - python-version: 3.8 + python-version: 3.9 + #---------------------------------------------- + # ----- install & configure poetry ----- + #---------------------------------------------- + - name: Install Poetry + uses: snok/install-poetry@v1 + with: + virtualenvs-create: true + virtualenvs-in-project: true + installer-parallel: true + + #---------------------------------------------- + # load cached venv if cache exists + #---------------------------------------------- + - name: Load cached venv + id: cached-poetry-dependencies + uses: actions/cache@v2 + with: + path: .venv + key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }} + #---------------------------------------------- + # install dependencies if cache does not exist + #---------------------------------------------- - name: Install dependencies + if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' + run: poetry install --no-interaction --no-root + #---------------------------------------------- + # install your root project, if required + #---------------------------------------------- + - name: Install library + run: poetry install --no-interaction + #---------------------------------------------- + # run test suite + #---------------------------------------------- + - name: Run tests run: | - python -m pip install --upgrade pip - pip install poetry coverage flake8 pytest pytest-cov - if [ -f poetry.lock ]; then pip install -r poetry.lock; fi - - name: Build coverage file - run: | - poetry run coverage report -m pytest --cache-clear --cov=app test/ > pytest-coverage.txt - - name: Comment coverage - uses: coroo/pytest-coverage-commentator@v1.0.2 + source .venv/bin/activate + pytest tests/ + coverage report From a425862ca86e6a8c4ed58d8bf9d20364af989472 Mon Sep 17 00:00:00 2001 From: BinamB Date: Wed, 31 Aug 2022 15:17:45 -0500 Subject: [PATCH 29/52] revert and fix json storage --- fence/blueprints/data/indexd.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/fence/blueprints/data/indexd.py b/fence/blueprints/data/indexd.py index 08c3bdb52..8fcd8caf3 100755 --- a/fence/blueprints/data/indexd.py +++ b/fence/blueprints/data/indexd.py @@ -1201,7 +1201,7 @@ def _generate_google_storage_signed_url( if expires_at and expires_at > expiration_time: is_cached = True - private_key = json.loads(str(raw_private_key).replace("'", '"')) + private_key = raw_private_key expires_at = expires_at else: del self._assume_role_cache_gs[proxy_group_id] @@ -1214,9 +1214,7 @@ def _generate_google_storage_signed_url( .first() ) if cache and cache.expires_at > expiration_time: - private_key = json.loads( - json.dumps(cache.gcp_private_key) - ) + private_key = json.loads(cache.gcp_private_key) expires_at = cache.expires_at self._assume_role_cache_gs[proxy_group_id] = ( private_key, @@ -1252,7 +1250,7 @@ def _generate_google_storage_signed_url( db_entry = {} db_entry["gcp_proxy_group_id"] = proxy_group_id - db_entry["gcp_private_key"] = json.dumps(str(private_key)) + db_entry["gcp_private_key"] = str(private_key) db_entry["expires_at"] = key_db_entry.expires if hasattr(flask.current_app, "db"): # we don't have db in startup From 2840843a053729d3fde9121503dba2202e171776 Mon Sep 17 00:00:00 2001 From: BinamB Date: Thu, 1 Sep 2022 11:59:42 -0500 Subject: [PATCH 30/52] debug --- 
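A note on this stretch of the series: patches 24 through 33 chase a single serialization bug. gcp_private_key is a dict, and writing it into the AssumeRoleCacheGCP table via str() produces single-quoted pseudo-JSON that json.loads cannot parse when the cache is read back. Below is a minimal standalone sketch of the failure and of the round-trip that patch 33 settles on (json.dumps on write, json.loads on read); the key dict is a made-up stand-in, not real key material:

    import json

    # Stand-in for the service account key dict (illustrative values only).
    private_key = {"type": "service_account", "private_key": "not-a-real-key"}

    # str() renders the dict with single quotes, which is not valid JSON,
    # so parsing the cached column back fails:
    try:
        json.loads(str(private_key))
    except json.JSONDecodeError:
        pass  # the failure the replace("'", '"') hack worked around

    # json.dumps() on write plus json.loads() on read round-trips cleanly:
    assert json.loads(json.dumps(private_key)) == private_key

The replace("'", '"') variant from patches 24 and 32 only holds while no value contains a quote character, which is why the series keeps returning to this spot.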
fence/blueprints/data/indexd.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/fence/blueprints/data/indexd.py b/fence/blueprints/data/indexd.py index 8fcd8caf3..dc81e8617 100755 --- a/fence/blueprints/data/indexd.py +++ b/fence/blueprints/data/indexd.py @@ -1200,6 +1200,7 @@ def _generate_google_storage_signed_url( ) = self._assume_role_cache_gs.get(proxy_group_id, (None, None)) if expires_at and expires_at > expiration_time: + print("-------------0--------------------") is_cached = True private_key = raw_private_key expires_at = expires_at @@ -1214,6 +1215,8 @@ def _generate_google_storage_signed_url( .first() ) if cache and cache.expires_at > expiration_time: + print("-------------1--------------------") + private_key = json.loads(cache.gcp_private_key) expires_at = cache.expires_at self._assume_role_cache_gs[proxy_group_id] = ( @@ -1225,6 +1228,8 @@ def _generate_google_storage_signed_url( # check again to see if we got cached creds from the database, # if not we need to actually get the creds and then cache them if not is_cached: + print("-------------2--------------------") + private_key, key_db_entry = get_or_create_primary_service_account_key( user_id=user_id, username=username, proxy_group_id=proxy_group_id ) @@ -1240,6 +1245,8 @@ def _generate_google_storage_signed_url( # before the expiration of the url then the url will NOT work # (even though the url itself isn't expired) if key_db_entry.expires < expiration_time: + print("-------------3--------------------") + private_key = create_primary_service_account_key( user_id=user_id, username=username, proxy_group_id=proxy_group_id ) From 3f42d53a07e010d69b6be449c4ca6f5a3955958c Mon Sep 17 00:00:00 2001 From: BinamB Date: Thu, 1 Sep 2022 14:15:48 -0500 Subject: [PATCH 31/52] clean up --- fence/blueprints/data/indexd.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/fence/blueprints/data/indexd.py b/fence/blueprints/data/indexd.py index dc81e8617..8fcd8caf3 100755 --- a/fence/blueprints/data/indexd.py +++ b/fence/blueprints/data/indexd.py @@ -1200,7 +1200,6 @@ def _generate_google_storage_signed_url( ) = self._assume_role_cache_gs.get(proxy_group_id, (None, None)) if expires_at and expires_at > expiration_time: - print("-------------0--------------------") is_cached = True private_key = raw_private_key expires_at = expires_at @@ -1215,8 +1214,6 @@ def _generate_google_storage_signed_url( .first() ) if cache and cache.expires_at > expiration_time: - print("-------------1--------------------") - private_key = json.loads(cache.gcp_private_key) expires_at = cache.expires_at self._assume_role_cache_gs[proxy_group_id] = ( @@ -1228,8 +1225,6 @@ def _generate_google_storage_signed_url( # check again to see if we got cached creds from the database, # if not we need to actually get the creds and then cache them if not is_cached: - print("-------------2--------------------") - private_key, key_db_entry = get_or_create_primary_service_account_key( user_id=user_id, username=username, proxy_group_id=proxy_group_id ) @@ -1245,8 +1240,6 @@ def _generate_google_storage_signed_url( # before the expiration of the url then the url will NOT work # (even though the url itself isn't expired) if key_db_entry.expires < expiration_time: - print("-------------3--------------------") - private_key = create_primary_service_account_key( user_id=user_id, username=username, proxy_group_id=proxy_group_id ) From 577f403eca9f0a930eb008cd7ef8e239d50eb87d Mon Sep 17 00:00:00 2001 From: BinamB Date: Thu, 1 Sep 2022 14:43:04 -0500 Subject: [PATCH 32/52] 
fix cache --- fence/blueprints/data/indexd.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/fence/blueprints/data/indexd.py b/fence/blueprints/data/indexd.py index 8fcd8caf3..ae4bb650d 100755 --- a/fence/blueprints/data/indexd.py +++ b/fence/blueprints/data/indexd.py @@ -1214,7 +1214,9 @@ def _generate_google_storage_signed_url( .first() ) if cache and cache.expires_at > expiration_time: - private_key = json.loads(cache.gcp_private_key) + private_key = json.loads( + str(cache.gcp_private_key).replace("'", '"') + ) expires_at = cache.expires_at self._assume_role_cache_gs[proxy_group_id] = ( private_key, From 6dd441bc1ed6eef161385e1c08f238bfcd0ffcfa Mon Sep 17 00:00:00 2001 From: BinamB Date: Thu, 1 Sep 2022 15:13:05 -0500 Subject: [PATCH 33/52] resolve --- fence/blueprints/data/indexd.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/fence/blueprints/data/indexd.py b/fence/blueprints/data/indexd.py index ae4bb650d..3b1a31e08 100755 --- a/fence/blueprints/data/indexd.py +++ b/fence/blueprints/data/indexd.py @@ -1214,9 +1214,7 @@ def _generate_google_storage_signed_url( .first() ) if cache and cache.expires_at > expiration_time: - private_key = json.loads( - str(cache.gcp_private_key).replace("'", '"') - ) + private_key = json.loads(cache.gcp_private_key) expires_at = cache.expires_at self._assume_role_cache_gs[proxy_group_id] = ( private_key, @@ -1252,7 +1250,7 @@ def _generate_google_storage_signed_url( db_entry = {} db_entry["gcp_proxy_group_id"] = proxy_group_id - db_entry["gcp_private_key"] = str(private_key) + db_entry["gcp_private_key"] = json.dumps(private_key) db_entry["expires_at"] = key_db_entry.expires if hasattr(flask.current_app, "db"): # we don't have db in startup From c370f30f928fee51f8fd452444d9c39b9aa79863 Mon Sep 17 00:00:00 2001 From: BinamB Date: Tue, 6 Sep 2022 11:36:44 -0500 Subject: [PATCH 34/52] add unit test --- tests/data/test_indexed_file.py | 102 ++++++++++++++++++++++++++++++++ 1 file changed, 102 insertions(+) diff --git a/tests/data/test_indexed_file.py b/tests/data/test_indexed_file.py index 64129aa9a..6a720bb15 100755 --- a/tests/data/test_indexed_file.py +++ b/tests/data/test_indexed_file.py @@ -459,6 +459,108 @@ def test_internal_get_gs_signed_url_cache_new_key_if_old_key_expired( assert before_cache != after_cache +@mock.patch.object(utils, "_get_proxy_group_id", return_value=None) +@mock.patch.object(indexd, "get_or_create_proxy_group_id", return_value="1") +def test_internal_get_gs_signed_url_clear_cache( + mock_get_or_create_proxy_group_id, + mock_get_proxy_group_id, + app, + indexd_client_accepting_record, + db_session, +): + """ + Test fence.blueprints.data.indexd.GoogleStorageIndexedFileLocation._generate_google_storage_signed_url does not use cached key if its expired + + create presigned url + set cache in db + clear cache + create presigned url again + make sure cache is set correctly + """ + # db_session.add( + # AssumeRoleCacheGCP( + # gcp_proxy_group_id="1", + # expires_at=0, + # gcp_private_key="key", + # gcp_key_db_entry='{"1":("key", keydbentry)}', + # ) + # ) + # db_session.commit() + + indexd_record_with_non_public_authz_and_public_acl_populated = { + "urls": [f"gs://some/location"], + "authz": ["/programs/DEV/projects/test"], + "acl": ["*"], + } + indexd_client_accepting_record( + indexd_record_with_non_public_authz_and_public_acl_populated + ) + + mock_google_service_account_key = GoogleServiceAccountKey() + mock_google_service_account_key.expires = 10 + 
mock_google_service_account_key.private_key = "key" + + with mock.patch( + "fence.blueprints.data.indexd.get_or_create_primary_service_account_key", + return_value=("sa_private_key", mock_google_service_account_key), + ): + with mock.patch( + "fence.blueprints.data.indexd.create_primary_service_account_key", + return_value=("sa_private_key"), + ): + with mock.patch.object( + cirrus.google_cloud.utils, + "get_signed_url", + return_value="https://cloud.google.com/compute/url", + ): + indexed_file = IndexedFile(file_id="some id") + google_object = GoogleStorageIndexedFileLocation("gs://some/location") + google_object._assume_role_cache_gs = {"1": ("key", 10)} + + assert google_object._assume_role_cache_gs + before_cache = db_session.query(AssumeRoleCacheGCP).first() + + google_object._generate_google_storage_signed_url( + http_verb="GET", + resource_path="gs://some/location", + expires_in=0, + user_id=1, + username="some user", + r_pays_project=None, + ) + + after_cache = db_session.query(AssumeRoleCacheGCP).first() + + assert after_cache + assert before_cache != after_cache + assert ( + str(type(after_cache)) + == "<class 'fence.models.AssumeRoleCacheGCP'>" + ) + + db_session.delete(after_cache) + cleared_cache = db_session.query(AssumeRoleCacheGCP).first() + + assert cleared_cache is None + + google_object._generate_google_storage_signed_url( + http_verb="GET", + resource_path="gs://some/location", + expires_in=0, + user_id=1, + username="some user", + r_pays_project=None, + ) + + redo_cache = db_session.query(AssumeRoleCacheGCP).first() + + assert redo_cache + assert cleared_cache != redo_cache + assert ( + str(type(redo_cache)) == "<class 'fence.models.AssumeRoleCacheGCP'>" + ) + + def test_set_acl_missing_unauthorized( app, supported_protocol, indexd_client_accepting_record ): From 7097e67e8aab2ab75a518341af9232ceb6715383 Mon Sep 17 00:00:00 2001 From: BinamB Date: Tue, 6 Sep 2022 12:00:03 -0500 Subject: [PATCH 35/52] prints --- fence/blueprints/data/indexd.py | 18 ++++++++++++++++++ tests/data/test_indexed_file.py | 4 +++- 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/fence/blueprints/data/indexd.py b/fence/blueprints/data/indexd.py index 3b1a31e08..7509c8ecc 100755 --- a/fence/blueprints/data/indexd.py +++ b/fence/blueprints/data/indexd.py @@ -1190,6 +1190,7 @@ def _generate_google_storage_signed_url( proxy_group_id = get_or_create_proxy_group_id( user_id=user_id, username=username ) + print("------------------0--------------------") expiration_time = int(time.time()) + expires_in is_cached = False @@ -1198,6 +1199,9 @@ def _generate_google_storage_signed_url( raw_private_key, expires_at, ) = self._assume_role_cache_gs.get(proxy_group_id, (None, None)) + print("------------------1----------------") + print(self._assume_role_cache_gs) + print(raw_private_key) if expires_at and expires_at > expiration_time: is_cached = True @@ -1214,7 +1218,10 @@ def _generate_google_storage_signed_url( .first() ) if cache and cache.expires_at > expiration_time: + print("------------------2----------------") + private_key = json.loads(cache.gcp_private_key) + print(private_key) expires_at = cache.expires_at self._assume_role_cache_gs[proxy_group_id] = ( private_key, @@ -1228,6 +1235,8 @@ def _generate_google_storage_signed_url( private_key, key_db_entry = get_or_create_primary_service_account_key( user_id=user_id, username=username, proxy_group_id=proxy_group_id ) + print("------------------3---------------") + print(private_key) # Make sure the service account key expiration is later # than the expiration for the signed url. 
If it's not, we need to @@ -1243,6 +1252,8 @@ def _generate_google_storage_signed_url( private_key = create_primary_service_account_key( user_id=user_id, username=username, proxy_group_id=proxy_group_id ) + print("------------------4----------------") + print(private_key) self._assume_role_cache_gs[proxy_group_id] = ( private_key, key_db_entry.expires, @@ -1250,6 +1261,9 @@ def _generate_google_storage_signed_url( db_entry = {} db_entry["gcp_proxy_group_id"] = proxy_group_id + print("------------------5----------------") + print(private_key) + print(type(private_key)) db_entry["gcp_private_key"] = json.dumps(private_key) db_entry["expires_at"] = key_db_entry.expires @@ -1287,6 +1301,10 @@ def _generate_google_storage_signed_url( # use configured project if it exists and no user project was given if config["BILLING_PROJECT_FOR_SIGNED_URLS"] and not r_pays_project: r_pays_project = config["BILLING_PROJECT_FOR_SIGNED_URLS"] + + print("------------------6----------------") + print(private_key) + final_url = cirrus.google_cloud.utils.get_signed_url( resource_path, http_verb, diff --git a/tests/data/test_indexed_file.py b/tests/data/test_indexed_file.py index 6a720bb15..0133e59ec 100755 --- a/tests/data/test_indexed_file.py +++ b/tests/data/test_indexed_file.py @@ -517,7 +517,6 @@ def test_internal_get_gs_signed_url_clear_cache( google_object = GoogleStorageIndexedFileLocation("gs://some/location") google_object._assume_role_cache_gs = {"1": ("key", 10)} - assert google_object._assume_role_cache_gs before_cache = db_session.query(AssumeRoleCacheGCP).first() google_object._generate_google_storage_signed_url( @@ -554,6 +553,9 @@ def test_internal_get_gs_signed_url_clear_cache( redo_cache = db_session.query(AssumeRoleCacheGCP).first() + print("----------------------------------------------------") + print(redo_cache.gcp_proxy_group_id) + assert redo_cache assert cleared_cache != redo_cache assert ( From aba35ddd01aa2db620e136a66a4d0ebb73a68289 Mon Sep 17 00:00:00 2001 From: BinamB Date: Tue, 6 Sep 2022 14:19:00 -0500 Subject: [PATCH 36/52] add json parse test --- fence/blueprints/data/indexd.py | 18 ------------ tests/data/test_indexed_file.py | 49 ++++++++++++++++++++------------- 2 files changed, 30 insertions(+), 37 deletions(-) diff --git a/fence/blueprints/data/indexd.py b/fence/blueprints/data/indexd.py index 7509c8ecc..00502e129 100755 --- a/fence/blueprints/data/indexd.py +++ b/fence/blueprints/data/indexd.py @@ -1190,7 +1190,6 @@ def _generate_google_storage_signed_url( proxy_group_id = get_or_create_proxy_group_id( user_id=user_id, username=username ) - print("------------------0--------------------") expiration_time = int(time.time()) + expires_in is_cached = False @@ -1199,9 +1198,6 @@ def _generate_google_storage_signed_url( raw_private_key, expires_at, ) = self._assume_role_cache_gs.get(proxy_group_id, (None, None)) - print("------------------1----------------") - print(self._assume_role_cache_gs) - print(raw_private_key) if expires_at and expires_at > expiration_time: is_cached = True @@ -1218,10 +1214,7 @@ def _generate_google_storage_signed_url( .first() ) if cache and cache.expires_at > expiration_time: - print("------------------2----------------") - private_key = json.loads(cache.gcp_private_key) - print(private_key) expires_at = cache.expires_at self._assume_role_cache_gs[proxy_group_id] = ( private_key, @@ -1235,9 +1228,6 @@ def _generate_google_storage_signed_url( private_key, key_db_entry = get_or_create_primary_service_account_key( user_id=user_id, username=username, 
proxy_group_id=proxy_group_id ) - print("------------------3---------------") - print(private_key) - # Make sure the service account key expiration is later # than the expiration for the signed url. If it's not, we need to # provision a new service account key. @@ -1252,8 +1242,6 @@ def _generate_google_storage_signed_url( private_key = create_primary_service_account_key( user_id=user_id, username=username, proxy_group_id=proxy_group_id ) - print("------------------4----------------") - print(private_key) self._assume_role_cache_gs[proxy_group_id] = ( private_key, key_db_entry.expires, @@ -1261,9 +1249,6 @@ def _generate_google_storage_signed_url( db_entry = {} db_entry["gcp_proxy_group_id"] = proxy_group_id - print("------------------5----------------") - print(private_key) - print(type(private_key)) db_entry["gcp_private_key"] = json.dumps(private_key) db_entry["expires_at"] = key_db_entry.expires @@ -1302,9 +1287,6 @@ def _generate_google_storage_signed_url( if config["BILLING_PROJECT_FOR_SIGNED_URLS"] and not r_pays_project: r_pays_project = config["BILLING_PROJECT_FOR_SIGNED_URLS"] - print("------------------6----------------") - print(private_key) - final_url = cirrus.google_cloud.utils.get_signed_url( resource_path, http_verb, diff --git a/tests/data/test_indexed_file.py b/tests/data/test_indexed_file.py index 0133e59ec..956bf99ad 100755 --- a/tests/data/test_indexed_file.py +++ b/tests/data/test_indexed_file.py @@ -1,6 +1,7 @@ """ Test fence.blueprints.data.indexd.IndexedFile """ +import json from unittest import mock from mock import patch @@ -425,14 +426,19 @@ def test_internal_get_gs_signed_url_cache_new_key_if_old_key_expired( mock_google_service_account_key = GoogleServiceAccountKey() mock_google_service_account_key.expires = 10 mock_google_service_account_key.private_key = "key" + sa_private_key = { + "type": "service_account", + "project_id": "project_id", + "private_key": "pdashoidhaspidhaspidhiash", + } with mock.patch( "fence.blueprints.data.indexd.get_or_create_primary_service_account_key", - return_value=("sa_private_key", mock_google_service_account_key), + return_value=(sa_private_key, mock_google_service_account_key), ): with mock.patch( "fence.blueprints.data.indexd.create_primary_service_account_key", - return_value=("sa_private_key"), + return_value=(sa_private_key), ): with mock.patch.object( cirrus.google_cloud.utils, @@ -461,7 +467,7 @@ def test_internal_get_gs_signed_url_cache_new_key_if_old_key_expired( @mock.patch.object(utils, "_get_proxy_group_id", return_value=None) @mock.patch.object(indexd, "get_or_create_proxy_group_id", return_value="1") -def test_internal_get_gs_signed_url_clear_cache( +def test_internal_get_gs_signed_url_clear_cache_and_parse_json( mock_get_or_create_proxy_group_id, mock_get_proxy_group_id, app, @@ -477,15 +483,6 @@ def test_internal_get_gs_signed_url_clear_cache( create presigned url again make sure cache is set correctly """ - # db_session.add( - # AssumeRoleCacheGCP( - # gcp_proxy_group_id="1", - # expires_at=0, - # gcp_private_key="key", - # gcp_key_db_entry='{"1":("key", keydbentry)}', - # ) - # ) - # db_session.commit() indexd_record_with_non_public_authz_and_public_acl_populated = { "urls": [f"gs://some/location"], @@ -499,14 +496,19 @@ def test_internal_get_gs_signed_url_clear_cache( mock_google_service_account_key = GoogleServiceAccountKey() mock_google_service_account_key.expires = 10 mock_google_service_account_key.private_key = "key" + sa_private_key = { + "type": "service_account", + "project_id": "project_id", + 
"private_key": "pdashoidhaspidhaspidhiash", + } with mock.patch( "fence.blueprints.data.indexd.get_or_create_primary_service_account_key", - return_value=("sa_private_key", mock_google_service_account_key), + return_value=(sa_private_key, mock_google_service_account_key), ): with mock.patch( "fence.blueprints.data.indexd.create_primary_service_account_key", - return_value=("sa_private_key"), + return_value=(sa_private_key), ): with mock.patch.object( cirrus.google_cloud.utils, @@ -531,11 +533,16 @@ def test_internal_get_gs_signed_url_clear_cache( after_cache = db_session.query(AssumeRoleCacheGCP).first() assert after_cache - assert before_cache != after_cache assert ( str(type(after_cache)) == "" ) + # check if json loads can properly parse json string stored in cache + assert ( + str(type(json.loads(after_cache.gcp_private_key))) + == "" + ) + assert json.loads(after_cache.gcp_private_key) == sa_private_key db_session.delete(after_cache) cleared_cache = db_session.query(AssumeRoleCacheGCP).first() @@ -553,11 +560,15 @@ def test_internal_get_gs_signed_url_clear_cache( redo_cache = db_session.query(AssumeRoleCacheGCP).first() - print("----------------------------------------------------") - print(redo_cache.gcp_proxy_group_id) - assert redo_cache - assert cleared_cache != redo_cache + assert ( + str(type(redo_cache)) == "" + ) + assert ( + str(type(json.loads(redo_cache.gcp_private_key))) + == "" + ) + assert json.loads(redo_cache.gcp_private_key) == sa_private_key assert ( str(type(redo_cache)) == "" ) From fdaa3531cddb0b0d1b26bb05d667f2e139af3283 Mon Sep 17 00:00:00 2001 From: BinamB Date: Tue, 6 Sep 2022 14:30:25 -0500 Subject: [PATCH 37/52] update travis for poetry --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 98314724b..2f7bdd4de 100644 --- a/.travis.yml +++ b/.travis.yml @@ -11,7 +11,7 @@ addons: postgresql: "9.6" install: - - curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python + - pip install poetry - source $HOME/.poetry/env - poetry install -vv - psql -c 'SELECT version();' -U postgres From c95a0539b43eba14363fcb2bcd9b648a3cad5aa6 Mon Sep 17 00:00:00 2001 From: BinamB Date: Tue, 6 Sep 2022 14:43:16 -0500 Subject: [PATCH 38/52] add poetry shell --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 2f7bdd4de..f2360a57b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,7 +12,7 @@ addons: install: - pip install poetry - - source $HOME/.poetry/env + - poetry shell - poetry install -vv - psql -c 'SELECT version();' -U postgres - psql -U postgres -c "create database fence_test_tmp" From 0604f9a105afe0eacbadeedbb62c507366a5f8eb Mon Sep 17 00:00:00 2001 From: BinamB Date: Tue, 6 Sep 2022 14:49:59 -0500 Subject: [PATCH 39/52] remove shell --- .travis.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index f2360a57b..393a47fdf 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,7 +12,6 @@ addons: install: - pip install poetry - - poetry shell - poetry install -vv - psql -c 'SELECT version();' -U postgres - psql -U postgres -c "create database fence_test_tmp" From 34aad86d0f2dd167fc2cc8446a703389abe30eea Mon Sep 17 00:00:00 2001 From: BinamB Date: Tue, 6 Sep 2022 15:49:56 -0500 Subject: [PATCH 40/52] add in-mem test --- tests/data/test_indexed_file.py | 33 ++++++++++----------------------- 1 file changed, 10 insertions(+), 23 deletions(-) diff --git a/tests/data/test_indexed_file.py 
b/tests/data/test_indexed_file.py index 956bf99ad..fc9114364 100755 --- a/tests/data/test_indexed_file.py +++ b/tests/data/test_indexed_file.py @@ -475,7 +475,10 @@ def test_internal_get_gs_signed_url_clear_cache_and_parse_json( db_session, ): """ - Test fence.blueprints.data.indexd.GoogleStorageIndexedFileLocation._generate_google_storage_signed_url does not use cached key if its expired + Test fence.blueprints.data.indexd.GoogleStorageIndexedFileLocation._generate_google_storage_signed_url + Scenario: - Create presigned url, cache in-mem and in db + - Roll pods, which removes in-mem cache but keeps db entry + - Make sure in-mem is populated correctly when creating presigned url again create presigned url set cache in db @@ -530,24 +533,16 @@ def test_internal_get_gs_signed_url_clear_cache_and_parse_json( r_pays_project=None, ) + assert google_object._assume_role_cache_gs["1"][0] == sa_private_key + after_cache = db_session.query(AssumeRoleCacheGCP).first() assert after_cache - assert ( - str(type(after_cache)) - == "<class 'fence.models.AssumeRoleCacheGCP'>" - ) # check if json loads can properly parse json string stored in cache - assert ( - str(type(json.loads(after_cache.gcp_private_key))) - == "<class 'dict'>" - ) assert json.loads(after_cache.gcp_private_key) == sa_private_key - db_session.delete(after_cache) - cleared_cache = db_session.query(AssumeRoleCacheGCP).first() - - assert cleared_cache is None + # make sure cache is added back in the proper format after clearing + google_object._assume_role_cache_gs = {} google_object._generate_google_storage_signed_url( http_verb="GET", @@ -558,20 +553,12 @@ def test_internal_get_gs_signed_url_clear_cache_and_parse_json( r_pays_project=None, ) + assert google_object._assume_role_cache_gs["1"][0] == sa_private_key + redo_cache = db_session.query(AssumeRoleCacheGCP).first() assert redo_cache - assert ( - str(type(redo_cache)) == "<class 'fence.models.AssumeRoleCacheGCP'>" - ) - assert ( - str(type(json.loads(redo_cache.gcp_private_key))) - == "<class 'dict'>" - ) - assert json.loads(redo_cache.gcp_private_key) == sa_private_key + assert json.loads(redo_cache.gcp_private_key) == sa_private_key From dbe59f7ea864b856717d0d9576c1e1d55e226834 Mon Sep 17 00:00:00 2001 From: BinamB Date: Tue, 6 Sep 2022 15:53:12 -0500 Subject: [PATCH 41/52] add try-except --- tests/data/test_indexed_file.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/tests/data/test_indexed_file.py b/tests/data/test_indexed_file.py index fc9114364..a6457cc0e 100755 --- a/tests/data/test_indexed_file.py +++ b/tests/data/test_indexed_file.py @@ -533,8 +533,10 @@ def test_internal_get_gs_signed_url_clear_cache_and_parse_json( r_pays_project=None, ) - assert google_object._assume_role_cache_gs["1"][0] == sa_private_key - + try: + assert google_object._assume_role_cache_gs["1"][0] == sa_private_key + except Exception: + pytest.fail("Could not json.loads(cache)") after_cache = db_session.query(AssumeRoleCacheGCP).first() assert after_cache + # check if json loads can properly parse json string stored in cache - assert json.loads(after_cache.gcp_private_key) == sa_private_key + try: + assert json.loads(after_cache.gcp_private_key) == sa_private_key + except Exception: + pytest.fail("Could not json.loads(cache)") # make sure cache is added back in the proper format after clearing google_object._assume_role_cache_gs = {} From baa899e042138ec573180517551976cca35b3369 Mon Sep 17 00:00:00 2001 From: BinamB Date: Tue, 6 Sep 2022 16:14:08 -0500 Subject: [PATCH 42/52] fix try catch --- tests/data/test_indexed_file.py | 11 ++++++----- 1 
file changed, 6 insertions(+), 5 deletions(-) diff --git a/tests/data/test_indexed_file.py b/tests/data/test_indexed_file.py index a6457cc0e..02d2ef55d 100755 --- a/tests/data/test_indexed_file.py +++ b/tests/data/test_indexed_file.py @@ -533,15 +533,16 @@ def test_internal_get_gs_signed_url_clear_cache_and_parse_json( r_pays_project=None, ) - try: - assert google_object._assume_role_cache_gs["1"][0] == sa_private_key - except Exception: - pytest.fail("Could not json.loads(cache)") + assert google_object._assume_role_cache_gs["1"][0] == sa_private_key + after_cache = db_session.query(AssumeRoleCacheGCP).first() assert after_cache # check if json loads can properly parse json string stored in cache - assert json.loads(after_cache.gcp_private_key) == sa_private_key + try: + assert json.loads(after_cache.gcp_private_key) == sa_private_key + except Exception: + pytest.fail("Could not json.loads(cache)") # make sure cache is added back in the proper format after clearing google_object._assume_role_cache_gs = {} From baa899e042138ec573180517551976cca35b3369 Mon Sep 17 00:00:00 2001 From: BinamB Date: Wed, 7 Sep 2022 09:05:23 -0500 Subject: [PATCH 43/52] change tests --- tests/data/test_indexed_file.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/tests/data/test_indexed_file.py b/tests/data/test_indexed_file.py index 02d2ef55d..fb01c2764 100755 --- a/tests/data/test_indexed_file.py +++ b/tests/data/test_indexed_file.py @@ -539,10 +539,9 @@ def test_internal_get_gs_signed_url_clear_cache_and_parse_json( assert after_cache # check if json loads can properly parse json string stored in cache - try: - assert json.loads(after_cache.gcp_private_key) == sa_private_key - except Exception: - pytest.fail("Could not json.loads(cache)") + assert "1" in google_object._assume_role_cache_gs + assert len(google_object._assume_role_cache_gs["1"]) > 1 + assert google_object._assume_role_cache_gs["1"][0] == sa_private_key # make sure cache is added back in the proper format after clearing google_object._assume_role_cache_gs = {} @@ -561,10 +560,10 @@ def test_internal_get_gs_signed_url_clear_cache_and_parse_json( redo_cache = db_session.query(AssumeRoleCacheGCP).first() assert redo_cache - try: - assert json.loads(redo_cache.gcp_private_key) == sa_private_key - except Exception: - pytest.fail("Could not json.loads(cache)") + # check if json loads can properly parse json string stored in cache + assert "1" in google_object._assume_role_cache_gs + assert len(google_object._assume_role_cache_gs["1"]) > 1 + assert google_object._assume_role_cache_gs["1"][0] == sa_private_key def test_set_acl_missing_unauthorized( From 417c389598fe11cbb8d7ecf6aa593eb8a59cd07b Mon Sep 17 00:00:00 2001 From: Alexander VanTol Date: Wed, 7 Sep 2022 10:27:18 -0500 Subject: [PATCH 44/52] Update tests/data/test_indexed_file.py --- tests/data/test_indexed_file.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/data/test_indexed_file.py b/tests/data/test_indexed_file.py index fb01c2764..74b3ea5b8 100755 --- a/tests/data/test_indexed_file.py +++ b/tests/data/test_indexed_file.py @@ -555,7 +555,6 @@ def test_internal_get_gs_signed_url_clear_cache_and_parse_json( r_pays_project=None, ) - assert google_object._assume_role_cache_gs["1"][0] == sa_private_key redo_cache = db_session.query(AssumeRoleCacheGCP).first() From 428a2a1a13378013fbe92b8076706954a456084b Mon Sep 17 00:00:00 2001 From: BinamB Date: Wed, 7 Sep 2022 10:48:16 -0500 Subject: [PATCH 45/52] remove redundant assert --- 
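For orientation while reading these test revisions: every assertion being reshuffled here exercises the same two-tier cache inside _generate_google_storage_signed_url -- an in-memory dict (_assume_role_cache_gs) keyed by proxy group id, in front of an AssumeRoleCacheGCP row whose gcp_private_key column holds JSON text. A hedged sketch of that lookup follows; the names come from the diffs above, while the import path and the exact query filter are assumptions for illustration:

    import json
    import time

    from fence.models import AssumeRoleCacheGCP  # assumed import path

    def lookup_cached_key(cache_gs, db_session, proxy_group_id, expires_in):
        """Sketch of the two-tier lookup; not the verbatim implementation."""
        expiration_time = int(time.time()) + expires_in

        # Tier 1: in-memory dict of proxy_group_id -> (private_key, expires_at).
        private_key, expires_at = cache_gs.get(proxy_group_id, (None, None))
        if expires_at and expires_at > expiration_time:
            return private_key

        # Tier 2: DB row; gcp_private_key was written with json.dumps(),
        # so json.loads() hands back the original dict.
        row = (
            db_session.query(AssumeRoleCacheGCP)
            .filter_by(gcp_proxy_group_id=proxy_group_id)  # assumed filter
            .first()
        )
        if row and row.expires_at > expiration_time:
            private_key = json.loads(row.gcp_private_key)
            cache_gs[proxy_group_id] = (private_key, row.expires_at)
            return private_key

        # Miss: the caller provisions a fresh service account key and writes
        # it to both tiers.
        return None

The tests above pin down exactly this behavior: wiping _assume_role_cache_gs and generating another URL must repopulate the in-memory tier from the JSON stored in the database.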
tests/data/test_indexed_file.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/data/test_indexed_file.py b/tests/data/test_indexed_file.py index 74b3ea5b8..5988f75fd 100755 --- a/tests/data/test_indexed_file.py +++ b/tests/data/test_indexed_file.py @@ -555,7 +555,6 @@ def test_internal_get_gs_signed_url_clear_cache_and_parse_json( r_pays_project=None, ) - redo_cache = db_session.query(AssumeRoleCacheGCP).first() assert redo_cache From 65657b12e7335674a4310acf40355d42e5d7d28d Mon Sep 17 00:00:00 2001 From: Clint Date: Thu, 8 Sep 2022 09:40:58 -0700 Subject: [PATCH 46/52] Chore: Testing codecoverage.yaml --- .github/workflows/codecoverage.yaml | 56 ----------------------------- .github/workflows/pipeline.yaml | 2 +- 2 files changed, 1 insertion(+), 57 deletions(-) delete mode 100644 .github/workflows/codecoverage.yaml diff --git a/.github/workflows/codecoverage.yaml b/.github/workflows/codecoverage.yaml deleted file mode 100644 index 09d6355bd..000000000 --- a/.github/workflows/codecoverage.yaml +++ /dev/null @@ -1,56 +0,0 @@ -name: test - -on: push - -jobs: - test: - runs-on: ubuntu-latest - steps: - #---------------------------------------------- - # check-out repo and set-up python - #---------------------------------------------- - - name: Check out repository - uses: actions/checkout@v2 - - name: Set up python - id: setup-python - uses: actions/setup-python@v2 - with: - python-version: 3.9 - #---------------------------------------------- - # ----- install & configure poetry ----- - #---------------------------------------------- - - name: Install Poetry - uses: snok/install-poetry@v1 - with: - virtualenvs-create: true - virtualenvs-in-project: true - installer-parallel: true - - #---------------------------------------------- - # load cached venv if cache exists - #---------------------------------------------- - - name: Load cached venv - id: cached-poetry-dependencies - uses: actions/cache@v2 - with: - path: .venv - key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }} - #---------------------------------------------- - # install dependencies if cache does not exist - #---------------------------------------------- - - name: Install dependencies - if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' - run: poetry install --no-interaction --no-root - #---------------------------------------------- - # install your root project, if required - #---------------------------------------------- - - name: Install library - run: poetry install --no-interaction - #---------------------------------------------- - # run test suite - #---------------------------------------------- - - name: Run tests - run: | - source .venv/bin/activate - pytest tests/ - coverage report diff --git a/.github/workflows/pipeline.yaml b/.github/workflows/pipeline.yaml index 50e89cf00..fedd8cfa6 100644 --- a/.github/workflows/pipeline.yaml +++ b/.github/workflows/pipeline.yaml @@ -13,7 +13,7 @@ concurrency: jobs: Security: name: Security Pipeline - uses: uc-cdis/.github/.github/workflows/securitypipeline.yaml@master + uses: uc-cdis/.github/.github/workflows/securitypipeline.yaml@dso-13_code_coverage secrets: inherit ci: From 17323b3f6d2031a830d553c41f543f8c617c0ca6 Mon Sep 17 00:00:00 2001 From: Clint Date: Thu, 8 Sep 2022 09:42:52 -0700 Subject: [PATCH 47/52] Empty-Commit From 7cf5982d3c9049481151e69c6f9c592228496733 Mon Sep 17 00:00:00 2001 From: Clint Date: Thu, 8 Sep 2022 09:43:46 -0700 Subject: [PATCH 48/52] Chore: Testing 
codecoverage.yaml --- .github/workflows/pipeline.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pipeline.yaml b/.github/workflows/pipeline.yaml index fedd8cfa6..da01fa4f0 100644 --- a/.github/workflows/pipeline.yaml +++ b/.github/workflows/pipeline.yaml @@ -13,7 +13,7 @@ concurrency: jobs: Security: name: Security Pipeline - uses: uc-cdis/.github/.github/workflows/securitypipeline.yaml@dso-13_code_coverage + uses: uc-cdis/.github/.github/workflows/securitypipeline.yaml@sdo-13_code_coverage secrets: inherit ci: From 20993f0319c5ef5c5e781308169378bface09649 Mon Sep 17 00:00:00 2001 From: Clint Date: Thu, 8 Sep 2022 10:03:12 -0700 Subject: [PATCH 49/52] Chore: Testing codecoverage.yaml --- .github/workflows/pipeline.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pipeline.yaml b/.github/workflows/pipeline.yaml index da01fa4f0..664be4aeb 100644 --- a/.github/workflows/pipeline.yaml +++ b/.github/workflows/pipeline.yaml @@ -13,7 +13,7 @@ concurrency: jobs: Security: name: Security Pipeline - uses: uc-cdis/.github/.github/workflows/securitypipeline.yaml@sdo-13_code_coverage + uses: uc-cdis/.github/.github/workflows/securitypipeline.yaml@dev secrets: inherit ci: From da7bc8f95c4964345c3314b0a2a703a96dde2dcc Mon Sep 17 00:00:00 2001 From: Clint Date: Thu, 8 Sep 2022 10:25:41 -0700 Subject: [PATCH 50/52] Empty-Commit From 1ef8170dd1792fe673bdb3e0812b239fde69f704 Mon Sep 17 00:00:00 2001 From: Clint Date: Thu, 8 Sep 2022 10:55:37 -0700 Subject: [PATCH 51/52] Chore: Testing codecoverage.yaml --- .github/workflows/{pipeline.yaml => buildpipeline.yaml} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename .github/workflows/{pipeline.yaml => buildpipeline.yaml} (98%) diff --git a/.github/workflows/pipeline.yaml b/.github/workflows/buildpipeline.yaml similarity index 98% rename from .github/workflows/pipeline.yaml rename to .github/workflows/buildpipeline.yaml index 664be4aeb..50e89cf00 100644 --- a/.github/workflows/pipeline.yaml +++ b/.github/workflows/buildpipeline.yaml @@ -13,7 +13,7 @@ concurrency: jobs: Security: name: Security Pipeline - uses: uc-cdis/.github/.github/workflows/securitypipeline.yaml@dev + uses: uc-cdis/.github/.github/workflows/securitypipeline.yaml@master secrets: inherit ci: From d801fa0b1ccd5ce1077dc4519917076246bde416 Mon Sep 17 00:00:00 2001 From: Alexander VanTol Date: Mon, 12 Sep 2022 15:06:36 -0500 Subject: [PATCH 52/52] Update buildpipeline.yaml --- .github/workflows/buildpipeline.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/buildpipeline.yaml b/.github/workflows/buildpipeline.yaml index 50e89cf00..e8a4745c2 100644 --- a/.github/workflows/buildpipeline.yaml +++ b/.github/workflows/buildpipeline.yaml @@ -18,7 +18,8 @@ jobs: ci: name: Build Image and Push - needs: Security + # TODO Add this line back once we update to Python 3.9 from 3.6 + # needs: Security uses: uc-cdis/.github/.github/workflows/image_build_push.yaml@master secrets: ECR_AWS_ACCESS_KEY_ID: ${{ secrets.ECR_AWS_ACCESS_KEY_ID }}